diff --git a/.devcontainer/README.md b/.devcontainer/README.md
index f190587bc160..a589b1d46bc6 100644
--- a/.devcontainer/README.md
+++ b/.devcontainer/README.md
@@ -124,7 +124,7 @@ To configure access to the PostgreSQL database using the VS Code extension:
   - Password: `postgres`
   - Port: `5432`
   - Use an ssl connection: "Standard connection"
-  - Database: `blockscout`
+  - Database: `app`
   - The display name: ""
 
 These credentials are derived from the `DATABASE_URL` in the `bs` script.
diff --git a/.devcontainer/bin/bs b/.devcontainer/bin/bs
index 5a6e9a5121cc..145c152531dc 100755
--- a/.devcontainer/bin/bs
+++ b/.devcontainer/bin/bs
@@ -1,12 +1,40 @@
 #!/bin/bash
 
-# This script helps to orchestrate typical tasks when developing Blockscout
+# Blockscout Development Helper Script
+#
+# This script provides a unified interface for common development tasks when working
+# with the Blockscout backend server. It handles environment configuration, project
+# initialization, and various development workflows.
+#
+# Main usage scenarios:
+# 1. Project Setup
+#    - Initialize project directory: bs --init
+#    - Setup/reset database: bs --db-init
+#
+# 2. Development Tasks
+#    - Run backend server: bs
+#    - Run server (API only): bs --no-sync
+#    - Compile/recompile changes: bs --compile
+#    - Recompile dependencies: bs --recompile
+#
+# 3. Code Quality
+#    - Run formatter: bs --format
+#    - Run static analysis: bs --dialyzer
+#    - Run code style checks: bs --credo
+#    - Run spell checker: bs --spellcheck
+#
+# 4. Documentation
+#    - Generate project docs: bs --docs
+#    - Show usage help: bs --help
+#
+# Environment:
+# - Loads configuration from .devcontainer/.blockscout_config if present
+# - Uses default DATABASE_URL if not specified
+# - Supports chain-specific configurations via CHAIN_TYPE
 
 source $(dirname $0)/utils
 
-# cd $(dirname $0)/../../
-
-# Source and exaport environment variables related to the backend configuration
+# Source and export environment variables related to the backend configuration
 BLOCKSCOUT_CONFIG_FILE=".devcontainer/.blockscout_config"
 if [ -f "./${BLOCKSCOUT_CONFIG_FILE}" ]; then
   set -a # Automatically export all variables
@@ -17,13 +45,14 @@ else
 fi
 
 if [ "${DATABASE_URL}" == "" ]; then
-  export DATABASE_URL="postgresql://postgres:postgres@db:5432/blockscout"
+  export DATABASE_URL="postgresql://postgres:postgres@db:5432/app"
 fi
 
 # Initialize variables
 INIT=false
 NO_SYNC=false
 DB_INIT=false
+COMPILE=false
 RECOMPILE=false
 SPELLCHECK=false
 DIALYZER=false
@@ -32,6 +61,53 @@ FORMAT=false
 DOCS=false
 HELP=false
 
+# Define the help function
+show_help() {
+  echo "Usage: bs [OPTION]"
+  echo "Orchestrate typical tasks when developing the Blockscout backend server"
+  echo
+  echo "Options:"
+  echo "  --help        Show this help message and exit"
+  echo "  --init        Initialize the project directory"
+  echo "  --format      Run code formatter"
+  echo "  --spellcheck  Run spellcheck"
+  echo "  --dialyzer    Run dialyzer"
+  echo "  --credo       Run credo"
+  echo "  --docs        Generate documentation"
+  echo "  --compile     Compile/recompile changes"
+  echo "  --recompile   Re-fetch dependencies and recompile"
+  echo "  --db-init     (Re)initialize the database"
+  echo "  --no-sync     Run the server with the indexer disabled, so only the API is available"
+  echo
+  echo "If no option is provided, the script will run the backend server."
+}
+
+# Define valid arguments
+VALID_ARGS=(
+  "--help"
+  "--init"
+  "--no-sync"
+  "--db-init"
+  "--compile"
+  "--recompile"
+  "--spellcheck"
+  "--dialyzer"
+  "--credo"
+  "--format"
+  "--docs"
+)
+
+# Validate arguments
+for arg in "$@"
+do
+  if [[ ! " ${VALID_ARGS[@]} " =~ " ${arg} " ]]; then
+    echo "Error: Unknown argument '${arg}'"
+    echo
+    show_help
+    exit 1
+  fi
+done
+
 # Parse command line arguments
 for arg in "$@"
 do
@@ -52,6 +128,10 @@ do
       DB_INIT=true
       shift # Remove --db-init from processing
       ;;
+    --compile)
+      COMPILE=true
+      shift # Remove --compile from processing
+      ;;
     --recompile)
       RECOMPILE=true
       shift # Remove --recompile from processing
@@ -69,7 +149,7 @@ do
       shift # Remove --credo from processing
       ;;
     --format)
-      FORMAT=true 
+      FORMAT=true
       shift # Remove --format from processing
       ;;
     --docs)
@@ -79,26 +159,6 @@ do
   esac
 done
 
-# Define the help function
-show_help() {
-  echo "Usage: bs [OPTION]"
-  echo "Orchestrate typical tasks when developing Blockscout backend server"
-  echo
-  echo "Options:"
-  echo "  --help        Show this help message and exit"
-  echo "  --init        Initialize the project directory"
-  echo "  --format      Run code formatter"
-  echo "  --spellcheck  Run spellcheck"
-  echo "  --dialyzer    Run dialyzer"
-  echo "  --credo       Run credo"
-  echo "  --docs        Generate documentation"
-  echo "  --recompile   Re-fetch dependencies and recompile"
-  echo "  --db-init     (Re)initialize the database"
-  echo "  --no-sync     Run the server with disabled indexer, so only the API is available"
-  echo
-  echo "If no option is provided, the script will run the backend server."
-}
-
 # If --help argument is passed, show help and exit
 if [ "$HELP" = true ]; then
   show_help
@@ -130,16 +190,28 @@ initialize_project() {
 
 # Define the initialization subroutine
 initialize_db() {
   echo "Initializing database. Step 1 of 2: Dropping database"
-  mix ecto.drop > /dev/null 2>&1
-  echo "Initializing database. Step 2 of 2: Creating database"
-  mix do ecto.create, ecto.migrate | grep Runn
+  if OUTPUT=$(mix ecto.drop 2>&1); then
+    echo "Initializing database. Step 2 of 2: Creating database"
+    mix do ecto.create, ecto.migrate | grep Runn
+  else
+    echo "Failed to drop database. Initialization aborted."
+    echo "Error output:"
+    echo "$OUTPUT"
+    return 1
+  fi
+}
+
+# Define the compile subroutine
+compile() {
+  mix compile
 }
 
 # Define the recompile subroutine
 recompile() {
-  mix deps.clean block_scout_web
-  mix deps.clean explorer
-  mix deps.clean indexer
+  FALLBACK_APPS="block_scout_web ethereum_jsonrpc explorer indexer utils nft_media_handler"
+  APPS=$($(dirname $0)/extract_apps.exs) || APPS="$FALLBACK_APPS"
+  [ -z "$APPS" ] && APPS="$FALLBACK_APPS"
+  mix deps.clean $APPS
   mix deps.get
   mix deps.compile --force
 }
@@ -151,7 +223,12 @@ spellcheck() {
 
 # Define the dialyzer subroutine
 dialyzer() {
-  mix dialyzer
+  if ! mix dialyzer; then
+    echo -e "\nDepending on the error you see, try either:"
+    echo "  rm -rf 'priv/plts'"
+    echo "  MIX_ENV=test bs --recompile"
+    return 1
+  fi
 }
 
 # Define the credo subroutine
@@ -181,6 +258,12 @@ if [ "$DB_INIT" = true ]; then
   exit 0
 fi
 
+# If --compile argument is passed, run the compile subroutine and exit
+if [ "$COMPILE" = true ]; then
+  compile
+  exit 0
+fi
+
 # If --recompile argument is passed, run the recompile subroutine and exit
 if [ "$RECOMPILE" = true ]; then
   recompile
diff --git a/.devcontainer/bin/extract_apps.exs b/.devcontainer/bin/extract_apps.exs
new file mode 100755
index 000000000000..a82f87c95d67
--- /dev/null
+++ b/.devcontainer/bin/extract_apps.exs
@@ -0,0 +1,45 @@
+#!/usr/bin/env elixir
+
+defmodule LocalHelper do
+  # Helper function to safely get configuration values
+  def get_config_value(config, key, name) do
+    case Keyword.get(config, key) do
+      nil -> {:error, name}
+      value -> {:ok, value}
+    end
+  end
+end
+
+# Start Mix application
+Mix.start()
+
+# Set the Mix environment to dev (or whatever environment you need)
+Mix.env(:dev)
+
+# Read and evaluate the mix.exs file
+Code.require_file("mix.exs")
+
+# Get the applications from the project configuration
+apps =
+  try do
+    project = BlockScout.Mixfile.project()
+
+    with {:ok, releases} <- LocalHelper.get_config_value(project, :releases, "releases"),
+         {:ok, blockscout} <- LocalHelper.get_config_value(releases, :blockscout, "blockscout release"),
+         {:ok, applications} <- LocalHelper.get_config_value(blockscout, :applications, "applications") do
+      applications
+      |> Keyword.keys()
+      |> Enum.join("\n")
+    else
+      {:error, message} ->
+        IO.puts(:stderr, "Error: #{message} not found in mix.exs configuration")
+        System.halt(1)
+    end
+  rescue
+    error ->
+      IO.puts(:stderr, "Error: Failed to read mix.exs configuration - #{Exception.message(error)}")
+      System.halt(1)
+  end
+
+# Print the applications to stdout
+IO.puts(apps)
diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml
index 280ceeafd3ef..b02f2981ad5f 100644
--- a/.devcontainer/docker-compose.yml
+++ b/.devcontainer/docker-compose.yml
@@ -16,7 +16,7 @@ services:
     command: sleep infinity
 
   db:
-    image: postgres:latest
+    image: postgres:17
     command: postgres -c 'max_connections=250'
     restart: unless-stopped
     volumes:
diff --git a/.dialyzer-ignore b/.dialyzer-ignore
index 941f77a1f158..c062e67b7176 100644
--- a/.dialyzer-ignore
+++ b/.dialyzer-ignore
@@ -6,3 +6,6 @@ lib/explorer/smart_contract/solidity/publisher_worker.ex:8
 lib/explorer/smart_contract/vyper/publisher_worker.ex:8
 lib/explorer/smart_contract/stylus/publisher_worker.ex:8
 lib/phoenix/router.ex:402
+lib/explorer/chain/search.ex:80
+lib/explorer/chain/search.ex:183
+lib/explorer/chain/search.ex:271
\ No newline at end of file
diff --git a/.github/workflows/antiscam.yml b/.github/workflows/antiscam.yml
new file mode 100644
index 000000000000..0c8702be7b3c
--- /dev/null
+++ b/.github/workflows/antiscam.yml
@@ -0,0 +1,22 @@
+name: antiscam
+
+on:
+  issue_comment:
+    types:
+      - created
+      - edited
+
+permissions:
+  pull-requests: write
+  issues: write
+
+jobs:
+  build:
+    if: ${{ !github.event.issue.pull_request }}
+    name: Antiscam
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: vbaranov/antiscam-action@main
+        with:
+          token: ${{ github.token }}
\ No newline at end of file
diff --git a/.github/workflows/antispam.yml b/.github/workflows/antispam.yml
new file mode 100644
index 000000000000..4c67e035d5d3
--- /dev/null
+++ b/.github/workflows/antispam.yml
@@ -0,0 +1,22 @@
+name: antispam
+
+on:
+  issues:
+    types:
+      - opened
+      - edited
+      - reopened
+
+permissions:
+  pull-requests: write
+  issues: write
+
+jobs:
+  build:
+    name: Antispam
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: vbaranov/antispam-action@main
+        with:
+          token: ${{ github.token }}
diff --git a/.github/workflows/config.yml b/.github/workflows/config.yml
index af751b9c0976..b793a1ca23c1 100644
--- a/.github/workflows/config.yml
+++ b/.github/workflows/config.yml
@@ -498,7 +498,7 @@ jobs:
       - matrix-builder
     services:
       postgres:
-        image: postgres:15
+        image: postgres:17
        env:
          # Match apps/explorer/config/test.exs config :explorer, Explorer.Repo, database
          POSTGRES_DB: explorer_test
@@ -561,7 +561,7 @@ jobs:
       - matrix-builder
     services:
       postgres:
-        image: postgres:15
+        image: postgres:17
        env:
          # Match apps/explorer/config/test.exs config :explorer, Explorer.Repo, database
          POSTGRES_DB: explorer_test
@@ -636,7 +636,7 @@ jobs:
       - matrix-builder
     services:
       postgres:
-        image: postgres:15
+        image: postgres:17
        env:
          # Match apps/explorer/config/test.exs config :explorer, Explorer.Repo, database
          POSTGRES_DB: explorer_test
@@ -708,7 +708,7 @@ jobs:
         - 6379:6379
 
      postgres:
-        image: postgres:15
+        image: postgres:17
        env:
          # Match apps/explorer/config/test.exs config :explorer, Explorer.Repo, database
          POSTGRES_DB: explorer_test
diff --git a/.github/workflows/pre-release-arbitrum.yml b/.github/workflows/pre-release-arbitrum.yml
index 23d40950aeb7..974fb1b53690 100644
--- a/.github/workflows/pre-release-arbitrum.yml
+++ b/.github/workflows/pre-release-arbitrum.yml
@@ -16,7 +16,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
     steps:
       - uses: actions/checkout@v4
       - name: Setup repo
diff --git a/.github/workflows/pre-release-blackfort.yml b/.github/workflows/pre-release-blackfort.yml
index 1ec25b0f9745..695bfe25f1e1 100644
--- a/.github/workflows/pre-release-blackfort.yml
+++ b/.github/workflows/pre-release-blackfort.yml
@@ -16,7 +16,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
     steps:
       - uses: actions/checkout@v4
       - name: Setup repo
@@ -41,13 +41,6 @@ jobs:
             linux/amd64
             linux/arm64/v8
           build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
             BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
             RELEASE_VERSION=${{ env.RELEASE_VERSION }}
             CHAIN_TYPE=blackfort
@@ -65,11 +58,6 @@ jobs:
             linux/arm64/v8
           build-args: |
             DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
             BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
             RELEASE_VERSION=${{ env.RELEASE_VERSION }}
             CHAIN_TYPE=blackfort
@@ -87,11 +75,6 @@ jobs:
             linux/arm64/v8
           build-args: |
             DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
             BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
             RELEASE_VERSION=${{ env.RELEASE_VERSION }}
             CHAIN_TYPE=blackfort
\ No newline at end of file
diff --git a/.github/workflows/pre-release-celo.yml b/.github/workflows/pre-release-celo.yml
index 6c358f235bcc..2f1f726199e8 100644
--- a/.github/workflows/pre-release-celo.yml
+++ b/.github/workflows/pre-release-celo.yml
@@ -16,7 +16,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
       API_GRAPHQL_MAX_COMPLEXITY: 10400
     steps:
       - uses: actions/checkout@v4
@@ -42,13 +42,6 @@ jobs:
             linux/amd64
             linux/arm64/v8
           build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
             BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
             RELEASE_VERSION=${{ env.RELEASE_VERSION }}
             CHAIN_TYPE=celo
@@ -66,11 +59,6 @@ jobs:
             linux/arm64/v8
           build-args: |
             DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
             BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
             RELEASE_VERSION=${{ env.RELEASE_VERSION }}
             CHAIN_TYPE=celo
@@ -88,11 +76,6 @@ jobs:
             linux/arm64/v8
           build-args: |
             DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
             BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
             RELEASE_VERSION=${{ env.RELEASE_VERSION }}
             CHAIN_TYPE=celo
@@ -110,13 +93,6 @@ jobs:
             linux/arm64/v8
           build-args: |
             API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }}
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
             BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
             RELEASE_VERSION=${{ env.RELEASE_VERSION }}
             CHAIN_TYPE=celo
@@ -135,12 +111,7 @@ jobs:
             linux/arm64/v8
           build-args: |
             API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }}
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
             DISABLE_API=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
             BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
             RELEASE_VERSION=${{ env.RELEASE_VERSION }}
             CHAIN_TYPE=celo
@@ -159,12 +130,7 @@ jobs:
             linux/arm64/v8
           build-args: |
             API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }}
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
             DISABLE_INDEXER=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
             BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
             RELEASE_VERSION=${{ env.RELEASE_VERSION }}
             CHAIN_TYPE=celo
diff --git a/.github/workflows/pre-release-eth.yml b/.github/workflows/pre-release-eth.yml
index 91ffd072bf0a..62e7359366c5 100644
--- a/.github/workflows/pre-release-eth.yml
+++ b/.github/workflows/pre-release-eth.yml
@@ -16,7 +16,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
     steps:
       - uses: actions/checkout@v4
       - name: Setup repo
@@ -41,13 +41,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=ethereum
@@ -65,11 +58,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=ethereum
@@ -87,11 +75,59 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
-            CHAIN_TYPE=ethereum
\ No newline at end of file
+            CHAIN_TYPE=ethereum
+
+      - name: Build and push Docker image for Ethereum (indexer + API + shrink internal transactions)
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          file: ./docker/Dockerfile
+          push: true
+          tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-shrink-internal-txs
+          labels: ${{ steps.setup.outputs.docker-labels }}
+          platforms: |
+            linux/amd64
+            linux/arm64/v8
+          build-args: |
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
+            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
+            CHAIN_TYPE=ethereum
+            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
+
+      - name: Build and push Docker image for Ethereum (indexer + shrink internal transactions)
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          file: ./docker/Dockerfile
+          push: true
+          tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-shrink-internal-txs-indexer
+          labels: ${{ steps.setup.outputs.docker-labels }}
+          platforms: |
+            linux/amd64
+            linux/arm64/v8
+          build-args: |
+            DISABLE_API=true
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
+            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
+            CHAIN_TYPE=ethereum
+            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
+
+      - name: Build and push Docker image for Ethereum (API + shrink internal transactions)
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          file: ./docker/Dockerfile
+          push: true
+          tags: blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-shrink-internal-txs-api
+          labels: ${{ steps.setup.outputs.docker-labels }}
+          platforms: |
+            linux/amd64
+            linux/arm64/v8
+          build-args: |
+            DISABLE_INDEXER=true
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
+            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
+            CHAIN_TYPE=ethereum
+            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
\ No newline at end of file
diff --git a/.github/workflows/pre-release-filecoin.yml b/.github/workflows/pre-release-filecoin.yml
index bf4e309bc30e..290378445310 100644
--- a/.github/workflows/pre-release-filecoin.yml
+++ b/.github/workflows/pre-release-filecoin.yml
@@ -16,7 +16,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
     steps:
       - uses: actions/checkout@v4
       - name: Setup repo
@@ -41,13 +41,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=filecoin
@@ -65,11 +58,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=filecoin
@@ -87,11 +75,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=filecoin
@@ -108,13 +91,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=filecoin
@@ -132,12 +108,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
            DISABLE_API=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=filecoin
@@ -155,12 +126,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
            DISABLE_INDEXER=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=filecoin
diff --git a/.github/workflows/pre-release-optimism.yml b/.github/workflows/pre-release-optimism.yml
index c51a5d23c702..bbc74c0ef6ec 100644
--- a/.github/workflows/pre-release-optimism.yml
+++ b/.github/workflows/pre-release-optimism.yml
@@ -16,7 +16,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
     steps:
       - uses: actions/checkout@v4
       - name: Setup repo
@@ -41,13 +41,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=optimism
@@ -65,11 +58,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=optimism
@@ -87,11 +75,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=optimism
@@ -108,14 +91,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=optimism
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
@@ -132,13 +108,8 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
            DISABLE_API=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=optimism
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
@@ -155,13 +126,8 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
            DISABLE_INDEXER=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=optimism
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
\ No newline at end of file
diff --git a/.github/workflows/pre-release-polygon-zkevm.yml b/.github/workflows/pre-release-polygon-zkevm.yml
index 90d5e2558166..3f106bad3673 100644
--- a/.github/workflows/pre-release-polygon-zkevm.yml
+++ b/.github/workflows/pre-release-polygon-zkevm.yml
@@ -16,7 +16,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
     steps:
       - uses: actions/checkout@v4
       - name: Setup repo
@@ -41,13 +41,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=polygon_zkevm
@@ -65,11 +58,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=polygon_zkevm
@@ -87,11 +75,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=polygon_zkevm
@@ -108,14 +91,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=polygon_zkevm
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
@@ -132,13 +108,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
-            DISABLE_API=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=polygon_zkevm
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
@@ -155,13 +125,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
-            DISABLE_INDEXER=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=polygon_zkevm
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
\ No newline at end of file
diff --git a/.github/workflows/pre-release-redstone.yml b/.github/workflows/pre-release-redstone.yml
index 8611fa5c0c5e..c076dae09eb0 100644
--- a/.github/workflows/pre-release-redstone.yml
+++ b/.github/workflows/pre-release-redstone.yml
@@ -16,7 +16,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
     steps:
       - uses: actions/checkout@v4
       - name: Setup repo
@@ -41,13 +41,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=optimism
@@ -66,11 +59,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=optimism
@@ -89,11 +77,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=optimism
diff --git a/.github/workflows/pre-release-scroll.yml b/.github/workflows/pre-release-scroll.yml
index 879e7ec0740d..8687f116b6ea 100644
--- a/.github/workflows/pre-release-scroll.yml
+++ b/.github/workflows/pre-release-scroll.yml
@@ -16,7 +16,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
     steps:
       - uses: actions/checkout@v4
       - name: Setup repo
@@ -41,13 +41,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=scroll
@@ -65,11 +58,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=scroll
@@ -87,11 +75,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=scroll
@@ -108,13 +91,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=scroll
@@ -132,12 +108,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
-            DISABLE_API=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=scroll
@@ -155,12 +125,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
-            DISABLE_INDEXER=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=scroll
diff --git a/.github/workflows/pre-release-shibarium.yml b/.github/workflows/pre-release-shibarium.yml
index bf75929d8ed3..5a256aabc5c1 100644
--- a/.github/workflows/pre-release-shibarium.yml
+++ b/.github/workflows/pre-release-shibarium.yml
@@ -16,7 +16,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
     steps:
       - uses: actions/checkout@v4
       - name: Setup repo
@@ -41,13 +41,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=shibarium
@@ -65,11 +58,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=shibarium
@@ -87,11 +75,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=shibarium
\ No newline at end of file
diff --git a/.github/workflows/pre-release-zilliqa.yml b/.github/workflows/pre-release-zilliqa.yml
index e01858682424..bb70faf14b6d 100644
--- a/.github/workflows/pre-release-zilliqa.yml
+++ b/.github/workflows/pre-release-zilliqa.yml
@@ -16,7 +16,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
     steps:
       - uses: actions/checkout@v4
       - name: Setup repo
@@ -41,13 +41,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=zilliqa
@@ -65,11 +58,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=zilliqa
@@ -87,11 +75,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=zilliqa
@@ -108,13 +91,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=zilliqa
@@ -132,12 +108,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
            DISABLE_API=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=zilliqa
@@ -155,12 +126,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
            DISABLE_INDEXER=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=zilliqa
diff --git a/.github/workflows/pre-release-zksync.yml b/.github/workflows/pre-release-zksync.yml
index 0f08894580a3..5c41a93e63e7 100644
--- a/.github/workflows/pre-release-zksync.yml
+++ b/.github/workflows/pre-release-zksync.yml
@@ -16,7 +16,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
     steps:
       - uses: actions/checkout@v4
       - name: Setup repo
@@ -41,13 +41,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=zksync
@@ -65,11 +58,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=zksync
@@ -87,11 +75,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=zksync
@@ -108,13 +91,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=zksync
@@ -133,11 +109,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=zksync
@@ -156,11 +127,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=zksync
diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml
index c0096e7fd050..8b1dcba6127b 100644
--- a/.github/workflows/pre-release.yml
+++ b/.github/workflows/pre-release.yml
@@ -16,7 +16,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
     steps:
       - uses: actions/checkout@v4
       - name: Setup repo
@@ -43,13 +43,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            DECODE_NOT_A_CONTRACT_CALLS=false
            MIXPANEL_URL=
            MIXPANEL_TOKEN=
@@ -73,11 +66,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            DECODE_NOT_A_CONTRACT_CALLS=false
            MIXPANEL_URL=
            MIXPANEL_TOKEN=
@@ -101,13 +89,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            DECODE_NOT_A_CONTRACT_CALLS=false
            MIXPANEL_URL=
            MIXPANEL_TOKEN=
@@ -130,13 +111,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            DECODE_NOT_A_CONTRACT_CALLS=false
            MIXPANEL_URL=
            MIXPANEL_TOKEN=
@@ -161,11 +135,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            DECODE_NOT_A_CONTRACT_CALLS=false
            MIXPANEL_URL=
            MIXPANEL_TOKEN=
@@ -190,13 +159,6 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            DECODE_NOT_A_CONTRACT_CALLS=false
            MIXPANEL_URL=
            MIXPANEL_TOKEN=
diff --git a/.github/workflows/publish-docker-image-every-push.yml b/.github/workflows/publish-docker-image-every-push.yml
index 4c141897ca7c..c94959d640d7 100644
--- a/.github/workflows/publish-docker-image-every-push.yml
+++ b/.github/workflows/publish-docker-image-every-push.yml
@@ -11,7 +11,7 @@ on:
 env:
   OTP_VERSION: ${{ vars.OTP_VERSION }}
   ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
-  RELEASE_VERSION: 6.9.2
+  RELEASE_VERSION: 6.10.1
 
 jobs:
   push_to_registry:
@@ -43,19 +43,12 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            DISABLE_WEBAPP=false
-            API_V1_READ_METHODS_DISABLED=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            DECODE_NOT_A_CONTRACT_CALLS=false
            MIXPANEL_URL=
            MIXPANEL_TOKEN=
            AMPLITUDE_URL=
            AMPLITUDE_API_KEY=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
 
      - name: Build and push Docker image (indexer)
@@ -71,17 +64,12 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            DECODE_NOT_A_CONTRACT_CALLS=false
            MIXPANEL_URL=
            MIXPANEL_TOKEN=
            AMPLITUDE_URL=
            AMPLITUDE_API_KEY=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
 
      - name: Build and push Docker image (API)
@@ -97,17 +85,12 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
            DECODE_NOT_A_CONTRACT_CALLS=false
            MIXPANEL_URL=
            MIXPANEL_TOKEN=
            AMPLITUDE_URL=
            AMPLITUDE_API_KEY=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
 
      - name: Build and push Docker image for frontend
@@ -123,15 +106,8 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
            SESSION_COOKIE_DOMAIN=k8s-dev.blockscout.com
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
  deploy_e2e:
    needs: push_to_registry
diff --git a/.github/workflows/publish-docker-image-for-arbitrum.yml b/.github/workflows/publish-docker-image-for-arbitrum.yml
index 06dd671ad3a1..7ab76e29eb41 100644
--- a/.github/workflows/publish-docker-image-for-arbitrum.yml
+++ b/.github/workflows/publish-docker-image-for-arbitrum.yml
@@ -10,7 +10,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
       DOCKER_CHAIN_NAME: arbitrum
     steps:
       - uses: actions/checkout@v4
       - name: Setup repo
@@ -36,7 +36,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=arbitrum
@@ -53,7 +53,7 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=arbitrum
@@ -70,7 +70,7 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=arbitrum
@@ -86,7 +86,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=arbitrum
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
@@ -104,7 +104,7 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=arbitrum
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
@@ -122,7 +122,7 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=arbitrum
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
\ No newline at end of file
diff --git a/.github/workflows/publish-docker-image-for-blackfort.yml b/.github/workflows/publish-docker-image-for-blackfort.yml
index 83d5a1fee86b..9c77e3363078 100644
--- a/.github/workflows/publish-docker-image-for-blackfort.yml
+++ b/.github/workflows/publish-docker-image-for-blackfort.yml
@@ -13,7 +13,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
       DOCKER_CHAIN_NAME: blackfort
     steps:
       - uses: actions/checkout@v4
@@ -39,13 +39,6 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=blackfort
\ No newline at end of file
diff --git a/.github/workflows/publish-docker-image-for-celo.yml b/.github/workflows/publish-docker-image-for-celo.yml
index e73638df14c1..729908352860 100644
--- a/.github/workflows/publish-docker-image-for-celo.yml
+++ b/.github/workflows/publish-docker-image-for-celo.yml
@@ -10,7 +10,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
       DOCKER_CHAIN_NAME: celo
       API_GRAPHQL_MAX_COMPLEXITY: 10400
     steps:
@@ -38,14 +38,7 @@ jobs:
            linux/arm64/v8
          build-args: |
            API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }}
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
@@ -63,12 +56,7 @@ jobs:
          build-args: |
            API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }}
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
@@ -86,12 +74,7 @@ jobs:
          build-args: |
            API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }}
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
@@ -108,14 +91,7 @@ jobs:
            linux/arm64/v8
          build-args: |
            API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }}
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
@@ -133,13 +109,8 @@ jobs:
            linux/arm64/v8
          build-args: |
            API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }}
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
            DISABLE_API=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
@@ -157,13 +128,8 @@ jobs:
            linux/arm64/v8
          build-args: |
            API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }}
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
            DISABLE_INDEXER=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
diff --git a/.github/workflows/publish-docker-image-for-core.yml b/.github/workflows/publish-docker-image-for-core.yml
index d0957f0ab45a..4c3e533903fe 100644
--- a/.github/workflows/publish-docker-image-for-core.yml
+++ b/.github/workflows/publish-docker-image-for-core.yml
@@ -10,7 +10,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
       DOCKER_CHAIN_NAME: poa
     steps:
       - uses: actions/checkout@v4
@@ -36,12 +36,5 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
\ No newline at end of file
diff --git a/.github/workflows/publish-docker-image-for-eth-sepolia.yml b/.github/workflows/publish-docker-image-for-eth-sepolia.yml
index eba3c88a9b3b..eee2beb20e3f 100644
--- a/.github/workflows/publish-docker-image-for-eth-sepolia.yml
+++ b/.github/workflows/publish-docker-image-for-eth-sepolia.yml
@@ -10,7 +10,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
       DOCKER_CHAIN_NAME: eth-sepolia
     steps:
       - uses: actions/checkout@v4
@@ -36,7 +36,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=ethereum
@@ -53,7 +53,7 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=ethereum
@@ -70,7 +70,7 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=ethereum
@@ -86,7 +86,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}-shrink-internal-txs
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}-shrink-internal-txs
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=ethereum
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
@@ -104,7 +104,7 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}-shrink-internal-txs
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}-shrink-internal-txs
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=ethereum
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
@@ -122,7 +122,7 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}-shrink-internal-txs
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}-shrink-internal-txs
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=ethereum
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
\ No newline at end of file
diff --git a/.github/workflows/publish-docker-image-for-eth.yml b/.github/workflows/publish-docker-image-for-eth.yml
index 210f06150a2f..932371ffe91b 100644
--- a/.github/workflows/publish-docker-image-for-eth.yml
+++ b/.github/workflows/publish-docker-image-for-eth.yml
@@ -10,7 +10,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
       DOCKER_CHAIN_NAME: mainnet
     steps:
       - uses: actions/checkout@v4
@@ -36,14 +36,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=false
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=ethereum
@@ -59,13 +52,8 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
            DISABLE_API=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=ethereum
@@ -81,12 +69,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
            DISABLE_INDEXER=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=ethereum
\ No newline at end of file
diff --git a/.github/workflows/publish-docker-image-for-filecoin.yml b/.github/workflows/publish-docker-image-for-filecoin.yml
index 797ebe61a0bf..ed49016270d1 100644
--- a/.github/workflows/publish-docker-image-for-filecoin.yml
+++ b/.github/workflows/publish-docker-image-for-filecoin.yml
@@ -9,7 +9,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
       DOCKER_CHAIN_NAME: filecoin
     steps:
       - uses: actions/checkout@v4
@@ -35,14 +35,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
@@ -59,12 +52,7 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_API=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
@@ -81,12 +69,7 @@ jobs:
            linux/arm64/v8
          build-args: |
            DISABLE_INDEXER=true
-            DISABLE_WEBAPP=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            ADMIN_PANEL_ENABLED=false
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
@@ -102,14 +85,7 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
@@ -126,13 +102,8 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
            DISABLE_API=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
@@ -149,13 +120,8 @@ jobs:
            linux/amd64
            linux/arm64/v8
          build-args: |
-            CACHE_EXCHANGE_RATES_PERIOD=
-            DISABLE_WEBAPP=true
            DISABLE_INDEXER=true
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
            CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
            SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true
\ No newline at end of file
diff --git a/.github/workflows/publish-docker-image-for-fuse.yml b/.github/workflows/publish-docker-image-for-fuse.yml
index 4931f89775fa..1804f079d8e4 100644
--- a/.github/workflows/publish-docker-image-for-fuse.yml
+++ b/.github/workflows/publish-docker-image-for-fuse.yml
@@ -10,7 +10,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
       DOCKER_CHAIN_NAME: fuse
     steps:
       - uses: actions/checkout@v4
@@ -37,12 +37,5 @@ jobs:
            linux/arm64/v8
          build-args: |
            BRIDGED_TOKENS_ENABLED=true
-            CACHE_EXCHANGE_RATES_PERIOD=
-            API_V1_READ_METHODS_DISABLED=false
-            DISABLE_WEBAPP=false
-            API_V1_WRITE_METHODS_DISABLED=false
-            CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED=
-            ADMIN_PANEL_ENABLED=false
-            CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL=
-            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }}
+            BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
            RELEASE_VERSION=${{ env.RELEASE_VERSION }}
\ No newline at end of file
diff --git a/.github/workflows/publish-docker-image-for-gnosis-chain.yml b/.github/workflows/publish-docker-image-for-gnosis-chain.yml
index 1b5f1db55d0b..e02cb6e536e9 100644
--- a/.github/workflows/publish-docker-image-for-gnosis-chain.yml
+++ b/.github/workflows/publish-docker-image-for-gnosis-chain.yml
@@ -10,7 +10,7 @@ jobs:
     name: Push Docker image to Docker Hub
     runs-on: ubuntu-latest
     env:
-      RELEASE_VERSION: 6.9.2
+      RELEASE_VERSION: 6.10.1
       DOCKER_CHAIN_NAME: xdai
     steps:
       - uses: actions/checkout@v4
@@ -24,7 +24,7 @@ jobs:
          docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
          docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
 
-      - name: Build and push Docker image
+      - name: Build and push Docker image (indexer + API)
        uses: docker/build-push-action@v6
        with:
          context: .
@@ -37,13 +37,40 @@ jobs: linux/arm64/v8 build-args: | BRIDGED_TOKENS_ENABLED=true - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=ethereum + + - name: Build and push Docker image (indexer) + uses: docker/build-push-action@v6 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_API=true + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} + RELEASE_VERSION=${{ env.RELEASE_VERSION }} + CHAIN_TYPE=ethereum + + - name: Build and push Docker image (API) + uses: docker/build-push-action@v6 + with: + context: . + file: ./docker/Dockerfile + push: true + tags: blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-api + labels: ${{ steps.setup.outputs.docker-labels }} + platforms: | + linux/amd64 + linux/arm64/v8 + build-args: | + DISABLE_INDEXER=true + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=ethereum \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-for-l2-staging.yml b/.github/workflows/publish-docker-image-for-l2-staging.yml index abccf4862355..162f6ea8ce6d 100644 --- a/.github/workflows/publish-docker-image-for-l2-staging.yml +++ b/.github/workflows/publish-docker-image-for-l2-staging.yml @@ -10,7 +10,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: optimism-l2-advanced steps: - uses: actions/checkout@v4 @@ -36,12 +36,5 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-for-lukso.yml b/.github/workflows/publish-docker-image-for-lukso.yml index 3af3ac67f165..dc2a700c896a 100644 --- a/.github/workflows/publish-docker-image-for-lukso.yml +++ b/.github/workflows/publish-docker-image-for-lukso.yml @@ -10,7 +10,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: lukso steps: - uses: actions/checkout@v4 @@ -36,12 +36,5 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ 
env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-for-optimism.yml b/.github/workflows/publish-docker-image-for-optimism.yml index 2ca241b2b1d8..28f43c8a78f9 100644 --- a/.github/workflows/publish-docker-image-for-optimism.yml +++ b/.github/workflows/publish-docker-image-for-optimism.yml @@ -10,7 +10,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: optimism steps: - uses: actions/checkout@v4 @@ -36,14 +36,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism @@ -59,13 +52,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism @@ -81,13 +69,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism @@ -103,14 +86,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -127,13 +103,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -150,13 +121,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - 
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-for-polygon-edge.yml b/.github/workflows/publish-docker-image-for-polygon-edge.yml index fd336962c29d..8f728b5ef815 100644 --- a/.github/workflows/publish-docker-image-for-polygon-edge.yml +++ b/.github/workflows/publish-docker-image-for-polygon-edge.yml @@ -10,7 +10,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: polygon-edge steps: - uses: actions/checkout@v4 @@ -36,13 +36,6 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_edge \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-for-redstone.yml b/.github/workflows/publish-docker-image-for-redstone.yml index 8e33916160dd..b2e0ed7468e1 100644 --- a/.github/workflows/publish-docker-image-for-redstone.yml +++ b/.github/workflows/publish-docker-image-for-redstone.yml @@ -10,7 +10,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: redstone steps: - uses: actions/checkout@v4 @@ -36,14 +36,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism MUD_INDEXER_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-for-rootstock.yml b/.github/workflows/publish-docker-image-for-rootstock.yml index a50526ea0664..100f8a9ebb77 100644 --- a/.github/workflows/publish-docker-image-for-rootstock.yml +++ b/.github/workflows/publish-docker-image-for-rootstock.yml @@ -10,7 +10,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: rsk steps: - uses: actions/checkout@v4 @@ -36,13 +36,6 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=rsk \ No newline at end of file diff --git 
a/.github/workflows/publish-docker-image-for-scroll.yml b/.github/workflows/publish-docker-image-for-scroll.yml index a24e39923d22..a40df6ced156 100644 --- a/.github/workflows/publish-docker-image-for-scroll.yml +++ b/.github/workflows/publish-docker-image-for-scroll.yml @@ -10,7 +10,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: scroll steps: - uses: actions/checkout@v4 @@ -36,14 +36,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=scroll @@ -59,13 +52,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=scroll @@ -81,13 +69,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=scroll @@ -103,14 +86,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=scroll SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -127,13 +103,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=scroll SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -150,13 +121,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=scroll 
SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-for-shibarium.yml b/.github/workflows/publish-docker-image-for-shibarium.yml index 25882949dd7f..c3c0a8b1c441 100644 --- a/.github/workflows/publish-docker-image-for-shibarium.yml +++ b/.github/workflows/publish-docker-image-for-shibarium.yml @@ -13,7 +13,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: shibarium steps: - uses: actions/checkout@v4 @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=shibarium @@ -62,13 +55,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=shibarium @@ -84,12 +72,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=shibarium \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-for-stability.yml b/.github/workflows/publish-docker-image-for-stability.yml index 34c5bf65b8cb..c0c08e558eee 100644 --- a/.github/workflows/publish-docker-image-for-stability.yml +++ b/.github/workflows/publish-docker-image-for-stability.yml @@ -13,7 +13,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: stability steps: - uses: actions/checkout@v4 @@ -39,13 +39,6 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=stability \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-for-suave.yml b/.github/workflows/publish-docker-image-for-suave.yml index 0da3dbdcd576..bc822049463e 100644 --- a/.github/workflows/publish-docker-image-for-suave.yml +++ b/.github/workflows/publish-docker-image-for-suave.yml @@ -13,7 +13,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: suave steps: - 
uses: actions/checkout@v4 @@ -39,13 +39,6 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=suave \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-for-zetachain.yml b/.github/workflows/publish-docker-image-for-zetachain.yml index f1794658b6cc..61d64f0405f4 100644 --- a/.github/workflows/publish-docker-image-for-zetachain.yml +++ b/.github/workflows/publish-docker-image-for-zetachain.yml @@ -10,7 +10,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: zetachain steps: - uses: actions/checkout@v4 @@ -36,13 +36,6 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zetachain \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-for-zilliqa.yml b/.github/workflows/publish-docker-image-for-zilliqa.yml index 4893d6db99a2..e1abf3d7bf15 100644 --- a/.github/workflows/publish-docker-image-for-zilliqa.yml +++ b/.github/workflows/publish-docker-image-for-zilliqa.yml @@ -10,7 +10,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: zilliqa steps: - uses: actions/checkout@v4 @@ -36,14 +36,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }} @@ -59,13 +52,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }} @@ -81,13 +69,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + 
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }} @@ -103,14 +86,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }} SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -127,13 +103,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }} SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -150,13 +121,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }} SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true diff --git a/.github/workflows/publish-docker-image-for-zkevm.yml b/.github/workflows/publish-docker-image-for-zkevm.yml index a68643cdbdad..8038c7bd4aec 100644 --- a/.github/workflows/publish-docker-image-for-zkevm.yml +++ b/.github/workflows/publish-docker-image-for-zkevm.yml @@ -10,7 +10,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: zkevm steps: - uses: actions/checkout@v4 @@ -36,14 +36,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_zkevm @@ -59,13 +52,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_zkevm @@ -81,13 +69,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - 
CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_zkevm @@ -103,14 +86,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_zkevm SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -127,13 +103,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_zkevm SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -150,13 +121,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_zkevm SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-for-zksync.yml b/.github/workflows/publish-docker-image-for-zksync.yml index 0187041b0fd4..163ce990d946 100644 --- a/.github/workflows/publish-docker-image-for-zksync.yml +++ b/.github/workflows/publish-docker-image-for-zksync.yml @@ -9,7 +9,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 DOCKER_CHAIN_NAME: zksync steps: - uses: actions/checkout@v4 @@ -35,14 +35,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zksync @@ -58,13 +51,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zksync @@ -80,13 +68,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true 
DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zksync @@ -102,14 +85,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zksync @@ -125,13 +101,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zksync @@ -147,12 +118,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zksync \ No newline at end of file diff --git a/.github/workflows/publish-docker-image-staging-on-demand.yml b/.github/workflows/publish-docker-image-staging-on-demand.yml index 5983a75ea663..9e6e5f2cf1f1 100644 --- a/.github/workflows/publish-docker-image-staging-on-demand.yml +++ b/.github/workflows/publish-docker-image-staging-on-demand.yml @@ -12,7 +12,7 @@ on: env: OTP_VERSION: ${{ vars.OTP_VERSION }} ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 jobs: push_to_registry: @@ -44,17 +44,11 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} diff --git a/.github/workflows/publish-regular-docker-image-on-demand.yml b/.github/workflows/publish-regular-docker-image-on-demand.yml index fe0cb2d354f9..3d206764c3d1 100644 --- a/.github/workflows/publish-regular-docker-image-on-demand.yml +++ b/.github/workflows/publish-regular-docker-image-on-demand.yml @@ -5,7 +5,7 @@ on: env: OTP_VERSION: ${{ vars.OTP_VERSION }} ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }} - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 jobs: push_to_registry: @@ -37,19 +37,12 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - 
DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} - name: Build and push Docker image (indexer) @@ -65,17 +58,12 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} - name: Build and push Docker image (API) @@ -91,15 +79,10 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta.+commit.${{ env.SHORT_SHA }} + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} diff --git a/.github/workflows/release-arbitrum.yml b/.github/workflows/release-arbitrum.yml index ef7f19a2b5d5..e82f30f8c8b1 100644 --- a/.github/workflows/release-arbitrum.yml +++ b/.github/workflows/release-arbitrum.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=arbitrum @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=arbitrum @@ -85,12 +73,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=arbitrum @@ -106,14 +89,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - 
API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=arbitrum SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -131,12 +107,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=arbitrum SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -154,12 +125,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=arbitrum SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/release-blackfort.yml b/.github/workflows/release-blackfort.yml index baaa09c92ecf..47a4604d5e21 100644 --- a/.github/workflows/release-blackfort.yml +++ b/.github/workflows/release-blackfort.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=blackfort @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=blackfort @@ -85,11 +73,6 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=blackfort \ No newline at end of file diff --git a/.github/workflows/release-celo.yml b/.github/workflows/release-celo.yml index 67e534c08677..5a97389d1010 100644 --- a/.github/workflows/release-celo.yml +++ b/.github/workflows/release-celo.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 API_GRAPHQL_MAX_COMPLEXITY: 10400 steps: - uses: actions/checkout@v4 @@ -41,14 +41,7 @@ jobs: 
linux/arm64/v8 build-args: | API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }} - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=celo @@ -66,12 +59,7 @@ jobs: build-args: | API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }} DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=celo @@ -89,12 +77,7 @@ jobs: build-args: | API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }} DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=celo @@ -111,14 +94,7 @@ jobs: linux/arm64/v8 build-args: | API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }} - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=celo SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -136,13 +112,8 @@ jobs: linux/arm64/v8 build-args: | API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }} - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=celo SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -160,13 +131,8 @@ jobs: linux/arm64/v8 build-args: | API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }} - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=celo SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/release-eth.yml b/.github/workflows/release-eth.yml index a510d02bfaa2..ecd945305df5 100644 --- a/.github/workflows/release-eth.yml +++ b/.github/workflows/release-eth.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - 
API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=ethereum @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=ethereum @@ -85,12 +73,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=ethereum @@ -106,14 +89,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=ethereum SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -131,12 +107,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=ethereum SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -154,12 +125,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=ethereum SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/release-filecoin.yml b/.github/workflows/release-filecoin.yml index 41e41b4b1287..88fcc70f6def 100644 --- a/.github/workflows/release-filecoin.yml +++ b/.github/workflows/release-filecoin.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=filecoin @@ 
-63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=filecoin @@ -85,12 +73,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=filecoin @@ -106,14 +89,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=filecoin SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -130,13 +106,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=filecoin SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -153,13 +124,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=filecoin SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/release-fuse.yml b/.github/workflows/release-fuse.yml index 1465cd598551..85e2d503b62d 100644 --- a/.github/workflows/release-fuse.yml +++ b/.github/workflows/release-fuse.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} BRIDGED_TOKENS_ENABLED=true @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ 
env.RELEASE_VERSION }} BRIDGED_TOKENS_ENABLED=true @@ -85,11 +73,6 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} BRIDGED_TOKENS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/release-gnosis.yml b/.github/workflows/release-gnosis.yml index 2f69d7bdb2c3..3483b9e7c200 100644 --- a/.github/workflows/release-gnosis.yml +++ b/.github/workflows/release-gnosis.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} BRIDGED_TOKENS_ENABLED=true CHAIN_TYPE=ethereum @@ -64,12 +57,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} BRIDGED_TOKENS_ENABLED=true CHAIN_TYPE=ethereum @@ -87,12 +75,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} BRIDGED_TOKENS_ENABLED=true CHAIN_TYPE=ethereum \ No newline at end of file diff --git a/.github/workflows/release-optimism.yml b/.github/workflows/release-optimism.yml index 469cf2d65850..744e8669fa54 100644 --- a/.github/workflows/release-optimism.yml +++ b/.github/workflows/release-optimism.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} 
RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism @@ -85,12 +73,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism @@ -106,14 +89,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -131,12 +107,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -154,12 +125,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/release-polygon-edge.yml b/.github/workflows/release-polygon-edge.yml index 5e4f49a659b5..372f5ec9bcd4 100644 --- a/.github/workflows/release-polygon-edge.yml +++ b/.github/workflows/release-polygon-edge.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_edge @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_edge @@ -85,11 +73,6 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + 
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_edge \ No newline at end of file diff --git a/.github/workflows/release-polygon-zkevm.yml b/.github/workflows/release-polygon-zkevm.yml index 895afd996c58..6ce1c6d4283f 100644 --- a/.github/workflows/release-polygon-zkevm.yml +++ b/.github/workflows/release-polygon-zkevm.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_zkevm @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_zkevm @@ -85,12 +73,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_zkevm @@ -106,14 +89,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_zkevm SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -130,13 +106,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_zkevm SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -153,13 +124,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=polygon_zkevm SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/release-redstone.yml b/.github/workflows/release-redstone.yml index 1b9b106743ce..53d9bf804d31 100644 --- 
a/.github/workflows/release-redstone.yml +++ b/.github/workflows/release-redstone.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism MUD_INDEXER_ENABLED=true @@ -64,12 +57,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism MUD_INDEXER_ENABLED=true @@ -87,12 +75,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=optimism MUD_INDEXER_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/release-rootstock.yml b/.github/workflows/release-rootstock.yml index af1c5da74c1a..2e288f9db7c5 100644 --- a/.github/workflows/release-rootstock.yml +++ b/.github/workflows/release-rootstock.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=rsk @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=rsk @@ -85,11 +73,6 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=rsk diff --git a/.github/workflows/release-scroll.yml b/.github/workflows/release-scroll.yml index 0168b8c2c497..adb77e921fd2 100644 --- a/.github/workflows/release-scroll.yml +++ 
b/.github/workflows/release-scroll.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=scroll @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=scroll @@ -85,12 +73,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=scroll @@ -106,14 +89,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=scroll SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -131,12 +107,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=scroll SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -154,12 +125,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=scroll SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/release-shibarium.yml b/.github/workflows/release-shibarium.yml index f8735b1cc5fc..0e9f9ba20dc2 100644 --- a/.github/workflows/release-shibarium.yml +++ b/.github/workflows/release-shibarium.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - 
CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=shibarium @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=shibarium @@ -85,12 +73,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=shibarium @@ -106,14 +89,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=shibarium SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -131,12 +107,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=shibarium SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -154,12 +125,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=shibarium SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/release-stability.yml b/.github/workflows/release-stability.yml index a11a44dd9d1d..285c5871a09b 100644 --- a/.github/workflows/release-stability.yml +++ b/.github/workflows/release-stability.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=stability @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | 
DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=stability @@ -85,11 +73,6 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=stability \ No newline at end of file diff --git a/.github/workflows/release-suave.yml b/.github/workflows/release-suave.yml index 12de53d2d67d..a654647cb33a 100644 --- a/.github/workflows/release-suave.yml +++ b/.github/workflows/release-suave.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=suave @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=suave @@ -85,11 +73,6 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=suave \ No newline at end of file diff --git a/.github/workflows/release-zetachain.yml b/.github/workflows/release-zetachain.yml index f1b9d65251d4..c84ba1de8bd7 100644 --- a/.github/workflows/release-zetachain.yml +++ b/.github/workflows/release-zetachain.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zetachain @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - 
CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zetachain @@ -85,11 +73,6 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zetachain \ No newline at end of file diff --git a/.github/workflows/release-zilliqa.yml b/.github/workflows/release-zilliqa.yml index d9e8e6174a24..7632041c73e4 100644 --- a/.github/workflows/release-zilliqa.yml +++ b/.github/workflows/release-zilliqa.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zilliqa @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zilliqa @@ -85,12 +73,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zilliqa @@ -106,14 +89,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - API_V1_READ_METHODS_DISABLED=false - DISABLE_WEBAPP=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zilliqa SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -130,13 +106,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_API=true - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zilliqa SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -153,13 +124,8 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - CACHE_EXCHANGE_RATES_PERIOD= - DISABLE_WEBAPP=true DISABLE_INDEXER=true - 
CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - ADMIN_PANEL_ENABLED=false - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zilliqa SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true diff --git a/.github/workflows/release-zksync.yml b/.github/workflows/release-zksync.yml index b6a5e0a42210..92462cadcf78 100644 --- a/.github/workflows/release-zksync.yml +++ b/.github/workflows/release-zksync.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -39,14 +39,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zksync @@ -63,12 +56,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zksync @@ -85,12 +73,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zksync @@ -106,14 +89,7 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zksync SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -131,12 +107,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zksync SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -154,12 +125,7 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} CHAIN_TYPE=zksync SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 
44cdee0afe69..ff98d796fedc 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest env: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 steps: - uses: actions/checkout@v4 - name: Setup repo @@ -41,19 +41,12 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} - name: Build & Push Core Docker image (indexer) @@ -71,17 +64,12 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} - name: Build & Push Core Docker image (API) @@ -99,19 +87,12 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} - name: Build & Push Core Docker image (indexer + API + shrink internal transactions) @@ -128,19 +109,12 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -159,17 +133,12 @@ jobs: linux/arm64/v8 build-args: | DISABLE_API=true - DISABLE_WEBAPP=true - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -188,19 +157,12 @@ jobs: linux/arm64/v8 build-args: | DISABLE_INDEXER=true - DISABLE_WEBAPP=true - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false 
DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} SHRINK_INTERNAL_TRANSACTIONS_ENABLED=true @@ -218,19 +180,12 @@ jobs: linux/amd64 linux/arm64/v8 build-args: | - DISABLE_WEBAPP=false - API_V1_READ_METHODS_DISABLED=false - API_V1_WRITE_METHODS_DISABLED=false - CACHE_EXCHANGE_RATES_PERIOD= - CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED= - CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL= - ADMIN_PANEL_ENABLED=false DECODE_NOT_A_CONTRACT_CALLS=false MIXPANEL_URL= MIXPANEL_TOKEN= AMPLITUDE_URL= AMPLITUDE_API_KEY= - BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-beta + BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }} RELEASE_VERSION=${{ env.RELEASE_VERSION }} # - name: Send release announcement to Slack workflow @@ -240,7 +195,7 @@ jobs: # payload: | # { # "release-version": "${{ env.RELEASE_VERSION }}", - # "release-link": "https://github.com/blockscout/blockscout/releases/tag/v${{ env.RELEASE_VERSION }}-beta" + # "release-link": "https://github.com/blockscout/blockscout/releases/tag/v${{ env.RELEASE_VERSION }}" # } # env: # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.gitignore b/.gitignore index 911095c2c64a..f879658f9596 100644 --- a/.gitignore +++ b/.gitignore @@ -60,11 +60,18 @@ dump.rdb .vscode .cursorignore .cursorrules +.elixir_ls **.dec** -*.env *.env.example *.env.local *.env.staging -.devcontainer/.blockscout_config \ No newline at end of file +.devcontainer/.blockscout_config + +# dets tables +queue_storage +tasks_in_progress +/dets + +/temp \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index e395998b5d3a..1f2305482401 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,235 @@ # Changelog +## 6.10.1 + +### 🚀 Features + +- Support OP Holocene upgrade ([#11355](https://github.com/blockscout/blockscout/issues/11355)) +- Add active DB connections metric ([#11321](https://github.com/blockscout/blockscout/issues/11321)) +- Add protocol icon to the search result ([#11478](https://github.com/blockscout/blockscout/issues/11478)) + +### 🐛 Bug Fixes + +- Remove unnecessary internal transactions preload ([#11643](https://github.com/blockscout/blockscout/issues/11643)) +- Fix bug in Indexer.Fetcher.EmptyBlocksSanitizer module ([#11636](https://github.com/blockscout/blockscout/pull/11636)) +- Multichain search: process address in chunks ([#11632](https://github.com/blockscout/blockscout/issues/11632)) +- Fix transactions deadlock ([#11623](https://github.com/blockscout/blockscout/issues/11623)) +- Fix tokens and transactions deadlocks ([#11620](https://github.com/blockscout/blockscout/issues/11620)) +- Order address names to return the latest non-primary ([#11612](https://github.com/blockscout/blockscout/issues/11612)) +- Rename tx_burnt_fee prop in API v2 endpoint ([#11563](https://github.com/blockscout/blockscout/issues/11563)) +- Celo fee handler ([#11387](https://github.com/blockscout/blockscout/issues/11387)) +- Fix addresses deadlock ([#11616](https://github.com/blockscout/blockscout/issues/11616)) +- Besu raw trace ([#11413](https://github.com/blockscout/blockscout/issues/11413)) +- Fix tokens deadlock ([#11603](https://github.com/blockscout/blockscout/issues/11603)) +- Set timeout: :infinity for PendingTransactionsSanitizer delete ([#11600](https://github.com/blockscout/blockscout/issues/11600)) +- Fixed Missing Closing Quotation Marks in sed Expressions Update 
version_bump.sh ([#11574](https://github.com/blockscout/blockscout/issues/11574)) +- The same DA blobs for different Arbitrum batches ([#11485](https://github.com/blockscout/blockscout/issues/11485)) +- Extended list of apps in the devcontainer helper script ([#11396](https://github.com/blockscout/blockscout/issues/11396)) +- Fix MarketHistory test ([#11547](https://github.com/blockscout/blockscout/issues/11547)) +- Advanced-filters csv format ([#11494](https://github.com/blockscout/blockscout/issues/11494)) +- Fix verifyproxycontract endpoint ([#11523](https://github.com/blockscout/blockscout/issues/11523)) +- Fix minor grammatical issue Update README.md ([#11544](https://github.com/blockscout/blockscout/issues/11544)) + +### 📚 Documentation + +- Typo fix Update README.md ([#11595](https://github.com/blockscout/blockscout/issues/11595)) +- Typo fix Update CODE_OF_CONDUCT.md ([#11572](https://github.com/blockscout/blockscout/issues/11572)) +- Fix minor grammar and phrasing inconsistencies Update README.md ([#11548](https://github.com/blockscout/blockscout/issues/11548)) +- Fixed incorrect usage of -d flag in stop containers command Update README.md ([#11522](https://github.com/blockscout/blockscout/issues/11522)) + +### ⚡ Performance + +- Implement batched requests and DB upsert operations Indexer.Fetcher.EmptyBlocksSanitizer module ([#11555](https://github.com/blockscout/blockscout/issues/11555)) + +### ⚙️ Miscellaneous Tasks + +- Remove unused Explorer.Token.InstanceOwnerReader module ([#11570](https://github.com/blockscout/blockscout/issues/11570)) +- Optimize coin balances deriving ([#11613](https://github.com/blockscout/blockscout/issues/11613)) +- Fix typo Update CHANGELOG.md ([#11607](https://github.com/blockscout/blockscout/issues/11607)) +- Add env variable for PendingTransactionsSanitizer interval ([#11601](https://github.com/blockscout/blockscout/issues/11601)) +- Documentation for Explorer.Chain.Transaction.History.Historian ([#11397](https://github.com/blockscout/blockscout/issues/11397)) +- Extend error message on updating token balance with token id ([#11524](https://github.com/blockscout/blockscout/issues/11524)) + +### New ENV Variables + +| Variable | Description | Parameters | +| ------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------------- | +| `INDEXER_PENDING_TRANSACTIONS_SANITIZER_INTERVAL` | Interval between pending transactions sanitizing. Implemented in [#11601](https://github.com/blockscout/blockscout/pull/11601). |

Version: v6.10.1
Default: 1h
Applications: Indexer
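
A quick sketch of overriding this variable (hypothetical value; the `30m` duration format is assumed to match the `1h` default shown above):

```bash
# Run the pending-transactions sanitizer every 30 minutes instead of the
# default 1h; "30m" follows the duration format of the documented default.
export INDEXER_PENDING_TRANSACTIONS_SANITIZER_INTERVAL=30m
```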

| + +## 6.10.0 + +### 🚀 Features + +- Addresses blacklist support ([#11417](https://github.com/blockscout/blockscout/issues/11417)) +- Multichain search DB filling ([#11139](https://github.com/blockscout/blockscout/issues/11139)) +- Zilliqa scilla transactions and smart contracts ([#11069](https://github.com/blockscout/blockscout/issues/11069)) +- CDN ([#10675](https://github.com/blockscout/blockscout/issues/10675)) +- Arbitrum L2->L1 message claiming ([#10804](https://github.com/blockscout/blockscout/issues/10804)) +- Add is_banned to token_instances table ([#11235](https://github.com/blockscout/blockscout/issues/11235)) +- Add CSV export of epoch transactions for address ([#11195](https://github.com/blockscout/blockscout/issues/11195)) +- Add request to /cache/{tx_hash} of transaction interpreter ([#11279](https://github.com/blockscout/blockscout/issues/11279)) +- Switch DB requests from replica to master in case of replica inaccessibility ([#11020](https://github.com/blockscout/blockscout/issues/11020)) +- Add gzip encoding option ([#11292](https://github.com/blockscout/blockscout/issues/11292)) +- Add Stylus verification support ([#11183](https://github.com/blockscout/blockscout/issues/11183)) +- Multiple json rpc urls ([#10934](https://github.com/blockscout/blockscout/issues/10934)) +- Gas prices with base fee if no transactions ([#11132](https://github.com/blockscout/blockscout/issues/11132)) +- Zilliqa consensus data related to block ([#10699](https://github.com/blockscout/blockscout/issues/10699)) +- Add filecoin robust addresses to proxy implementations ([#11102](https://github.com/blockscout/blockscout/issues/11102)) + +### 🐛 Bug Fixes + +- Limit max decimals value ([#11493](https://github.com/blockscout/blockscout/issues/11493)) +- Ignore unknown transaction receipt fields ([#11492](https://github.com/blockscout/blockscout/issues/11492)) +- Fixed issue in db request (l2_to_l1_message_by_id/2) ([#11481](https://github.com/blockscout/blockscout/issues/11481)) +- Handle float time in compose_gas_price/5 ([#11476](https://github.com/blockscout/blockscout/issues/11476)) +- Fix 500 on disabled metadata service ([#11443](https://github.com/blockscout/blockscout/issues/11443)) +- Fix get_media_url_from_metadata_for_nft_media_handler/1 ([#11437](https://github.com/blockscout/blockscout/issues/11437)) +- Fix check-redirect for ENS ([#11435](https://github.com/blockscout/blockscout/issues/11435)) +- Refactor CDN upload functions, prevent saving partially uploaded thumbnails ([#11400](https://github.com/blockscout/blockscout/issues/11400)) +- Take into account several proofs in OP Withdrawals ([#11399](https://github.com/blockscout/blockscout/issues/11399)) +- Handle "null" in paging options ([#11388](https://github.com/blockscout/blockscout/issues/11388)) +- Fix search timeout ([#11277](https://github.com/blockscout/blockscout/issues/11277)) +- Fix Noves.fi endpoints for bulk transactions ([#11375](https://github.com/blockscout/blockscout/issues/11375)) +- Fix docker container build after adding NFT media handler ([#11373](https://github.com/blockscout/blockscout/issues/11373)) +- Handle simultaneous account entities creation ([#11341](https://github.com/blockscout/blockscout/issues/11341)) +- Websocket configuration ([#11357](https://github.com/blockscout/blockscout/issues/11357)) +- 403 instead of 404 on wrong captcha in api/v1 ([#11348](https://github.com/blockscout/blockscout/issues/11348)) +- Upgrade fallback urls propagation
([#11331](https://github.com/blockscout/blockscout/issues/11331)) +- Add utils to dockerfile ([#11345](https://github.com/blockscout/blockscout/issues/11345)) +- Fix log decoding bug ([#11266](https://github.com/blockscout/blockscout/issues/11266)) +- Return 404 instead of 200 for nonexistent NFT ([#11280](https://github.com/blockscout/blockscout/issues/11280)) +- Fix metrics modules warnings ([#11340](https://github.com/blockscout/blockscout/issues/11340)) +- Handle entries with not specified `retries_count` ([#11206](https://github.com/blockscout/blockscout/issues/11206)) +- Get rid of scientific notation in CSV token holders export ([#11281](https://github.com/blockscout/blockscout/issues/11281)) +- Wrong usage of env in TokenInstanceMetadataRefetch ([#11317](https://github.com/blockscout/blockscout/issues/11317)) +- Rework initialization of the `RollupL1ReorgMonitor` and fix `read_system_config` for fallback cases ([#11275](https://github.com/blockscout/blockscout/issues/11275)) +- Eth_getLogs paging ([#11248](https://github.com/blockscout/blockscout/issues/11248)) +- Handle excessive otp confirmations ([#11244](https://github.com/blockscout/blockscout/issues/11244)) +- Check if flash is fetched before getting it in app.html ([#11270](https://github.com/blockscout/blockscout/issues/11270)) +- Multiple json rpc urls fixes ([#11264](https://github.com/blockscout/blockscout/issues/11264)) +- Handle eth rpc request without params ([#11269](https://github.com/blockscout/blockscout/issues/11269)) +- Fixate 6.9.2 as the latest release ([#11265](https://github.com/blockscout/blockscout/issues/11265)) +- Fix ETH JSON RPC deriving for Stylus verification ([#11247](https://github.com/blockscout/blockscout/issues/11247)) +- Fix fake json_rpc_named_arguments for multiple urls usage ([#11243](https://github.com/blockscout/blockscout/issues/11243)) +- Handle simultaneous api key creation ([#11233](https://github.com/blockscout/blockscout/issues/11233)) +- Fixate 6.9.1 as the latest release in master branch +- Invalid metadata requests ([#11210](https://github.com/blockscout/blockscout/issues/11210)) +- *(nginx-conf)* Redirect `/api-docs` to frontend. ([#11202](https://github.com/blockscout/blockscout/issues/11202)) +- Fix failed filecoin tests ([#11187](https://github.com/blockscout/blockscout/issues/11187)) +- Fix missing `signers` field in nested quorum certificate ([#11185](https://github.com/blockscout/blockscout/issues/11185)) +- Return `l1_tx_hashes` in the response of /batches/da/celestia/... 
API endpoint ([#11184](https://github.com/blockscout/blockscout/issues/11184)) +- Omit pbo for blocks lower than trace first block for indexing status ([#11053](https://github.com/blockscout/blockscout/issues/11053)) +- Update overview.html.eex ([#11094](https://github.com/blockscout/blockscout/issues/11094)) +- Fix sitemap timeout; optimize OrderedCache preloads ([#11131](https://github.com/blockscout/blockscout/issues/11131)) + +### 🚜 Refactor + +- Cspell configuration ([#11146](https://github.com/blockscout/blockscout/issues/11146)) + +### ⚡ Performance + +- Advanced filters optimization ([#11186](https://github.com/blockscout/blockscout/issues/11186)) + +### ⚙️ Miscellaneous Tasks + +- Return old response format in /api/v1/health endpoint ([#11511](https://github.com/blockscout/blockscout/issues/11511)) +- Rename blob_tx_count per naming conventions ([#11438](https://github.com/blockscout/blockscout/issues/11438)) +- Follow updated response schema in interpreter microservice ([#11402](https://github.com/blockscout/blockscout/issues/11402)) +- Remove raise in case if ETHEREUM_JSONRPC_HTTP_URL is not provided ([#11392](https://github.com/blockscout/blockscout/issues/11392)) +- Optimize tokens import ([#11389](https://github.com/blockscout/blockscout/issues/11389)) +- Remove beta suffix from releases ([#11376](https://github.com/blockscout/blockscout/issues/11376)) +- Background migrations timeout ([#11358](https://github.com/blockscout/blockscout/issues/11358)) +- Remove obsolete compile-time vars ([#11336](https://github.com/blockscout/blockscout/issues/11336)) +- Fixate Postgres 17 version in Docker compose and Github Actions workflows ([#11334](https://github.com/blockscout/blockscout/issues/11334)) +- Remove shorthands-duplicates from API responses ([#11319](https://github.com/blockscout/blockscout/issues/11319)) +- Refactor compile time envs usage ([#11148](https://github.com/blockscout/blockscout/issues/11148)) +- Refactor Dockerfile ([#11130](https://github.com/blockscout/blockscout/issues/11130)) +- Refactor import stages ([#11013](https://github.com/blockscout/blockscout/issues/11013)) +- Optimize CurrentTokenBalances import runner ([#11191](https://github.com/blockscout/blockscout/issues/11191)) +- Fix watchlist address flaking test ([#11242](https://github.com/blockscout/blockscout/issues/11242)) +- OP modules improvements ([#11073](https://github.com/blockscout/blockscout/issues/11073)) +- Invalid association `token_transfers` ([#11204](https://github.com/blockscout/blockscout/issues/11204)) +- Update Github Actions packages versions ([#11144](https://github.com/blockscout/blockscout/issues/11144)) +- Convenient way to manage known_hosts within devcontainer ([#11091](https://github.com/blockscout/blockscout/issues/11091)) +- Add docker compose file without microservices ([#11097](https://github.com/blockscout/blockscout/issues/11097)) + +### New ENV Variables + +| Variable | Description | Parameters | +| ------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------------- | +| `ETHEREUM_JSONRPC_HTTP_URLS` | Analogue of `ETHEREUM_JSONRPC_HTTP_URL` for multiple values. Implemented in [#10934](https://github.com/blockscout/blockscout/pull/10934) |

Version: v6.10.0+
Default: (empty)
Applications: API, Indexer

| +| `ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS` | Analogue of `ETHEREUM_JSONRPC_FALLBACK_HTTP_URL` for multiple values. Implemented in [#10934](https://github.com/blockscout/blockscout/pull/10934) |

Version: v6.10.0+
Default: (empty)
Applications: API, Indexer

| +| `ETHEREUM_JSONRPC_TRACE_URLS` | Analogue of `ETHEREUM_JSONRPC_TRACE_URL` for multiple values. Implemented in [#10934](https://github.com/blockscout/blockscout/pull/10934) |

Version: v6.10.0+
Default: (empty)
Applications: API, Indexer

| +| `ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS` | Analogue of `ETHEREUM_JSONRPC_FALLBACK_TRACE_URL` for multiple values. Implemented in [#10934](https://github.com/blockscout/blockscout/pull/10934) |

Version: v6.10.0+
Default: (empty)
Applications: API, Indexer

| +| `ETHEREUM_JSONRPC_ETH_CALL_URLS` | Analogue of `ETHEREUM_JSONRPC_ETH_CALL_URL` for multiple values. Implemented in [#10934](https://github.com/blockscout/blockscout/pull/10934) |

Version: v6.10.0+
Default: (empty)
Applications: API, Indexer

| +| `ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS` | Analogue of `ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL` for multiple values. Implemented in [#10934](https://github.com/blockscout/blockscout/pull/10934) |

Version: v6.10.0+
Default: (empty)
Applications: API, Indexer
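
A minimal shell sketch for the new multi-URL variables. The endpoints are placeholders, and the comma-separated list format is an assumption inferred from the plural `*_URLS` naming; it is not spelled out in this table.

```bash
# Placeholder endpoints; a comma-separated list is assumed for the *_URLS vars.
export ETHEREUM_JSONRPC_HTTP_URLS="https://rpc-1.example.com,https://rpc-2.example.com"
export ETHEREUM_JSONRPC_TRACE_URLS="https://trace-1.example.com,https://trace-2.example.com"
# Fallback endpoints are consulted when the primary ones are unavailable.
export ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS="https://rpc-fallback.example.com"
```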

| +| `ETHEREUM_JSONRPC_HTTP_GZIP_ENABLED` | If `true`, then send gzip encoding header and expect encoding in response. Implemented in [#11292](https://github.com/blockscout/blockscout/pull/11292). |

Version: v6.10.0+
Default: false
Applications: API, Indexer

| +| `REPLICA_MAX_LAG` | Defines the max lag for read-only replica. If the actual lag is higher than this, replica is considered unavailable and all requests to it are redirected to main DB. Implemented in [#11020](https://github.com/blockscout/blockscout/pull/11020) |

Version: v6.10.0+
Default: 5m
Applications: API
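
The two variables above pair naturally; a hedged sketch with illustrative values (the `2m` duration format mirrors the documented `5m` default):

```bash
# Accept gzip-encoded JSON-RPC responses and tolerate at most 2 minutes of
# replica lag before reads are redirected to the main DB (default: 5m).
export ETHEREUM_JSONRPC_HTTP_GZIP_ENABLED=true
export REPLICA_MAX_LAG=2m
```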

| +| `SANITIZE_INCORRECT_NFT_TIMEOUT` | Timeout between sanitizing token transfer batches processing. Implemented in [#11358](https://github.com/blockscout/blockscout/pull/11358) |

Version: v6.10.0+
Default: 0
Applications: API, Indexer

| +| `SANITIZE_INCORRECT_WETH_TIMEOUT` | Timeout between sanitizing token transfer batches processing. Implemented in [#11358](https://github.com/blockscout/blockscout/pull/11358) |

Version: v6.10.0+
Default: 0
Applications: API, Indexer

| +| `REINDEX_INTERNAL_TRANSACTIONS_STATUS_BATCH_SIZE` | Number of internal transactions to reindex in the batch. Implemented in [#11358](https://github.com/blockscout/blockscout/pull/11358) |

Version: v6.10.0+
Default: 100
Applications: API, Indexer

| +| `REINDEX_INTERNAL_TRANSACTIONS_STATUS_CONCURRENCY` | Number of parallel reindexing internal transaction batches processing. Implemented in [#11358](https://github.com/blockscout/blockscout/pull/11358) |

Version: v6.10.0+
Default: 1
Applications: API, Indexer

| +| `REINDEX_INTERNAL_TRANSACTIONS_STATUS_TIMEOUT` | Timeout between reindexing internal transaction batches processing. Implemented in [#11358](https://github.com/blockscout/blockscout/pull/11358) |

Version: v6.10.0+
Default: 0
Applications: API, Indexer
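
An illustrative tuning sketch for the reindexing variables above; the numbers are arbitrary, and the duration format for the timeout is an assumption based on the other interval variables.

```bash
# Arbitrary illustrative values (defaults: batch size 100, concurrency 1, timeout 0).
export REINDEX_INTERNAL_TRANSACTIONS_STATUS_BATCH_SIZE=500
export REINDEX_INTERNAL_TRANSACTIONS_STATUS_CONCURRENCY=2
export REINDEX_INTERNAL_TRANSACTIONS_STATUS_TIMEOUT=5s  # assumed duration format
```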

| +| `NFT_MEDIA_HANDLER_AWS_ACCESS_KEY_ID` | S3 API Access Key ID |

Version: v6.10.0+
Default: (empty)
Applications: NFT_MEDIA_HANDLER

| +| `NFT_MEDIA_HANDLER_AWS_SECRET_ACCESS_KEY` | S3 API Secret Access Key |

Version: v6.10.0+
Default: (empty)
Applications: NFT_MEDIA_HANDLER

| +| `NFT_MEDIA_HANDLER_AWS_BUCKET_HOST` | S3 API URL |

Version: v6.10.0+
Default: (empty)
Applications: NFT_MEDIA_HANDLER

| +| `NFT_MEDIA_HANDLER_AWS_BUCKET_NAME` | S3 bucket name |

Version: v6.10.0+
Default: (empty)
Applications: NFT_MEDIA_HANDLER

| +| `NFT_MEDIA_HANDLER_AWS_PUBLIC_BUCKET_URL` | Public S3 bucket URL |

Version: v6.10.0+
Default: (empty)
Applications: API
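
A hedged sketch of a full S3/R2 bucket configuration for the CDN feature; every credential, host, and bucket name below is a placeholder, not a recommended value.

```bash
# All values are placeholders; substitute your own bucket and credentials.
export NFT_MEDIA_HANDLER_AWS_ACCESS_KEY_ID="AKIAXXXXXXXXXXXXXXXX"
export NFT_MEDIA_HANDLER_AWS_SECRET_ACCESS_KEY="xxxxxxxxxxxxxxxxxxxxxxxx"
export NFT_MEDIA_HANDLER_AWS_BUCKET_HOST="https://s3.example.com"
export NFT_MEDIA_HANDLER_AWS_BUCKET_NAME="nft-media"
export NFT_MEDIA_HANDLER_AWS_PUBLIC_BUCKET_URL="https://cdn.example.com/nft-media"
```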

| +| `NFT_MEDIA_HANDLER_ENABLED` | If `true`, the CDN feature is enabled |

Version: v6.10.0+
Default: false
Applications: Indexer, NFT_MEDIA_HANDLER

| +| `NFT_MEDIA_HANDLER_REMOTE_DISPATCHER_NODE_MODE_ENABLED` | If `true`, nft_media_handler is expected to run as a separate node. |

Version: v6.10.0+
Default: false
Applications: Indexer, NFT_MEDIA_HANDLER

| +| `NFT_MEDIA_HANDLER_IS_WORKER` | If `true` and `NFT_MEDIA_HANDLER_REMOTE_DISPATCHER_NODE_MODE_ENABLED=true`, only the nft_media_handler app will be started |

Version: v6.10.0+
Default: false
Applications: Indexer, NFT_MEDIA_HANDLER

| +| `NFT_MEDIA_HANDLER_NODES_MAP` | String in JSON map format, where each key is an Erlang node and each value is a folder in the R2/S3 bucket, for example: `"{\"producer@172.18.0.4\": \"/folder_1\"}"`. If nft_media_handler runs in the same pod as the indexer, the map should contain a `self` key |

Version: v6.10.0+
Default: (empty)
Applications: NFT_MEDIA_HANDLER
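
The JSON map from the `NFT_MEDIA_HANDLER_NODES_MAP` row above, quoted for a shell; the node name and folder come straight from the table's own example.

```bash
# Single quotes keep the inner double quotes intact; "producer@172.18.0.4" is
# the Erlang node name and "/folder_1" the target folder in the R2/S3 bucket.
export NFT_MEDIA_HANDLER_NODES_MAP='{"producer@172.18.0.4": "/folder_1"}'
```

When nft_media_handler shares a pod with the indexer, the map should use a `self` key instead of a remote node name, as noted above.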

| +| `NFT_MEDIA_HANDLER_WORKER_CONCURRENCY` | Concurrency of media handling (resizing/uploading) |

Version: v6.10.0+
Default: 10
Applications: NFT_MEDIA_HANDLER

| +| `NFT_MEDIA_HANDLER_WORKER_BATCH_SIZE` | Number of URLs processed by one async task |

Version: v6.10.0+
Default: 10
Applications: NFT_MEDIA_HANDLER

| +| `NFT_MEDIA_HANDLER_WORKER_SPAWN_TASKS_TIMEOUT` | Timeout before spawning a new task |

Version: v6.10.0+
Default: 100ms
Applications: NFT_MEDIA_HANDLER

| +| `NFT_MEDIA_HANDLER_BACKFILL_ENABLED` | If `true`, unprocessed token instances from the DB will be processed via nft_media_handler |

Version: v6.10.0+
Default: false
Applications: Indexer

| +| `NFT_MEDIA_HANDLER_BACKFILL_QUEUE_SIZE` | Max size of backfill queue |

Version: v6.10.0+
Default: 1000
Applications: Indexer

| +| `NFT_MEDIA_HANDLER_BACKFILL_ENQUEUE_BUSY_WAITING_TIMEOUT` | Timeout before a new attempt to append an item to the backfill queue when it is full |

Version: v6.10.0+
Default: 1s
Applications: Indexer

| +| `NFT_MEDIA_HANDLER_CACHE_UNIQUENESS_MAX_SIZE` | Max size of the cache that stores media of already uploaded token instances |

Version: v6.10.0+
Default: 100_000
Applications: Indexer
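
Putting the worker and backfill knobs together, a sketch with illustrative numbers; the defaults are the ones listed in the rows above.

```bash
# Illustrative tuning; defaults are: concurrency 10, batch size 10,
# backfill disabled, queue size 1000, uniqueness cache 100_000 entries.
export NFT_MEDIA_HANDLER_ENABLED=true
export NFT_MEDIA_HANDLER_WORKER_CONCURRENCY=20
export NFT_MEDIA_HANDLER_WORKER_BATCH_SIZE=10
export NFT_MEDIA_HANDLER_BACKFILL_ENABLED=true
export NFT_MEDIA_HANDLER_BACKFILL_QUEUE_SIZE=5000
```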

| +| `ADDRESSES_BLACKLIST` | A comma-separated list of addresses to enable restricted access to them. |

Version: v6.10.0+
Default: (empty)
Applications: API

| +| `ADDRESSES_BLACKLIST_KEY` | A key to access blacklisted addresses (either by `ADDRESSES_BLACKLIST` or by blacklist provider). Can be passed via query param to the page's URL: `?key=...` |

Version: v6.10.0+
Default: (empty)
Applications: API

| +| `ADDRESSES_BLACKLIST_PROVIDER` | Blacklist provider type, available options: `blockaid` |

Version: v6.10.0+
Default: blockaid
Applications: API

| +| `ADDRESSES_BLACKLIST_URL` | URL to fetch the blacklist from |

Version: v6.10.0+
Default: (empty)
Applications: API

| +| `ADDRESSES_BLACKLIST_UPDATE_INTERVAL` | Interval between scheduled updates of the blacklist |

Version: v6.10.0+
Default: 15m
Applications: API

| +| `ADDRESSES_BLACKLIST_RETRY_INTERVAL` | Time to wait before a new blacklist fetching attempt after abnormal termination of the fetching task |

Version: v6.10.0+
Default: 5s
Applications: API
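
An end-to-end sketch of the blacklist configuration; the addresses, key, and URL are placeholders, while the provider name and the intervals use values documented in the rows above.

```bash
# Placeholder addresses, key, and URL; provider/intervals follow the table.
export ADDRESSES_BLACKLIST="0x0000000000000000000000000000000000000001,0x0000000000000000000000000000000000000002"
export ADDRESSES_BLACKLIST_KEY="change-me"   # passed to pages as ?key=...
export ADDRESSES_BLACKLIST_PROVIDER=blockaid
export ADDRESSES_BLACKLIST_URL="https://blacklist.example.com/feed"
export ADDRESSES_BLACKLIST_UPDATE_INTERVAL=15m
export ADDRESSES_BLACKLIST_RETRY_INTERVAL=5s
```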

| +| `MICROSERVICE_MULTICHAIN_SEARCH_URL` | Multichain Search Service API URL. Integration is enabled if this variable contains a valid URL. Implemented in [#11139](https://github.com/blockscout/blockscout/pull/11139) |

Version: master
Default: (empty)
Applications: API, Indexer

| +| `MICROSERVICE_MULTICHAIN_SEARCH_API_KEY` | Multichain Search Service API key. Implemented in [#11139](https://github.com/blockscout/blockscout/pull/11139) |

Version: master
Default: (empty)
Applications: API, Indexer

| +| `MIGRATION_BACKFILL_MULTICHAIN_SEARCH_BATCH_SIZE` | Batch size of backfilling Multichain Search Service DB. Implemented in [#11139](https://github.com/blockscout/blockscout/pull/11139) |

Version: master
Default: (empty)
Applications: Indexer
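
A sketch of wiring up the Multichain Search integration; the URL and key are placeholders, and per the description above the integration activates only when the URL is valid.

```bash
# Placeholders; the integration enables itself once the URL is valid.
export MICROSERVICE_MULTICHAIN_SEARCH_URL="https://multichain-search.example.com"
export MICROSERVICE_MULTICHAIN_SEARCH_API_KEY="change-me"
export MIGRATION_BACKFILL_MULTICHAIN_SEARCH_BATCH_SIZE=100  # illustrative
```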

| + +### Deprecated ENV Variables + + +| Variable | Required | Description | Default | Version | Need recompile | Deprecated in Version | +| ----------------------------------------------------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------- | -------- | -------------- | --------------------- | +| `RESTRICTED_LIST` | | A comma-separated list of addresses to enable restricted access to them. | (empty) | v3.3.3+ | | v6.10.0 | +| `RESTRICTED_LIST_KEY` | | A key to access addresses listed in the `RESTRICTED_LIST` variable. Can be passed via query param to the page's URL: `?key=...` | (empty) | v3.3.3+ | | v6.10.0 | + +## 6.9.2 + +### 🚀 Features + +- Xname app proxy ([#11010](https://github.com/blockscout/blockscout/issues/11010)) + +| Variable | Description | Parameters | +| --------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------- | +| `XNAME_BASE_API_URL` | [Xname API](https://xname.app/) base URL. Implemented in [#11010](https://github.com/blockscout/blockscout/pull/11010). |

Version: v6.9.2+
Default: https://gateway.xname.app
Applications: API

+| `XNAME_API_TOKEN` | [Xname API](https://xname.app/) token. Implemented in [#11010](https://github.com/blockscout/blockscout/pull/11010). | <br>Version: v6.9.2+<br>Default: (empty)<br>Applications: API<br> |
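+
+As a quick sketch, a hypothetical configuration for the Xname integration might look as follows; the token is a placeholder, and the base URL repeats the documented default, so it could be omitted.
+
+```env
+XNAME_BASE_API_URL=https://gateway.xname.app
+# Placeholder token; a real one must be obtained from xname.app
+XNAME_API_TOKEN=your-xname-api-token
+```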

+ +## 6.9.1 + +### 🐛 Bug Fixes + +- Add `auth0-forwarded-for` header in auth0 ([#11178](https://github.com/blockscout/blockscout/issues/11178)) + +### ⚙️ Miscellaneous Tasks + +- Extend recaptcha logging ([#11182](https://github.com/blockscout/blockscout/issues/11182)) + + +| Variable | Description | Parameters | +| ------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------------- | +| `RE_CAPTCHA_SCORE_THRESHOLD`| Changes reCAPTCHA score threshold. Implemented in [#11182](https://github.com/blockscout/blockscout/pull/11182) |

Version: v6.9.1+
Default: 0.5
Applications: API

| + ## 6.9.0 ### 🚀 Features diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index bfc85b0255bb..7507d6224673 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -3,7 +3,7 @@ ## Our Pledge In the interest of fostering an open and welcoming environment, we as -contributors and maintainers pledge to making participation in our project and +contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, diff --git a/apps/block_scout_web/README.md b/apps/block_scout_web/README.md index 8c1a6223937a..991adf0fc3cb 100644 --- a/apps/block_scout_web/README.md +++ b/apps/block_scout_web/README.md @@ -33,7 +33,7 @@ You can also run IEx (Interactive Elixir): `$ iex -S mix phx.server` (This can b * Lint the Elixir code: `mix credo --strict` * Run the dialyzer: `mix dialyzer --halt-exit-status` * Check the Elixir code for vulnerabilities: `mix sobelow --config` -* Update translations templates and translations and check there are no uncommitted changes: `mix gettext.extract --merge` +* Update translation templates and translations and check there are no uncommitted changes: `mix gettext.extract --merge` * Lint the JavaScript code: `cd assets && npm run eslint` ## Internationalization diff --git a/apps/block_scout_web/assets/package-lock.json b/apps/block_scout_web/assets/package-lock.json index 8ebf3c7dd3ed..4a9f90a859ad 100644 --- a/apps/block_scout_web/assets/package-lock.json +++ b/apps/block_scout_web/assets/package-lock.json @@ -7,8 +7,8 @@ "name": "blockscout", "license": "GPL-3.0", "dependencies": { - "@amplitude/analytics-browser": "^2.11.9", - "@fortawesome/fontawesome-free": "^6.7.1", + "@amplitude/analytics-browser": "^2.11.11", + "@fortawesome/fontawesome-free": "^6.7.2", "@tarekraafat/autocomplete.js": "^10.2.9", "@walletconnect/web3-provider": "^1.8.0", "assert": "^2.1.0", @@ -21,7 +21,7 @@ "crypto-browserify": "^3.12.1", "dropzone": "^5.9.3", "eth-net-props": "^1.0.41", - "highlight.js": "^11.10.0", + "highlight.js": "^11.11.1", "https-browserify": "^1.0.0", "humps": "^2.0.1", "jquery": "^3.7.1", @@ -46,7 +46,7 @@ "lodash.reduce": "^4.6.0", "luxon": "^3.5.0", "malihu-custom-scrollbar-plugin": "3.1.5", - "mixpanel-browser": "^2.56.0", + "mixpanel-browser": "^2.58.0", "moment": "^2.30.1", "nanomorph": "^5.4.0", "numeral": "^2.0.6", @@ -61,11 +61,11 @@ "redux": "^5.0.1", "stream-browserify": "^3.0.0", "stream-http": "^3.1.1", - "sweetalert2": "^11.14.1", + "sweetalert2": "^11.15.3", "urijs": "^1.19.11", "url": "^0.11.4", "util": "^0.12.5", - "viewerjs": "^1.11.6", + "viewerjs": "^1.11.7", "web3": "^4.12.1", "web3modal": "^1.9.12", "xss": "^1.0.15" @@ -89,11 +89,11 @@ "mini-css-extract-plugin": "^2.9.2", "postcss": "^8.4.49", "postcss-loader": "^8.1.1", - "sass": "^1.81.0", + "sass": "^1.83.0", "sass-loader": "^14.2.1", "style-loader": "^4.0.0", - "webpack": "^5.96.1", - "webpack-cli": "^5.1.4" + "webpack": "^5.97.1", + "webpack-cli": "^6.0.1" }, "engines": { "node": ">=16.0.0", @@ -121,16 +121,16 @@ "integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==" }, "node_modules/@amplitude/analytics-browser": { - "version": "2.11.9", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-browser/-/analytics-browser-2.11.9.tgz", - "integrity": 
"sha512-FHejpsW3OypNKaIBvMwLm74UUSBcR+VwrBsj7V2VlPDNRdeaFi21kJgVYUW5AcjxTsadMzBQGBb4BarZ4k2+9Q==", + "version": "2.11.11", + "resolved": "https://registry.npmjs.org/@amplitude/analytics-browser/-/analytics-browser-2.11.11.tgz", + "integrity": "sha512-AdpNNPwoNPezojeeU2ITcyqKcrrW8edVBHlCEvDNIXYkf5Y0i5Blbes3x6rgONsOeV2hx85trTXhhVkilWgHcg==", "dependencies": { - "@amplitude/analytics-client-common": "^2.3.5", - "@amplitude/analytics-core": "^2.5.4", + "@amplitude/analytics-client-common": "^2.3.7", + "@amplitude/analytics-core": "^2.5.5", "@amplitude/analytics-remote-config": "^0.4.0", "@amplitude/analytics-types": "^2.8.4", "@amplitude/plugin-autocapture-browser": "^1.0.2", - "@amplitude/plugin-page-view-tracking-browser": "^2.3.5", + "@amplitude/plugin-page-view-tracking-browser": "^2.3.7", "tslib": "^2.4.1" } }, @@ -140,12 +140,12 @@ "integrity": "sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==" }, "node_modules/@amplitude/analytics-client-common": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-client-common/-/analytics-client-common-2.3.5.tgz", - "integrity": "sha512-BCP+jorfLMAKK/g87fAk4IPP/NzQLMCep+Qe23tqOCWguwTEINYnyzD/GmhaIKXSM2o9pmMLlHbhkA1vXUtF8g==", + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/@amplitude/analytics-client-common/-/analytics-client-common-2.3.7.tgz", + "integrity": "sha512-HuwP2MFoeCTZWFIxkeYZOy5GP9ydjRO+n2KUMhHXTXGUx1M9vxIx1BUHsHKOZ4BZ5qEUTacgmznyc6uJJUiCWg==", "dependencies": { "@amplitude/analytics-connector": "^1.4.8", - "@amplitude/analytics-core": "^2.5.4", + "@amplitude/analytics-core": "^2.5.5", "@amplitude/analytics-types": "^2.8.4", "tslib": "^2.4.1" } @@ -164,9 +164,9 @@ } }, "node_modules/@amplitude/analytics-core": { - "version": "2.5.4", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-core/-/analytics-core-2.5.4.tgz", - "integrity": "sha512-J5ZF8hQmxmxM+7bu25a2TfTnk/LQ/oH5FYdg79f1lJ85Aa6oUlCDxgvXwy1RVpwaFjWlZQgV4XVaAUrxtSPRFw==", + "version": "2.5.5", + "resolved": "https://registry.npmjs.org/@amplitude/analytics-core/-/analytics-core-2.5.5.tgz", + "integrity": "sha512-OSB1WSD6qYoHyHliZaSujyatik2SP+vtoy8Y0vgRdYIpbE24F2q+SwBF3X5A1IeUqZ5fdpz+BNMwwUVl0Z4Ykg==", "dependencies": { "@amplitude/analytics-types": "^2.8.4", "tslib": "^2.4.1" @@ -223,11 +223,11 @@ "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==" }, "node_modules/@amplitude/plugin-page-view-tracking-browser": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/@amplitude/plugin-page-view-tracking-browser/-/plugin-page-view-tracking-browser-2.3.5.tgz", - "integrity": "sha512-qcV4DLxRAZRriYBNvjc2PGW1EDad6PSsIXmxVs6j8i9fxY2SfdvsFd/Qd23CHj1e6Dt5QpAVJZpUMCEdqqDZbA==", + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/@amplitude/plugin-page-view-tracking-browser/-/plugin-page-view-tracking-browser-2.3.7.tgz", + "integrity": "sha512-9LEzU33vpQ1OdPwVn0nwcCqPLkfK3P19hLmFTflx+aBM70TH9xCwvJL6nJ5eyc4kkmE9x7r0mRVnQIxaHfTxGg==", "dependencies": { - "@amplitude/analytics-client-common": "^2.3.5", + "@amplitude/analytics-client-common": "^2.3.7", "@amplitude/analytics-types": "^2.8.4", "tslib": "^2.4.1" } @@ -1955,12 +1955,12 @@ "dev": true }, "node_modules/@discoveryjs/json-ext": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.5.tgz", - "integrity": "sha512-6nFkfkmSeV/rqSaS4oWHgmpnYw194f6hmWF5is6b0J1naJZoiD0NTc9AiUwPHvWsowkjuHErCZT1wa0jg+BLIA==", + 
"version": "0.6.3", + "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.6.3.tgz", + "integrity": "sha512-4B4OijXeVNOPZlYA2oEwWOTkzyltLao+xbotHQeqN++Rv27Y6s818+n2Qkp8q+Fxhn0t/5lA5X1Mxktud8eayQ==", "dev": true, "engines": { - "node": ">=10.0.0" + "node": ">=14.17.0" } }, "node_modules/@emotion/is-prop-valid": { @@ -2099,9 +2099,9 @@ } }, "node_modules/@fortawesome/fontawesome-free": { - "version": "6.7.1", - "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-free/-/fontawesome-free-6.7.1.tgz", - "integrity": "sha512-ALIk/MOh5gYe1TG/ieS5mVUsk7VUIJTJKPMK9rFFqOgfp0Q3d5QiBXbcOMwUvs37fyZVCz46YjOE6IFeOAXCHA==", + "version": "6.7.2", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-free/-/fontawesome-free-6.7.2.tgz", + "integrity": "sha512-JUOtgFW6k9u4Y+xeIaEiLr3+cjoUPiAuLXoyKOJSia6Duzb7pq+A76P9ZdPDoAoxHdHzq6gE9/jKBGXlZT8FbA==", "engines": { "node": ">=6" } @@ -3817,188 +3817,188 @@ } }, "node_modules/@webassemblyjs/ast": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.12.1.tgz", - "integrity": "sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", + "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", "dev": true, "dependencies": { - "@webassemblyjs/helper-numbers": "1.11.6", - "@webassemblyjs/helper-wasm-bytecode": "1.11.6" + "@webassemblyjs/helper-numbers": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2" } }, "node_modules/@webassemblyjs/floating-point-hex-parser": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz", - "integrity": "sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.13.2.tgz", + "integrity": "sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA==", "dev": true }, "node_modules/@webassemblyjs/helper-api-error": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz", - "integrity": "sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.13.2.tgz", + "integrity": "sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ==", "dev": true }, "node_modules/@webassemblyjs/helper-buffer": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz", - "integrity": "sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.14.1.tgz", + "integrity": "sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA==", "dev": true }, "node_modules/@webassemblyjs/helper-numbers": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz", - "integrity": 
"sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.13.2.tgz", + "integrity": "sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA==", "dev": true, "dependencies": { - "@webassemblyjs/floating-point-hex-parser": "1.11.6", - "@webassemblyjs/helper-api-error": "1.11.6", + "@webassemblyjs/floating-point-hex-parser": "1.13.2", + "@webassemblyjs/helper-api-error": "1.13.2", "@xtuc/long": "4.2.2" } }, "node_modules/@webassemblyjs/helper-wasm-bytecode": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz", - "integrity": "sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.13.2.tgz", + "integrity": "sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA==", "dev": true }, "node_modules/@webassemblyjs/helper-wasm-section": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz", - "integrity": "sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.14.1.tgz", + "integrity": "sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw==", "dev": true, "dependencies": { - "@webassemblyjs/ast": "1.12.1", - "@webassemblyjs/helper-buffer": "1.12.1", - "@webassemblyjs/helper-wasm-bytecode": "1.11.6", - "@webassemblyjs/wasm-gen": "1.12.1" + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/wasm-gen": "1.14.1" } }, "node_modules/@webassemblyjs/ieee754": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz", - "integrity": "sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.13.2.tgz", + "integrity": "sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw==", "dev": true, "dependencies": { "@xtuc/ieee754": "^1.2.0" } }, "node_modules/@webassemblyjs/leb128": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.6.tgz", - "integrity": "sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.13.2.tgz", + "integrity": "sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw==", "dev": true, "dependencies": { "@xtuc/long": "4.2.2" } }, "node_modules/@webassemblyjs/utf8": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.6.tgz", - "integrity": "sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.13.2.tgz", + 
"integrity": "sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ==", "dev": true }, "node_modules/@webassemblyjs/wasm-edit": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz", - "integrity": "sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.14.1.tgz", + "integrity": "sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ==", "dev": true, "dependencies": { - "@webassemblyjs/ast": "1.12.1", - "@webassemblyjs/helper-buffer": "1.12.1", - "@webassemblyjs/helper-wasm-bytecode": "1.11.6", - "@webassemblyjs/helper-wasm-section": "1.12.1", - "@webassemblyjs/wasm-gen": "1.12.1", - "@webassemblyjs/wasm-opt": "1.12.1", - "@webassemblyjs/wasm-parser": "1.12.1", - "@webassemblyjs/wast-printer": "1.12.1" + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/helper-wasm-section": "1.14.1", + "@webassemblyjs/wasm-gen": "1.14.1", + "@webassemblyjs/wasm-opt": "1.14.1", + "@webassemblyjs/wasm-parser": "1.14.1", + "@webassemblyjs/wast-printer": "1.14.1" } }, "node_modules/@webassemblyjs/wasm-gen": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz", - "integrity": "sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.14.1.tgz", + "integrity": "sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg==", "dev": true, "dependencies": { - "@webassemblyjs/ast": "1.12.1", - "@webassemblyjs/helper-wasm-bytecode": "1.11.6", - "@webassemblyjs/ieee754": "1.11.6", - "@webassemblyjs/leb128": "1.11.6", - "@webassemblyjs/utf8": "1.11.6" + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/ieee754": "1.13.2", + "@webassemblyjs/leb128": "1.13.2", + "@webassemblyjs/utf8": "1.13.2" } }, "node_modules/@webassemblyjs/wasm-opt": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz", - "integrity": "sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.14.1.tgz", + "integrity": "sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw==", "dev": true, "dependencies": { - "@webassemblyjs/ast": "1.12.1", - "@webassemblyjs/helper-buffer": "1.12.1", - "@webassemblyjs/wasm-gen": "1.12.1", - "@webassemblyjs/wasm-parser": "1.12.1" + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/wasm-gen": "1.14.1", + "@webassemblyjs/wasm-parser": "1.14.1" } }, "node_modules/@webassemblyjs/wasm-parser": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz", - "integrity": "sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.14.1.tgz", + "integrity": 
"sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ==", "dev": true, "dependencies": { - "@webassemblyjs/ast": "1.12.1", - "@webassemblyjs/helper-api-error": "1.11.6", - "@webassemblyjs/helper-wasm-bytecode": "1.11.6", - "@webassemblyjs/ieee754": "1.11.6", - "@webassemblyjs/leb128": "1.11.6", - "@webassemblyjs/utf8": "1.11.6" + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-api-error": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/ieee754": "1.13.2", + "@webassemblyjs/leb128": "1.13.2", + "@webassemblyjs/utf8": "1.13.2" } }, "node_modules/@webassemblyjs/wast-printer": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz", - "integrity": "sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.14.1.tgz", + "integrity": "sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw==", "dev": true, "dependencies": { - "@webassemblyjs/ast": "1.12.1", + "@webassemblyjs/ast": "1.14.1", "@xtuc/long": "4.2.2" } }, "node_modules/@webpack-cli/configtest": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-2.1.1.tgz", - "integrity": "sha512-wy0mglZpDSiSS0XHrVR+BAdId2+yxPSoJW8fsna3ZpYSlufjvxnP4YbKTCBZnNIcGN4r6ZPXV55X4mYExOfLmw==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-3.0.1.tgz", + "integrity": "sha512-u8d0pJ5YFgneF/GuvEiDA61Tf1VDomHHYMjv/wc9XzYj7nopltpG96nXN5dJRstxZhcNpV1g+nT6CydO7pHbjA==", "dev": true, "engines": { - "node": ">=14.15.0" + "node": ">=18.12.0" }, "peerDependencies": { - "webpack": "5.x.x", - "webpack-cli": "5.x.x" + "webpack": "^5.82.0", + "webpack-cli": "6.x.x" } }, "node_modules/@webpack-cli/info": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-2.0.2.tgz", - "integrity": "sha512-zLHQdI/Qs1UyT5UBdWNqsARasIA+AaF8t+4u2aS2nEpBQh2mWIVb8qAklq0eUENnC5mOItrIB4LiS9xMtph18A==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-3.0.1.tgz", + "integrity": "sha512-coEmDzc2u/ffMvuW9aCjoRzNSPDl/XLuhPdlFRpT9tZHmJ/039az33CE7uH+8s0uL1j5ZNtfdv0HkfaKRBGJsQ==", "dev": true, "engines": { - "node": ">=14.15.0" + "node": ">=18.12.0" }, "peerDependencies": { - "webpack": "5.x.x", - "webpack-cli": "5.x.x" + "webpack": "^5.82.0", + "webpack-cli": "6.x.x" } }, "node_modules/@webpack-cli/serve": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-2.0.5.tgz", - "integrity": "sha512-lqaoKnRYBdo1UgDX8uF24AfGMifWK19TxPmM5FHc2vAGxrJ/qtyUyFBWoY1tISZdelsQ5fBcOusifo5o5wSJxQ==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-3.0.1.tgz", + "integrity": "sha512-sbgw03xQaCLiT6gcY/6u3qBDn01CWw/nbaXl3gTdTFuJJ75Gffv3E3DBpgvY2fkkrdS1fpjaXNOmJlnbtKauKg==", "dev": true, "engines": { - "node": ">=14.15.0" + "node": ">=18.12.0" }, "peerDependencies": { - "webpack": "5.x.x", - "webpack-cli": "5.x.x" + "webpack": "^5.82.0", + "webpack-cli": "6.x.x" }, "peerDependenciesMeta": { "webpack-dev-server": { @@ -6676,9 +6676,9 @@ } }, "node_modules/envinfo": { - "version": "7.8.1", - "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz", - "integrity": 
"sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.14.0.tgz", + "integrity": "sha512-CO40UI41xDQzhLB1hWyqUKgFhs250pNcGbyGKe1l/e4FSaI/+YE4IMG76GDt0In67WLPACIITC+sOi08x4wIvg==", "dev": true, "bin": { "envinfo": "dist/cli.js" @@ -8279,6 +8279,15 @@ "node": ">=8" } }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "bin": { + "flat": "cli.js" + } + }, "node_modules/flat-cache": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", @@ -8771,9 +8780,9 @@ } }, "node_modules/highlight.js": { - "version": "11.10.0", - "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.10.0.tgz", - "integrity": "sha512-SYVnVFswQER+zu1laSya563s+F8VDGt7o35d4utbamowvUNLLMovFqwCLSocpZTz3MgaSRA1IbqRWZv97dtErQ==", + "version": "11.11.1", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.11.1.tgz", + "integrity": "sha512-Xwwo44whKBVCYoliBQwaPvtd/2tYFkRQtXDWj1nackaV2JPXx3L0+Jvd8/qCJ2p+ML0/XVkJ2q+Mr+UVdpJK5w==", "engines": { "node": ">=12.0.0" } @@ -9393,7 +9402,7 @@ "node_modules/isobject": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", + "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", "dev": true, "engines": { "node": ">=0.10.0" @@ -12073,9 +12082,9 @@ "integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==" }, "node_modules/mixpanel-browser": { - "version": "2.56.0", - "resolved": "https://registry.npmjs.org/mixpanel-browser/-/mixpanel-browser-2.56.0.tgz", - "integrity": "sha512-GYeEz58pV2M9MZtK8vSPL4oJmCwGS08FDDRZvZwr5VJpWdT4Lgyg6zXhmNfCmSTEIw2coaarm7HZ4FL9dAVvnA==", + "version": "2.58.0", + "resolved": "https://registry.npmjs.org/mixpanel-browser/-/mixpanel-browser-2.58.0.tgz", + "integrity": "sha512-ZayNE4augjSJh5RxYKRPhFe1jzS9HZnoowvZaN4DaUeCezbLGVck46L+N9X8VLtK74UgLUYfehPgCr41rtgpRA==", "dependencies": { "rrweb": "2.0.0-alpha.13" } @@ -14217,9 +14226,9 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "node_modules/sass": { - "version": "1.81.0", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.81.0.tgz", - "integrity": "sha512-Q4fOxRfhmv3sqCLoGfvrC9pRV8btc0UtqL9mN6Yrv6Qi9ScL55CVH1vlPP863ISLEEMNLLuu9P+enCeGHlnzhA==", + "version": "1.83.0", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.83.0.tgz", + "integrity": "sha512-qsSxlayzoOjdvXMVLkzF84DJFc2HZEL/rFyGIKbbilYtAvlCxyuzUeff9LawTn4btVnLKg75Z8MMr1lxU1lfGw==", "dev": true, "dependencies": { "chokidar": "^4.0.0", @@ -14919,9 +14928,9 @@ } }, "node_modules/sweetalert2": { - "version": "11.14.1", - "resolved": "https://registry.npmjs.org/sweetalert2/-/sweetalert2-11.14.1.tgz", - "integrity": "sha512-xadhfcA4STGMh8nC5zHFFWURhRpWc4zyI3GdMDFH/m3hGWZeQQNWhX9xcG4lI9gZYsi/IlazKbwvvje3juL3Xg==", + "version": "11.15.3", + "resolved": "https://registry.npmjs.org/sweetalert2/-/sweetalert2-11.15.3.tgz", + "integrity": "sha512-+0imNg+XYL8tKgx8hM0xoiXX3KfgxHDmiDc8nTJFO89fQEEhJlkecSdyYOZ3IhVMcUmoNte4fTIwWiugwkPU6w==", "funding": { "type": "individual", "url": 
"https://github.com/sponsors/limonte" @@ -15500,9 +15509,9 @@ } }, "node_modules/viewerjs": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/viewerjs/-/viewerjs-1.11.6.tgz", - "integrity": "sha512-TlhdSp2oEOLFXvEp4psKaeTjR5zBjTRcM/sHUN8PkV1UWuY8HKC8n7GaVdW5Xqnwdr/F1OmzLik1QwDjI4w/nw==" + "version": "1.11.7", + "resolved": "https://registry.npmjs.org/viewerjs/-/viewerjs-1.11.7.tgz", + "integrity": "sha512-0JuVqOmL5v1jmEAlG5EBDR3XquxY8DWFQbFMprOXgaBB0F7Q/X9xWdEaQc59D8xzwkdUgXEMSSknTpriq95igg==" }, "node_modules/w3c-hr-time": { "version": "1.0.2", @@ -16023,16 +16032,16 @@ } }, "node_modules/webpack": { - "version": "5.96.1", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.96.1.tgz", - "integrity": "sha512-l2LlBSvVZGhL4ZrPwyr8+37AunkcYj5qh8o6u2/2rzoPc8gxFJkLj1WxNgooi9pnoc06jh0BjuXnamM4qlujZA==", + "version": "5.97.1", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.97.1.tgz", + "integrity": "sha512-EksG6gFY3L1eFMROS/7Wzgrii5mBAFe4rIr3r2BTfo7bcc+DWwFZ4OJ/miOuHJO/A85HwyI4eQ0F6IKXesO7Fg==", "dev": true, "dependencies": { "@types/eslint-scope": "^3.7.7", "@types/estree": "^1.0.6", - "@webassemblyjs/ast": "^1.12.1", - "@webassemblyjs/wasm-edit": "^1.12.1", - "@webassemblyjs/wasm-parser": "^1.12.1", + "@webassemblyjs/ast": "^1.14.1", + "@webassemblyjs/wasm-edit": "^1.14.1", + "@webassemblyjs/wasm-parser": "^1.14.1", "acorn": "^8.14.0", "browserslist": "^4.24.0", "chrome-trace-event": "^1.0.2", @@ -16069,42 +16078,39 @@ } }, "node_modules/webpack-cli": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-5.1.4.tgz", - "integrity": "sha512-pIDJHIEI9LR0yxHXQ+Qh95k2EvXpWzZ5l+d+jIo+RdSm9MiHfzazIxwwni/p7+x4eJZuvG1AJwgC4TNQ7NRgsg==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-6.0.1.tgz", + "integrity": "sha512-MfwFQ6SfwinsUVi0rNJm7rHZ31GyTcpVE5pgVA3hwFRb7COD4TzjUUwhGWKfO50+xdc2MQPuEBBJoqIMGt3JDw==", "dev": true, "dependencies": { - "@discoveryjs/json-ext": "^0.5.0", - "@webpack-cli/configtest": "^2.1.1", - "@webpack-cli/info": "^2.0.2", - "@webpack-cli/serve": "^2.0.5", + "@discoveryjs/json-ext": "^0.6.1", + "@webpack-cli/configtest": "^3.0.1", + "@webpack-cli/info": "^3.0.1", + "@webpack-cli/serve": "^3.0.1", "colorette": "^2.0.14", - "commander": "^10.0.1", + "commander": "^12.1.0", "cross-spawn": "^7.0.3", - "envinfo": "^7.7.3", + "envinfo": "^7.14.0", "fastest-levenshtein": "^1.0.12", "import-local": "^3.0.2", "interpret": "^3.1.1", "rechoir": "^0.8.0", - "webpack-merge": "^5.7.3" + "webpack-merge": "^6.0.1" }, "bin": { "webpack-cli": "bin/cli.js" }, "engines": { - "node": ">=14.15.0" + "node": ">=18.12.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/webpack" }, "peerDependencies": { - "webpack": "5.x.x" + "webpack": "^5.82.0" }, "peerDependenciesMeta": { - "@webpack-cli/generators": { - "optional": true - }, "webpack-bundle-analyzer": { "optional": true }, @@ -16114,25 +16120,26 @@ } }, "node_modules/webpack-cli/node_modules/commander": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz", - "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", + "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", "dev": true, "engines": { - "node": ">=14" + "node": ">=18" } }, 
"node_modules/webpack-merge": { - "version": "5.8.0", - "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz", - "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-6.0.1.tgz", + "integrity": "sha512-hXXvrjtx2PLYx4qruKl+kyRSLc52V+cCvMxRjmKwoA+CBbbF5GfIBtR6kCvl0fYGqTUPKB+1ktVmTHqMOzgCBg==", "dev": true, "dependencies": { "clone-deep": "^4.0.1", - "wildcard": "^2.0.0" + "flat": "^5.0.2", + "wildcard": "^2.0.1" }, "engines": { - "node": ">=10.0.0" + "node": ">=18.0.0" } }, "node_modules/webpack/node_modules/schema-utils": { @@ -16268,9 +16275,9 @@ } }, "node_modules/wildcard": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", - "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.1.tgz", + "integrity": "sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==", "dev": true }, "node_modules/word-wrap": { @@ -16514,16 +16521,16 @@ "integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==" }, "@amplitude/analytics-browser": { - "version": "2.11.9", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-browser/-/analytics-browser-2.11.9.tgz", - "integrity": "sha512-FHejpsW3OypNKaIBvMwLm74UUSBcR+VwrBsj7V2VlPDNRdeaFi21kJgVYUW5AcjxTsadMzBQGBb4BarZ4k2+9Q==", + "version": "2.11.11", + "resolved": "https://registry.npmjs.org/@amplitude/analytics-browser/-/analytics-browser-2.11.11.tgz", + "integrity": "sha512-AdpNNPwoNPezojeeU2ITcyqKcrrW8edVBHlCEvDNIXYkf5Y0i5Blbes3x6rgONsOeV2hx85trTXhhVkilWgHcg==", "requires": { - "@amplitude/analytics-client-common": "^2.3.5", - "@amplitude/analytics-core": "^2.5.4", + "@amplitude/analytics-client-common": "^2.3.7", + "@amplitude/analytics-core": "^2.5.5", "@amplitude/analytics-remote-config": "^0.4.0", "@amplitude/analytics-types": "^2.8.4", "@amplitude/plugin-autocapture-browser": "^1.0.2", - "@amplitude/plugin-page-view-tracking-browser": "^2.3.5", + "@amplitude/plugin-page-view-tracking-browser": "^2.3.7", "tslib": "^2.4.1" }, "dependencies": { @@ -16535,12 +16542,12 @@ } }, "@amplitude/analytics-client-common": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-client-common/-/analytics-client-common-2.3.5.tgz", - "integrity": "sha512-BCP+jorfLMAKK/g87fAk4IPP/NzQLMCep+Qe23tqOCWguwTEINYnyzD/GmhaIKXSM2o9pmMLlHbhkA1vXUtF8g==", + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/@amplitude/analytics-client-common/-/analytics-client-common-2.3.7.tgz", + "integrity": "sha512-HuwP2MFoeCTZWFIxkeYZOy5GP9ydjRO+n2KUMhHXTXGUx1M9vxIx1BUHsHKOZ4BZ5qEUTacgmznyc6uJJUiCWg==", "requires": { "@amplitude/analytics-connector": "^1.4.8", - "@amplitude/analytics-core": "^2.5.4", + "@amplitude/analytics-core": "^2.5.5", "@amplitude/analytics-types": "^2.8.4", "tslib": "^2.4.1" }, @@ -16561,9 +16568,9 @@ } }, "@amplitude/analytics-core": { - "version": "2.5.4", - "resolved": "https://registry.npmjs.org/@amplitude/analytics-core/-/analytics-core-2.5.4.tgz", - "integrity": "sha512-J5ZF8hQmxmxM+7bu25a2TfTnk/LQ/oH5FYdg79f1lJ85Aa6oUlCDxgvXwy1RVpwaFjWlZQgV4XVaAUrxtSPRFw==", + "version": "2.5.5", + "resolved": 
"https://registry.npmjs.org/@amplitude/analytics-core/-/analytics-core-2.5.5.tgz", + "integrity": "sha512-OSB1WSD6qYoHyHliZaSujyatik2SP+vtoy8Y0vgRdYIpbE24F2q+SwBF3X5A1IeUqZ5fdpz+BNMwwUVl0Z4Ykg==", "requires": { "@amplitude/analytics-types": "^2.8.4", "tslib": "^2.4.1" @@ -16626,11 +16633,11 @@ } }, "@amplitude/plugin-page-view-tracking-browser": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/@amplitude/plugin-page-view-tracking-browser/-/plugin-page-view-tracking-browser-2.3.5.tgz", - "integrity": "sha512-qcV4DLxRAZRriYBNvjc2PGW1EDad6PSsIXmxVs6j8i9fxY2SfdvsFd/Qd23CHj1e6Dt5QpAVJZpUMCEdqqDZbA==", + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/@amplitude/plugin-page-view-tracking-browser/-/plugin-page-view-tracking-browser-2.3.7.tgz", + "integrity": "sha512-9LEzU33vpQ1OdPwVn0nwcCqPLkfK3P19hLmFTflx+aBM70TH9xCwvJL6nJ5eyc4kkmE9x7r0mRVnQIxaHfTxGg==", "requires": { - "@amplitude/analytics-client-common": "^2.3.5", + "@amplitude/analytics-client-common": "^2.3.7", "@amplitude/analytics-types": "^2.8.4", "tslib": "^2.4.1" }, @@ -17824,9 +17831,9 @@ "dev": true }, "@discoveryjs/json-ext": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.5.tgz", - "integrity": "sha512-6nFkfkmSeV/rqSaS4oWHgmpnYw194f6hmWF5is6b0J1naJZoiD0NTc9AiUwPHvWsowkjuHErCZT1wa0jg+BLIA==", + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.6.3.tgz", + "integrity": "sha512-4B4OijXeVNOPZlYA2oEwWOTkzyltLao+xbotHQeqN++Rv27Y6s818+n2Qkp8q+Fxhn0t/5lA5X1Mxktud8eayQ==", "dev": true }, "@emotion/is-prop-valid": { @@ -17928,9 +17935,9 @@ "integrity": "sha512-tqsQiBQDQdmPWE1xkkBq4rlSW5QZpLOUJ5RJh2/9fug+q9tnUhuZoVLk7s0scUIKTOzEtR72DFBXI4WiZcMpvw==" }, "@fortawesome/fontawesome-free": { - "version": "6.7.1", - "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-free/-/fontawesome-free-6.7.1.tgz", - "integrity": "sha512-ALIk/MOh5gYe1TG/ieS5mVUsk7VUIJTJKPMK9rFFqOgfp0Q3d5QiBXbcOMwUvs37fyZVCz46YjOE6IFeOAXCHA==" + "version": "6.7.2", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-free/-/fontawesome-free-6.7.2.tgz", + "integrity": "sha512-JUOtgFW6k9u4Y+xeIaEiLr3+cjoUPiAuLXoyKOJSia6Duzb7pq+A76P9ZdPDoAoxHdHzq6gE9/jKBGXlZT8FbA==" }, "@humanwhocodes/config-array": { "version": "0.13.0", @@ -19212,169 +19219,169 @@ } }, "@webassemblyjs/ast": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.12.1.tgz", - "integrity": "sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", + "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", "dev": true, "requires": { - "@webassemblyjs/helper-numbers": "1.11.6", - "@webassemblyjs/helper-wasm-bytecode": "1.11.6" + "@webassemblyjs/helper-numbers": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2" } }, "@webassemblyjs/floating-point-hex-parser": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz", - "integrity": "sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.13.2.tgz", + "integrity": 
"sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA==", "dev": true }, "@webassemblyjs/helper-api-error": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz", - "integrity": "sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.13.2.tgz", + "integrity": "sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ==", "dev": true }, "@webassemblyjs/helper-buffer": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz", - "integrity": "sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.14.1.tgz", + "integrity": "sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA==", "dev": true }, "@webassemblyjs/helper-numbers": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz", - "integrity": "sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.13.2.tgz", + "integrity": "sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA==", "dev": true, "requires": { - "@webassemblyjs/floating-point-hex-parser": "1.11.6", - "@webassemblyjs/helper-api-error": "1.11.6", + "@webassemblyjs/floating-point-hex-parser": "1.13.2", + "@webassemblyjs/helper-api-error": "1.13.2", "@xtuc/long": "4.2.2" } }, "@webassemblyjs/helper-wasm-bytecode": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz", - "integrity": "sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.13.2.tgz", + "integrity": "sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA==", "dev": true }, "@webassemblyjs/helper-wasm-section": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz", - "integrity": "sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.14.1.tgz", + "integrity": "sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.12.1", - "@webassemblyjs/helper-buffer": "1.12.1", - "@webassemblyjs/helper-wasm-bytecode": "1.11.6", - "@webassemblyjs/wasm-gen": "1.12.1" + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/wasm-gen": "1.14.1" } }, "@webassemblyjs/ieee754": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz", - 
"integrity": "sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.13.2.tgz", + "integrity": "sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw==", "dev": true, "requires": { "@xtuc/ieee754": "^1.2.0" } }, "@webassemblyjs/leb128": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.6.tgz", - "integrity": "sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.13.2.tgz", + "integrity": "sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw==", "dev": true, "requires": { "@xtuc/long": "4.2.2" } }, "@webassemblyjs/utf8": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.6.tgz", - "integrity": "sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.13.2.tgz", + "integrity": "sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ==", "dev": true }, "@webassemblyjs/wasm-edit": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz", - "integrity": "sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.14.1.tgz", + "integrity": "sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.12.1", - "@webassemblyjs/helper-buffer": "1.12.1", - "@webassemblyjs/helper-wasm-bytecode": "1.11.6", - "@webassemblyjs/helper-wasm-section": "1.12.1", - "@webassemblyjs/wasm-gen": "1.12.1", - "@webassemblyjs/wasm-opt": "1.12.1", - "@webassemblyjs/wasm-parser": "1.12.1", - "@webassemblyjs/wast-printer": "1.12.1" + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/helper-wasm-section": "1.14.1", + "@webassemblyjs/wasm-gen": "1.14.1", + "@webassemblyjs/wasm-opt": "1.14.1", + "@webassemblyjs/wasm-parser": "1.14.1", + "@webassemblyjs/wast-printer": "1.14.1" } }, "@webassemblyjs/wasm-gen": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz", - "integrity": "sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.14.1.tgz", + "integrity": "sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.12.1", - "@webassemblyjs/helper-wasm-bytecode": "1.11.6", - "@webassemblyjs/ieee754": "1.11.6", - "@webassemblyjs/leb128": "1.11.6", - "@webassemblyjs/utf8": "1.11.6" + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/ieee754": "1.13.2", + "@webassemblyjs/leb128": "1.13.2", + "@webassemblyjs/utf8": "1.13.2" } }, "@webassemblyjs/wasm-opt": { - "version": 
"1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz", - "integrity": "sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.14.1.tgz", + "integrity": "sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.12.1", - "@webassemblyjs/helper-buffer": "1.12.1", - "@webassemblyjs/wasm-gen": "1.12.1", - "@webassemblyjs/wasm-parser": "1.12.1" + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/wasm-gen": "1.14.1", + "@webassemblyjs/wasm-parser": "1.14.1" } }, "@webassemblyjs/wasm-parser": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz", - "integrity": "sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.14.1.tgz", + "integrity": "sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.12.1", - "@webassemblyjs/helper-api-error": "1.11.6", - "@webassemblyjs/helper-wasm-bytecode": "1.11.6", - "@webassemblyjs/ieee754": "1.11.6", - "@webassemblyjs/leb128": "1.11.6", - "@webassemblyjs/utf8": "1.11.6" + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-api-error": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/ieee754": "1.13.2", + "@webassemblyjs/leb128": "1.13.2", + "@webassemblyjs/utf8": "1.13.2" } }, "@webassemblyjs/wast-printer": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz", - "integrity": "sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.14.1.tgz", + "integrity": "sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.12.1", + "@webassemblyjs/ast": "1.14.1", "@xtuc/long": "4.2.2" } }, "@webpack-cli/configtest": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-2.1.1.tgz", - "integrity": "sha512-wy0mglZpDSiSS0XHrVR+BAdId2+yxPSoJW8fsna3ZpYSlufjvxnP4YbKTCBZnNIcGN4r6ZPXV55X4mYExOfLmw==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-3.0.1.tgz", + "integrity": "sha512-u8d0pJ5YFgneF/GuvEiDA61Tf1VDomHHYMjv/wc9XzYj7nopltpG96nXN5dJRstxZhcNpV1g+nT6CydO7pHbjA==", "dev": true, "requires": {} }, "@webpack-cli/info": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-2.0.2.tgz", - "integrity": "sha512-zLHQdI/Qs1UyT5UBdWNqsARasIA+AaF8t+4u2aS2nEpBQh2mWIVb8qAklq0eUENnC5mOItrIB4LiS9xMtph18A==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-3.0.1.tgz", + "integrity": "sha512-coEmDzc2u/ffMvuW9aCjoRzNSPDl/XLuhPdlFRpT9tZHmJ/039az33CE7uH+8s0uL1j5ZNtfdv0HkfaKRBGJsQ==", "dev": true, "requires": {} }, "@webpack-cli/serve": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-2.0.5.tgz", - 
"integrity": "sha512-lqaoKnRYBdo1UgDX8uF24AfGMifWK19TxPmM5FHc2vAGxrJ/qtyUyFBWoY1tISZdelsQ5fBcOusifo5o5wSJxQ==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-3.0.1.tgz", + "integrity": "sha512-sbgw03xQaCLiT6gcY/6u3qBDn01CWw/nbaXl3gTdTFuJJ75Gffv3E3DBpgvY2fkkrdS1fpjaXNOmJlnbtKauKg==", "dev": true, "requires": {} }, @@ -21369,9 +21376,9 @@ "dev": true }, "envinfo": { - "version": "7.8.1", - "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz", - "integrity": "sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw==", + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.14.0.tgz", + "integrity": "sha512-CO40UI41xDQzhLB1hWyqUKgFhs250pNcGbyGKe1l/e4FSaI/+YE4IMG76GDt0In67WLPACIITC+sOi08x4wIvg==", "dev": true }, "errno": { @@ -22653,6 +22660,12 @@ } } }, + "flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true + }, "flat-cache": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", @@ -22991,9 +23004,9 @@ } }, "highlight.js": { - "version": "11.10.0", - "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.10.0.tgz", - "integrity": "sha512-SYVnVFswQER+zu1laSya563s+F8VDGt7o35d4utbamowvUNLLMovFqwCLSocpZTz3MgaSRA1IbqRWZv97dtErQ==" + "version": "11.11.1", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.11.1.tgz", + "integrity": "sha512-Xwwo44whKBVCYoliBQwaPvtd/2tYFkRQtXDWj1nackaV2JPXx3L0+Jvd8/qCJ2p+ML0/XVkJ2q+Mr+UVdpJK5w==" }, "hmac-drbg": { "version": "1.0.1", @@ -23417,7 +23430,7 @@ "isobject": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", + "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", "dev": true }, "isomorphic-ws": { @@ -25514,9 +25527,9 @@ "integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==" }, "mixpanel-browser": { - "version": "2.56.0", - "resolved": "https://registry.npmjs.org/mixpanel-browser/-/mixpanel-browser-2.56.0.tgz", - "integrity": "sha512-GYeEz58pV2M9MZtK8vSPL4oJmCwGS08FDDRZvZwr5VJpWdT4Lgyg6zXhmNfCmSTEIw2coaarm7HZ4FL9dAVvnA==", + "version": "2.58.0", + "resolved": "https://registry.npmjs.org/mixpanel-browser/-/mixpanel-browser-2.58.0.tgz", + "integrity": "sha512-ZayNE4augjSJh5RxYKRPhFe1jzS9HZnoowvZaN4DaUeCezbLGVck46L+N9X8VLtK74UgLUYfehPgCr41rtgpRA==", "requires": { "rrweb": "2.0.0-alpha.13" } @@ -27025,9 +27038,9 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "sass": { - "version": "1.81.0", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.81.0.tgz", - "integrity": "sha512-Q4fOxRfhmv3sqCLoGfvrC9pRV8btc0UtqL9mN6Yrv6Qi9ScL55CVH1vlPP863ISLEEMNLLuu9P+enCeGHlnzhA==", + "version": "1.83.0", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.83.0.tgz", + "integrity": "sha512-qsSxlayzoOjdvXMVLkzF84DJFc2HZEL/rFyGIKbbilYtAvlCxyuzUeff9LawTn4btVnLKg75Z8MMr1lxU1lfGw==", "dev": true, "requires": { "@parcel/watcher": "^2.4.1", @@ -27516,9 +27529,9 @@ } }, "sweetalert2": { - "version": "11.14.1", - "resolved": "https://registry.npmjs.org/sweetalert2/-/sweetalert2-11.14.1.tgz", - "integrity": 
"sha512-xadhfcA4STGMh8nC5zHFFWURhRpWc4zyI3GdMDFH/m3hGWZeQQNWhX9xcG4lI9gZYsi/IlazKbwvvje3juL3Xg==" + "version": "11.15.3", + "resolved": "https://registry.npmjs.org/sweetalert2/-/sweetalert2-11.15.3.tgz", + "integrity": "sha512-+0imNg+XYL8tKgx8hM0xoiXX3KfgxHDmiDc8nTJFO89fQEEhJlkecSdyYOZ3IhVMcUmoNte4fTIwWiugwkPU6w==" }, "symbol-tree": { "version": "3.2.4", @@ -27938,9 +27951,9 @@ } }, "viewerjs": { - "version": "1.11.6", - "resolved": "https://registry.npmjs.org/viewerjs/-/viewerjs-1.11.6.tgz", - "integrity": "sha512-TlhdSp2oEOLFXvEp4psKaeTjR5zBjTRcM/sHUN8PkV1UWuY8HKC8n7GaVdW5Xqnwdr/F1OmzLik1QwDjI4w/nw==" + "version": "1.11.7", + "resolved": "https://registry.npmjs.org/viewerjs/-/viewerjs-1.11.7.tgz", + "integrity": "sha512-0JuVqOmL5v1jmEAlG5EBDR3XquxY8DWFQbFMprOXgaBB0F7Q/X9xWdEaQc59D8xzwkdUgXEMSSknTpriq95igg==" }, "w3c-hr-time": { "version": "1.0.2", @@ -28380,16 +28393,16 @@ "dev": true }, "webpack": { - "version": "5.96.1", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.96.1.tgz", - "integrity": "sha512-l2LlBSvVZGhL4ZrPwyr8+37AunkcYj5qh8o6u2/2rzoPc8gxFJkLj1WxNgooi9pnoc06jh0BjuXnamM4qlujZA==", + "version": "5.97.1", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.97.1.tgz", + "integrity": "sha512-EksG6gFY3L1eFMROS/7Wzgrii5mBAFe4rIr3r2BTfo7bcc+DWwFZ4OJ/miOuHJO/A85HwyI4eQ0F6IKXesO7Fg==", "dev": true, "requires": { "@types/eslint-scope": "^3.7.7", "@types/estree": "^1.0.6", - "@webassemblyjs/ast": "^1.12.1", - "@webassemblyjs/wasm-edit": "^1.12.1", - "@webassemblyjs/wasm-parser": "^1.12.1", + "@webassemblyjs/ast": "^1.14.1", + "@webassemblyjs/wasm-edit": "^1.14.1", + "@webassemblyjs/wasm-parser": "^1.14.1", "acorn": "^8.14.0", "browserslist": "^4.24.0", "chrome-trace-event": "^1.0.2", @@ -28430,42 +28443,43 @@ } }, "webpack-cli": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-5.1.4.tgz", - "integrity": "sha512-pIDJHIEI9LR0yxHXQ+Qh95k2EvXpWzZ5l+d+jIo+RdSm9MiHfzazIxwwni/p7+x4eJZuvG1AJwgC4TNQ7NRgsg==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-6.0.1.tgz", + "integrity": "sha512-MfwFQ6SfwinsUVi0rNJm7rHZ31GyTcpVE5pgVA3hwFRb7COD4TzjUUwhGWKfO50+xdc2MQPuEBBJoqIMGt3JDw==", "dev": true, "requires": { - "@discoveryjs/json-ext": "^0.5.0", - "@webpack-cli/configtest": "^2.1.1", - "@webpack-cli/info": "^2.0.2", - "@webpack-cli/serve": "^2.0.5", + "@discoveryjs/json-ext": "^0.6.1", + "@webpack-cli/configtest": "^3.0.1", + "@webpack-cli/info": "^3.0.1", + "@webpack-cli/serve": "^3.0.1", "colorette": "^2.0.14", - "commander": "^10.0.1", + "commander": "^12.1.0", "cross-spawn": "^7.0.3", - "envinfo": "^7.7.3", + "envinfo": "^7.14.0", "fastest-levenshtein": "^1.0.12", "import-local": "^3.0.2", "interpret": "^3.1.1", "rechoir": "^0.8.0", - "webpack-merge": "^5.7.3" + "webpack-merge": "^6.0.1" }, "dependencies": { "commander": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz", - "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", + "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", "dev": true } } }, "webpack-merge": { - "version": "5.8.0", - "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz", - "integrity": 
"sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-6.0.1.tgz", + "integrity": "sha512-hXXvrjtx2PLYx4qruKl+kyRSLc52V+cCvMxRjmKwoA+CBbbF5GfIBtR6kCvl0fYGqTUPKB+1ktVmTHqMOzgCBg==", "dev": true, "requires": { "clone-deep": "^4.0.1", - "wildcard": "^2.0.0" + "flat": "^5.0.2", + "wildcard": "^2.0.1" } }, "whatwg-encoding": { @@ -28549,9 +28563,9 @@ } }, "wildcard": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", - "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.1.tgz", + "integrity": "sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==", "dev": true }, "word-wrap": { diff --git a/apps/block_scout_web/assets/package.json b/apps/block_scout_web/assets/package.json index 1302f2873e31..3dfaea0d6fce 100644 --- a/apps/block_scout_web/assets/package.json +++ b/apps/block_scout_web/assets/package.json @@ -19,8 +19,8 @@ "eslint": "eslint js/**" }, "dependencies": { - "@fortawesome/fontawesome-free": "^6.7.1", - "@amplitude/analytics-browser": "^2.11.9", + "@fortawesome/fontawesome-free": "^6.7.2", + "@amplitude/analytics-browser": "^2.11.11", "@tarekraafat/autocomplete.js": "^10.2.9", "@walletconnect/web3-provider": "^1.8.0", "assert": "^2.1.0", @@ -33,7 +33,7 @@ "crypto-browserify": "^3.12.1", "dropzone": "^5.9.3", "eth-net-props": "^1.0.41", - "highlight.js": "^11.10.0", + "highlight.js": "^11.11.1", "https-browserify": "^1.0.0", "humps": "^2.0.1", "jquery": "^3.7.1", @@ -58,7 +58,7 @@ "lodash.reduce": "^4.6.0", "luxon": "^3.5.0", "malihu-custom-scrollbar-plugin": "3.1.5", - "mixpanel-browser": "^2.56.0", + "mixpanel-browser": "^2.58.0", "moment": "^2.30.1", "nanomorph": "^5.4.0", "numeral": "^2.0.6", @@ -73,11 +73,11 @@ "redux": "^5.0.1", "stream-browserify": "^3.0.0", "stream-http": "^3.1.1", - "sweetalert2": "^11.14.1", + "sweetalert2": "^11.15.3", "urijs": "^1.19.11", "url": "^0.11.4", "util": "^0.12.5", - "viewerjs": "^1.11.6", + "viewerjs": "^1.11.7", "web3": "^4.12.1", "web3modal": "^1.9.12", "xss": "^1.0.15" @@ -101,11 +101,11 @@ "mini-css-extract-plugin": "^2.9.2", "postcss": "^8.4.49", "postcss-loader": "^8.1.1", - "sass": "^1.81.0", + "sass": "^1.83.0", "sass-loader": "^14.2.1", "style-loader": "^4.0.0", - "webpack": "^5.96.1", - "webpack-cli": "^5.1.4" + "webpack": "^5.97.1", + "webpack-cli": "^6.0.1" }, "jest": { "moduleNameMapper": { diff --git a/apps/block_scout_web/lib/block_scout_web.ex b/apps/block_scout_web/lib/block_scout_web.ex index f914ed426af0..b7afec23a90d 100644 --- a/apps/block_scout_web/lib/block_scout_web.ex +++ b/apps/block_scout_web/lib/block_scout_web.ex @@ -85,6 +85,7 @@ defmodule BlockScoutWeb do use Gettext, backend: BlockScoutWeb.Gettext import Explorer.Chain.SmartContract.Proxy.Models.Implementation, only: [proxy_implementations_association: 0] + import BlockScoutWeb.AccessHelper, only: [valid_address_hash_and_not_restricted_access?: 1] end end diff --git a/apps/block_scout_web/lib/block_scout_web/application.ex b/apps/block_scout_web/lib/block_scout_web/application.ex index e50cc806c161..6fb956b067bf 100644 --- a/apps/block_scout_web/lib/block_scout_web/application.ex +++ b/apps/block_scout_web/lib/block_scout_web/application.ex @@ -6,14 +6,20 @@ defmodule BlockScoutWeb.Application do 
use Application use Utils.CompileTimeEnvHelper, disable_api?: [:block_scout_web, :disable_api?] - alias BlockScoutWeb.Endpoint + alias BlockScoutWeb.{Endpoint, HealthEndpoint} def start(_type, _args) do - base_children = [Supervisor.child_spec(Endpoint, [])] - api_children = setup_and_define_children() - all_children = base_children ++ api_children opts = [strategy: :one_for_one, name: BlockScoutWeb.Supervisor, max_restarts: 1_000] - Supervisor.start_link(all_children, opts) + + if Application.get_env(:nft_media_handler, :standalone_media_worker?) do + Supervisor.start_link([Supervisor.child_spec(HealthEndpoint, [])], opts) + else + base_children = [Supervisor.child_spec(Endpoint, [])] + api_children = setup_and_define_children() + all_children = base_children ++ api_children + + Supervisor.start_link(all_children, opts) + end end # Tell Phoenix to update the endpoint configuration diff --git a/apps/block_scout_web/lib/block_scout_web/chain.ex b/apps/block_scout_web/lib/block_scout_web/chain.ex index 6bb32125ba31..1d92c0bc3e22 100644 --- a/apps/block_scout_web/lib/block_scout_web/chain.ex +++ b/apps/block_scout_web/lib/block_scout_web/chain.ex @@ -2,6 +2,7 @@ defmodule BlockScoutWeb.Chain do @moduledoc """ Converts the `param` to the corresponding resource that uses that format of param. """ + use Utils.CompileTimeEnvHelper, chain_type: [:explorer, :chain_type] import Explorer.Chain, only: [ @@ -52,7 +53,6 @@ defmodule BlockScoutWeb.Chain do alias Explorer.Chain.Optimism.FrameSequence, as: OptimismFrameSequence alias Explorer.Chain.Optimism.OutputRoot, as: OptimismOutputRoot alias Explorer.Chain.Scroll.Bridge, as: ScrollBridge - alias Explorer.PagingOptions defimpl Poison.Encoder, for: Decimal do @@ -105,10 +105,37 @@ defmodule BlockScoutWeb.Chain do def from_param(param) when byte_size(param) == @full_hash_len, do: block_or_transaction_or_operation_or_blob_from_param("0x" <> param) - def from_param(string) when is_binary(string) do - case param_to_block_number(string) do - {:ok, number} -> number_to_block(number) - _ -> search_ens_domain(string) + if @chain_type == :filecoin do + def from_param(string) when is_binary(string) do + case param_to_block_number(string) do + {:ok, number} -> + number_to_block(number) + + _ -> + case Search.maybe_parse_filecoin_address(string) do + {:ok, filecoin_address} -> + result = + filecoin_address + |> Search.address_by_filecoin_id_or_robust() + # credo:disable-for-next-line Credo.Check.Design.AliasUsage + |> Explorer.Chain.select_repo(api?: true).one() + + (result && {:ok, result}) || {:error, :not_found} + + _ -> + search_ens_domain(string) + end + end + end + else + def from_param(string) when is_binary(string) do + case param_to_block_number(string) do + {:ok, number} -> + number_to_block(number) + + _ -> + search_ens_domain(string) + end end end @@ -179,28 +206,6 @@ defmodule BlockScoutWeb.Chain do end end - def paging_options(%{ - "address_hash" => address_hash_string, - "transaction_hash" => transaction_hash_string, - "block_hash" => block_hash_string, - "holder_count" => holder_count_string, - "name" => name_string, - "inserted_at" => inserted_at_string, - "item_type" => item_type_string - }) - when is_binary(address_hash_string) and is_binary(transaction_hash_string) and is_binary(block_hash_string) and - is_binary(holder_count_string) and is_binary(name_string) and is_binary(inserted_at_string) and - is_binary(item_type_string) do - [ - paging_options: %{ - @default_paging_options - | key: - {address_hash_string, transaction_hash_string, 
block_hash_string, holder_count_string, name_string, - inserted_at_string, item_type_string} - } - ] - end - def paging_options( %{ "market_cap" => market_cap_string, @@ -605,7 +610,7 @@ defmodule BlockScoutWeb.Chain do nil -> {:error, :not_found} - result -> + {result, _address_hash} -> {:ok, result} end end @@ -747,29 +752,6 @@ defmodule BlockScoutWeb.Chain do %{"number" => number} end - # clause for search results pagination - defp paging_params(%{ - address_hash: address_hash, - transaction_hash: transaction_hash, - block_hash: block_hash, - holder_count: holder_count, - name: name, - inserted_at: inserted_at, - type: type - }) do - inserted_at_datetime = DateTime.to_iso8601(inserted_at) - - %{ - "address_hash" => address_hash, - "transaction_hash" => transaction_hash, - "block_hash" => block_hash, - "holder_count" => holder_count, - "name" => name, - "inserted_at" => inserted_at_datetime, - "item_type" => type - } - end - defp paging_params(%Instance{token_id: token_id}) do %{"unique_token" => Decimal.to_integer(token_id)} end diff --git a/apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex b/apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex index 424a3f068f27..65b0ef1cf55a 100644 --- a/apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex +++ b/apps/block_scout_web/lib/block_scout_web/channels/address_channel.ex @@ -66,8 +66,14 @@ defmodule BlockScoutWeb.AddressChannel do ] ++ @chain_type_transaction_associations - def join("addresses:" <> address_hash, _params, socket) do - {:ok, %{}, assign(socket, :address_hash, address_hash)} + def join("addresses:" <> address_hash_string, _params, socket) do + case valid_address_hash_and_not_restricted_access?(address_hash_string) do + :ok -> + {:ok, %{}, assign(socket, :address_hash, address_hash_string)} + + reason -> + {:error, %{reason: reason}} + end end def handle_in("get_balance", _, socket) do diff --git a/apps/block_scout_web/lib/block_scout_web/channels/reward_channel.ex b/apps/block_scout_web/lib/block_scout_web/channels/reward_channel.ex index e53ac616e7de..ecc3c851e322 100644 --- a/apps/block_scout_web/lib/block_scout_web/channels/reward_channel.ex +++ b/apps/block_scout_web/lib/block_scout_web/channels/reward_channel.ex @@ -10,10 +10,15 @@ defmodule BlockScoutWeb.RewardChannel do intercept(["new_reward"]) - def join("rewards:" <> address_hash, _params, socket) do - with {:ok, hash} <- Chain.string_to_address_hash(address_hash), - {:ok, address} <- Chain.hash_to_address(hash) do - {:ok, %{}, assign(socket, :current_address, address)} + def join("rewards:" <> address_hash_string, _params, socket) do + case valid_address_hash_and_not_restricted_access?(address_hash_string) do + :ok -> + {:ok, address_hash} = Chain.string_to_address_hash(address_hash_string) + {:ok, address} = Chain.hash_to_address(address_hash) + {:ok, %{}, assign(socket, :current_address, address)} + + reason -> + {:error, %{reason: reason}} end end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/address_transaction_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/address_transaction_controller.ex index 9f112f1ec32b..23edfed55c0e 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/address_transaction_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/address_transaction_controller.ex @@ -11,7 +11,7 @@ defmodule BlockScoutWeb.AddressTransactionController do import Explorer.Chain.SmartContract, only: [burn_address_hash_string: 0] alias 
BlockScoutWeb.{AccessHelper, CaptchaHelper, Controller, TransactionView} - alias BlockScoutWeb.API.V2.CSVExportController + alias BlockScoutWeb.API.V2.{ApiView, CSVExportController} alias Explorer.{Chain, Market} alias Explorer.Chain.Address @@ -205,7 +205,10 @@ defmodule BlockScoutWeb.AddressTransactionController do not_found(conn) {:recaptcha, false} -> - not_found(conn) + conn + |> put_status(:forbidden) + |> put_view(ApiView) + |> render(:message, %{message: "Invalid reCAPTCHA response"}) end end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/contract_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/contract_controller.ex index ae1aa0a65165..a18106641e47 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/contract_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/contract_controller.ex @@ -32,6 +32,7 @@ defmodule BlockScoutWeb.API.RPC.ContractController do @addresses_required "Query parameter contractaddresses is required" @contract_not_found "Smart-contract not found or is not verified" @restricted_access "Access to this address is restricted" + @not_a_smart_contract "The address is not a smart contract" @addresses_limit 10 @api_true [api?: true] @@ -71,6 +72,8 @@ defmodule BlockScoutWeb.API.RPC.ContractController do {:format, {:ok, casted_address_hash}} <- to_address_hash(address_hash), {:params, external_libraries} <- {:params, fetch_external_libraries(params)}, + {:not_a_smart_contract, bytecode} when bytecode != "0x" <- + {:not_a_smart_contract, Chain.smart_contract_bytecode(casted_address_hash, @api_true)}, {:publish, {:ok, _}} <- {:publish, Publisher.publish(address_hash, fetched_params, external_libraries)} do address = Contracts.address_hash_to_address_with_source_code(casted_address_hash) @@ -107,6 +110,9 @@ defmodule BlockScoutWeb.API.RPC.ContractController do {:params, {:error, error}} -> render(conn, :error, error: error) + + {:not_a_smart_contract, _} -> + render(conn, :error, error: @not_a_smart_contract, data: @not_a_smart_contract) end end @@ -147,8 +153,10 @@ defmodule BlockScoutWeb.API.RPC.ContractController do ) do with {:check_verified_status, false} <- {:check_verified_status, SmartContract.verified_with_full_match?(address_hash)}, - {:format, {:ok, _casted_address_hash}} <- to_address_hash(address_hash), + {:format, {:ok, casted_address_hash}} <- to_address_hash(address_hash), {:params, {:ok, fetched_params}} <- {:params, fetch_verifysourcecode_params(params)}, + {:not_a_smart_contract, bytecode} when bytecode != "0x" <- + {:not_a_smart_contract, Chain.smart_contract_bytecode(casted_address_hash, @api_true)}, uid <- VerificationStatus.generate_uid(address_hash) do Que.add(SolidityPublisherWorker, {"json_api", fetched_params, json_input, uid}) @@ -162,6 +170,9 @@ defmodule BlockScoutWeb.API.RPC.ContractController do {:params, {:error, error}} -> render(conn, :error, error: error, data: error) + + {:not_a_smart_contract, _} -> + render(conn, :error, error: @not_a_smart_contract, data: @not_a_smart_contract) end end @@ -178,8 +189,10 @@ defmodule BlockScoutWeb.API.RPC.ContractController do ) do with {:check_verified_status, false} <- {:check_verified_status, SmartContract.verified_with_full_match?(address_hash)}, - {:format, {:ok, _casted_address_hash}} <- to_address_hash(address_hash), + {:format, {:ok, casted_address_hash}} <- to_address_hash(address_hash), {:params, {:ok, fetched_params}} <- {:params, 
fetch_verifysourcecode_solidity_single_file_params(params)}, + {:not_a_smart_contract, bytecode} when bytecode != "0x" <- + {:not_a_smart_contract, Chain.smart_contract_bytecode(casted_address_hash, @api_true)}, external_libraries <- fetch_external_libraries_for_verifysourcecode(params), uid <- VerificationStatus.generate_uid(address_hash) do Que.add(SolidityPublisherWorker, {"flattened_api", fetched_params, external_libraries, uid}) @@ -194,6 +207,9 @@ defmodule BlockScoutWeb.API.RPC.ContractController do {:params, {:error, error}} -> render(conn, :error, error: error, data: error) + + {:not_a_smart_contract, _} -> + render(conn, :error, error: @not_a_smart_contract, data: @not_a_smart_contract) end end @@ -413,6 +429,8 @@ defmodule BlockScoutWeb.API.RPC.ContractController do def verify_vyper_contract(conn, %{"addressHash" => address_hash} = params) do with {:params, {:ok, fetched_params}} <- {:params, fetch_vyper_verify_params(params)}, {:format, {:ok, casted_address_hash}} <- to_address_hash(address_hash), + {:not_a_smart_contract, bytecode} when bytecode != "0x" <- + {:not_a_smart_contract, Chain.smart_contract_bytecode(casted_address_hash, @api_true)}, {:publish, {:ok, _}} <- {:publish, VyperPublisher.publish(address_hash, fetched_params)} do address = Contracts.address_hash_to_address_with_source_code(casted_address_hash) @@ -441,6 +459,9 @@ defmodule BlockScoutWeb.API.RPC.ContractController do {:params, {:error, error}} -> render(conn, :error, error: error) + + {:not_a_smart_contract, _} -> + render(conn, :error, error: @not_a_smart_contract, data: @not_a_smart_contract) end end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/helper.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/helper.ex index d19627e62ec5..a533ff8b57df 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/helper.ex @@ -49,12 +49,7 @@ defmodule BlockScoutWeb.API.RPC.Helper do |> String.split(",") |> Enum.take(limit) |> Enum.uniq() - |> Enum.map(fn address -> - case Chain.string_to_address_hash(address) do - {:ok, address_hash} -> address_hash - _ -> nil - end - end) + |> Enum.map(&Chain.string_to_address_hash_or_nil/1) |> Enum.reject(&is_nil/1) end end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v1/health_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v1/health_controller.ex index 991c4936ca10..d0609634d62d 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v1/health_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v1/health_controller.ex @@ -4,30 +4,39 @@ defmodule BlockScoutWeb.API.V1.HealthController do import Plug.Conn alias Explorer.Chain + alias Explorer.Migrator.MigrationStatus alias Timex.Duration @ok_message "OK" + @backfill_multichain_search_db_migration_name "backfill_multichain_search_db" - def health(conn, _) do + # todo: remove *_old functions in the next releases when dependent parties start using new endpoint /api/health + def health_old(conn, params) do + health_old(conn, params, Application.get_env(:nft_media_handler, :standalone_media_worker?)) + end + + defp health_old(conn, _params, false) do with {:ok, number, timestamp} <- Chain.last_db_block_status(), {:ok, cache_number, cache_timestamp} <- Chain.last_cache_block_status() do - send_resp(conn, :ok, result(number, timestamp, cache_number, cache_timestamp)) + send_resp(conn, :ok, result_old(number, 
timestamp, cache_number, cache_timestamp)) else - status -> send_resp(conn, :internal_server_error, error(status)) + status -> send_resp(conn, :internal_server_error, error_old(status)) end end - def liveness(conn, _) do - send_resp(conn, :ok, @ok_message) - end - - def readiness(conn, _) do - Chain.last_db_block_status() - - send_resp(conn, :ok, @ok_message) + defp health_old(conn, _params, true) do + send_resp( + conn, + :ok, + %{ + "healthy" => true, + "data" => %{} + } + |> Jason.encode!() + ) end - def result(number, timestamp, cache_number, cache_timestamp) do + def result_old(number, timestamp, cache_number, cache_timestamp) do %{ "healthy" => true, "data" => %{ @@ -40,7 +49,7 @@ defmodule BlockScoutWeb.API.V1.HealthController do |> Jason.encode!() end - def error({:error, :no_blocks}) do + defp error_old({:error, :no_blocks}) do %{ "healthy" => false, "error_code" => 5002, @@ -50,7 +59,7 @@ defmodule BlockScoutWeb.API.V1.HealthController do |> Jason.encode!() end - def error({:error, number, timestamp}) do + defp error_old({:stale, number, timestamp}) do healthy_blocks_period = Application.get_env(:explorer, :healthy_blocks_period) healthy_blocks_period_formatted = @@ -72,4 +81,205 @@ defmodule BlockScoutWeb.API.V1.HealthController do } |> Jason.encode!() end + + @doc """ + Handles health checks for the application. + + This endpoint is used to determine if the application is healthy and operational. It performs checks on the status of the blockchain data in both the database and the cache. + + ## Parameters + + - conn: The connection struct representing the current HTTP connection. + - params: A map of parameters (not used in this function). + + ## Returns + + - The updated connection struct with the response sent. + + If the application is not running in standalone media worker mode, it retrieves the latest block number and timestamp from both the database and the cache. It then sends an HTTP 200 response with this information. + """ + @spec health(Plug.Conn.t(), map()) :: Plug.Conn.t() + def health(conn, params) do + health(conn, params, Application.get_env(:nft_media_handler, :standalone_media_worker?)) + end + + defp health(conn, _, false) do + with {:ok, latest_block_number_from_db, latest_block_timestamp_from_db} <- Chain.last_db_block_status(), + {:ok, latest_block_number_from_cache, latest_block_timestamp_from_cache} <- Chain.last_cache_block_status() do + send_resp( + conn, + :ok, + result( + latest_block_number_from_db, + latest_block_timestamp_from_db, + latest_block_number_from_cache, + latest_block_timestamp_from_cache + ) + ) + else + status -> send_resp(conn, :internal_server_error, encoded_error(status)) + end + end + + defp health(conn, _params, true) do + send_resp( + conn, + :ok, + %{ + "healthy" => true, + "data" => %{} + } + |> Jason.encode!() + ) + end + + @doc """ + Handles liveness checks for the application. + + This endpoint is used to determine if the application is running and able to handle requests. + It responds with an HTTP 200 status and a predefined message. + + ## Parameters + + - conn: The connection struct representing the current HTTP connection. + - _: A map of parameters (not used in this function). + + ## Returns + + - The updated connection struct with the response sent. + """ + @spec liveness(Plug.Conn.t(), map()) :: Plug.Conn.t() + def liveness(conn, _) do + send_resp(conn, :ok, @ok_message) + end + + @doc """ + Handles readiness checks for the application. 
+ + This endpoint is used to determine if the application is ready to handle incoming requests. + It performs a conditional check on the application's environment configuration and responds with an HTTP 200 status and a predefined message. + + In the case of indexer/API application mode, it performs a request to the DB to get the latest block. + + ## Parameters + + - conn: The connection struct representing the current HTTP connection. + - _: A map of parameters (not used in this function). + + ## Returns + + - The updated connection struct with the response sent. + """ + @spec readiness(Plug.Conn.t(), map()) :: Plug.Conn.t() + def readiness(conn, _) do + unless Application.get_env(:nft_media_handler, :standalone_media_worker?) do + Chain.last_db_block_status() + end + + send_resp(conn, :ok, @ok_message) + end + + @doc """ + Handles the request to check the status of the multichain search database export. + + Fetches the migration status for the multichain search database export and returns a JSON response + indicating whether the migration has finished, along with any associated metadata. + + ## Parameters + - conn: The connection struct. + - _params: The request parameters (not used in this function). + + ## Response + - A JSON response with the migration status and metadata. + + ## Examples + + iex> conn = %Plug.Conn{} + iex> multichain_search_db_export(conn, %{}) + %Plug.Conn{status: 200, resp_body: "{\"migration\":{\"finished\":false,\"meta\":{\"max_block_number\":6684354}}}"} + """ + @spec multichain_search_db_export(Plug.Conn.t(), map()) :: Plug.Conn.t() + def multichain_search_db_export(conn, _) do + case MigrationStatus.fetch(@backfill_multichain_search_db_migration_name) do + %{status: status, meta: meta} = _migration -> + response = + %{ + migration: %{ + finished: status == "completed", + metadata: meta + } + } + |> Jason.encode!() + + send_resp(conn, :ok, response) + + _ -> + send_resp(conn, :internal_server_error, Jason.encode!(%{error: "Failed to fetch migration status"})) + end + end + + defp result( + latest_block_number_from_db, + latest_block_timestamp_from_db, + latest_block_number_from_cache, + latest_block_timestamp_from_cache + ) do + %{ + healthy: true, + metadata: %{ + latest_block: %{ + db: %{ + number: to_string(latest_block_number_from_db), + timestamp: to_string(latest_block_timestamp_from_db) + }, + cache: %{ + number: to_string(latest_block_number_from_cache), + timestamp: to_string(latest_block_timestamp_from_cache) + } + } + } + } + |> Jason.encode!() + end + + defp encoded_error({:error, :no_blocks}) do + %{ + healthy: false, + error: error(5002, "There are no blocks in the DB.") + } + |> Jason.encode!() + end + + defp encoded_error({:stale, number, timestamp}) do + healthy_blocks_period = Application.get_env(:explorer, :healthy_blocks_period) + + healthy_blocks_period_minutes_formatted = + healthy_blocks_period + |> Duration.from_milliseconds() + |> Duration.to_minutes() + |> trunc() + + %{ + healthy: false, + error: + error( + 5001, + "There are no new blocks in the DB for the last #{healthy_blocks_period_minutes_formatted} mins. Check the healthiness of the JSON RPC archive node or the DB."
+ ), + metadata: %{ + latest_block: %{ + number: to_string(number), + timestamp: to_string(timestamp) + } + } + } + |> Jason.encode!() + end + + defp error(code, message) do + %{ + code: code, + message: message + } + end end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex index db7353cf8b02..1ade04a7d9ea 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex @@ -511,9 +511,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do token_balances: :token_balances_count, logs: :logs_count, withdrawals: :withdrawals_count, - # todo: support of 2 props in API endpoint is for compatibility with the current version of frontend. - # It should be ultimately removed. - internal_transactions: [:internal_transactions_count, :internal_txs_count], + internal_transactions: :internal_transactions_count, celo_election_rewards: :celo_election_rewards_count } @@ -525,15 +523,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do |> Map.fetch(counter_name) |> case do {:ok, json_field_name} -> - # todo: array-type value processing here is temporary. Please remove it with updating frontend to the new version. - if is_list(json_field_name) do - # credo:disable-for-next-line - Enum.reduce(json_field_name, acc, fn field_name, acc2 -> - Map.put(acc2, field_name, counter_value) - end) - else - Map.put(acc, json_field_name, counter_value) - end + Map.put(acc, json_field_name, counter_value) :error -> acc diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/advanced_filter_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/advanced_filter_controller.ex index cfb074a7f8d1..8c70b87e74e8 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/advanced_filter_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/advanced_filter_controller.ex @@ -194,8 +194,7 @@ defmodule BlockScoutWeb.API.V2.AdvancedFilterController do defp extract_filters(params) do [ - # TODO: remove when frontend is adopted to new naming - transaction_types: prepare_transaction_types(params["transaction_types"] || params["tx_types"]), + transaction_types: prepare_transaction_types(params["transaction_types"]), methods: params["methods"] |> prepare_methods(), age: prepare_age(params["age_from"], params["age_to"]), from_address_hashes: @@ -347,6 +346,7 @@ defmodule BlockScoutWeb.API.V2.AdvancedFilterController do defp paging_options(_), do: [paging_options: default_paging_options()] defp parse_nullable_integer_paging_parameter(""), do: {:ok, nil} + defp parse_nullable_integer_paging_parameter("null"), do: {:ok, nil} defp parse_nullable_integer_paging_parameter(string) when is_binary(string) do case Integer.parse(string) do diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/arbitrum_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/arbitrum_controller.ex index 2c2a538ac7d7..54a18765eff3 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/arbitrum_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/arbitrum_controller.ex @@ -11,8 +11,13 @@ defmodule BlockScoutWeb.API.V2.ArbitrumController do import Explorer.Chain.Arbitrum.DaMultiPurposeRecord.Helper, only: [calculate_celestia_data_key: 2] + alias 
Explorer.Arbitrum.ClaimRollupMessage + alias Explorer.Chain.Arbitrum.{L1Batch, Message} + alias Explorer.Chain.Hash alias Explorer.PagingOptions - alias Explorer.Chain.Arbitrum.{L1Batch, Message, Reader} + + alias Explorer.Chain.Arbitrum.Reader.API.Messages, as: MessagesReader + alias Explorer.Chain.Arbitrum.Reader.API.Settlement, as: SettlementReader action_fallback(BlockScoutWeb.API.V2.FallbackController) @@ -26,11 +31,10 @@ options = params |> paging_options() - |> Keyword.put(:api?, true) {messages, next_page} = direction - |> Reader.messages(options) + |> MessagesReader.messages(options) |> split_list_by_page() next_page_params = @@ -38,7 +42,7 @@ next_page, messages, params, - fn %Message{message_id: msg_id} -> %{"id" => msg_id} end + fn %Message{message_id: message_id} -> %{"id" => message_id} end ) conn @@ -56,7 +60,65 @@ def messages_count(conn, %{"direction" => direction} = _params) do conn |> put_status(200) - |> render(:arbitrum_messages_count, %{count: Reader.messages_count(direction, api?: true)}) + |> render(:arbitrum_messages_count, %{count: MessagesReader.messages_count(direction)}) + end + + @doc """ + Function to handle GET requests to `/api/v2/arbitrum/messages/claim/:message_id` endpoint. + """ + @spec claim_message(Plug.Conn.t(), map()) :: Plug.Conn.t() + def claim_message(conn, %{"message_id" => message_id} = _params) do + message_id = String.to_integer(message_id) + + case ClaimRollupMessage.claim(message_id) do + {:ok, [contract_address: outbox_contract_address, calldata: calldata]} -> + conn + |> put_status(200) + |> render(:arbitrum_claim_message, %{calldata: calldata, address: outbox_contract_address}) + + {:error, :not_found} -> + conn + |> put_status(:not_found) + |> render(:message, %{message: "cannot find the requested withdrawal"}) + + {:error, :sent} -> + conn + |> put_status(:bad_request) + |> render(:message, %{message: "withdrawal is not confirmed yet"}) + + {:error, :initiated} -> + conn + |> put_status(:bad_request) + |> render(:message, %{message: "withdrawal has just been initiated, please wait a bit"}) + + {:error, :relayed} -> + conn + |> put_status(:bad_request) + |> render(:message, %{message: "withdrawal was already executed"}) + + {:error, :internal_error} -> + conn + |> put_status(:not_found) + |> render(:message, %{message: "an internal error occurred"}) + end + end + + @doc """ + Function to handle GET requests to `/api/v2/arbitrum/messages/withdrawals/:transaction_hash` endpoint.
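+
+  A hypothetical call sketch (the zero transaction hash is a placeholder, not real data):
+
+      iex> conn = %Plug.Conn{}
+      iex> withdrawals(conn, %{"transaction_hash" => "0x" <> String.duplicate("0", 64)})
+      %Plug.Conn{status: 200}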
+ """ + @spec withdrawals(Plug.Conn.t(), map()) :: Plug.Conn.t() + def withdrawals(conn, %{"transaction_hash" => transaction_hash} = _params) do + hash = + case Hash.Full.cast(transaction_hash) do + {:ok, address} -> address + _ -> nil + end + + withdrawals = ClaimRollupMessage.transaction_to_withdrawals(hash) + + conn + |> put_status(200) + |> render(:arbitrum_withdrawals, %{withdrawals: withdrawals}) end @doc """ @@ -64,11 +126,7 @@ defmodule BlockScoutWeb.API.V2.ArbitrumController do """ @spec batch(Plug.Conn.t(), map()) :: Plug.Conn.t() def batch(conn, %{"batch_number" => batch_number} = _params) do - case Reader.batch( - batch_number, - necessity_by_association: @batch_necessity_by_association, - api?: true - ) do + case SettlementReader.batch(batch_number, necessity_by_association: @batch_necessity_by_association) do {:ok, batch} -> conn |> put_status(200) @@ -82,16 +140,24 @@ defmodule BlockScoutWeb.API.V2.ArbitrumController do @doc """ Function to handle GET requests to `/api/v2/arbitrum/batches/da/:data_hash` or `/api/v2/arbitrum/batches/da/:transaction_commitment/:height` endpoints. + + For AnyTrust data hash, the function can be called in two ways: + 1. Without type parameter - returns the most recent batch for the data hash + 2. With type=all parameter - returns all batches for the data hash + + ## Parameters + - `conn`: The connection struct + - `params`: A map that may contain: + * `data_hash` - The AnyTrust data hash + * `transaction_commitment` and `height` - For Celestia data + * `type` - Optional parameter to specify return type ("all" for all batches) """ @spec batch_by_data_availability_info(Plug.Conn.t(), map()) :: Plug.Conn.t() - def batch_by_data_availability_info(conn, %{"data_hash" => data_hash} = _params) do + def batch_by_data_availability_info(conn, %{"data_hash" => data_hash} = params) do # In case of AnyTrust, `data_key` is the hash of the data itself - case Reader.get_da_record_by_data_key(data_hash, api?: true) do - {:ok, {batch_number, _}} -> - batch(conn, %{"batch_number" => batch_number}) - - {:error, :not_found} = res -> - res + case Map.get(params, "type") do + "all" -> all_batches_by_data_availability_info(conn, data_hash, params) + _ -> one_batch_by_data_availability_info(conn, data_hash, params) end end @@ -102,7 +168,7 @@ defmodule BlockScoutWeb.API.V2.ArbitrumController do # In case of Celestia, `data_key` is the hash of the height and the commitment hash with {:ok, :hash, transaction_commitment_hash} <- parse_block_hash_or_number_param(transaction_commitment), key <- calculate_celestia_data_key(height, transaction_commitment_hash) do - case Reader.get_da_record_by_data_key(key, api?: true) do + case SettlementReader.get_da_record_by_data_key(key) do {:ok, {batch_number, _}} -> batch(conn, %{"batch_number" => batch_number}) @@ -115,6 +181,47 @@ defmodule BlockScoutWeb.API.V2.ArbitrumController do end end + # Gets the most recent batch associated with the given DA blob hash. 
+ # + # ## Parameters + # - `conn`: The connection struct + # - `data_hash`: The AnyTrust data hash + # - `params`: The original request parameters + # + # ## Returns + # - The connection struct with rendered response + @spec one_batch_by_data_availability_info(Plug.Conn.t(), binary(), map()) :: Plug.Conn.t() + defp one_batch_by_data_availability_info(conn, data_hash, _params) do + case SettlementReader.get_da_record_by_data_key(data_hash) do + {:ok, {batch_number, _}} -> + batch(conn, %{"batch_number" => batch_number}) + + {:error, :not_found} = res -> + res + end + end + + # Gets all batches associated with the given DA blob hash. + # + # ## Parameters + # - `conn`: The connection struct + # - `data_hash`: The AnyTrust data hash + # - `params`: The original request parameters (for pagination) + # + # ## Returns + # - The connection struct with rendered response + @spec all_batches_by_data_availability_info(Plug.Conn.t(), binary(), map()) :: Plug.Conn.t() + defp all_batches_by_data_availability_info(conn, data_hash, params) do + case SettlementReader.get_all_da_records_by_data_key(data_hash) do + {:ok, {batch_numbers, _}} -> + params = Map.put(params, "batch_numbers", batch_numbers) + batches(conn, params) + + {:error, :not_found} = res -> + res + end + end + @doc """ Function to handle GET requests to `/api/v2/arbitrum/batches/count` endpoint. """ @@ -122,20 +229,30 @@ defmodule BlockScoutWeb.API.V2.ArbitrumController do def batches_count(conn, _params) do conn |> put_status(200) - |> render(:arbitrum_batches_count, %{count: Reader.batches_count(api?: true)}) + |> render(:arbitrum_batches_count, %{count: SettlementReader.batches_count()}) end @doc """ Function to handle GET requests to `/api/v2/arbitrum/batches` endpoint. + + The function can be called in two ways: + 1. Without batch_numbers parameter - returns batches according to pagination parameters + 2. With batch_numbers parameter - returns only batches with specified numbers, still applying pagination + + ## Parameters + - `conn`: The connection struct + - `params`: A map that may contain: + * `batch_numbers` - Optional list of specific batch numbers to retrieve + * Standard pagination parameters """ @spec batches(Plug.Conn.t(), map()) :: Plug.Conn.t() def batches(conn, params) do {batches, next_page} = params |> paging_options() + |> maybe_add_batch_numbers(params) |> Keyword.put(:necessity_by_association, @batch_necessity_by_association) - |> Keyword.put(:api?, true) - |> Reader.batches() + |> SettlementReader.batches() |> split_list_by_page() next_page_params = @@ -154,6 +271,21 @@ defmodule BlockScoutWeb.API.V2.ArbitrumController do }) end + # Adds batch_numbers to options if they are present in params. + # + # ## Parameters + # - `options`: The keyword list of options to potentially extend + # - `params`: The params map that may contain batch_numbers + # + # ## Returns + # - The options keyword list, potentially extended with batch_numbers + @spec maybe_add_batch_numbers(Keyword.t(), map()) :: Keyword.t() + defp maybe_add_batch_numbers(options, %{"batch_numbers" => batch_numbers}) when is_list(batch_numbers) do + Keyword.put(options, :batch_numbers, batch_numbers) + end + + defp maybe_add_batch_numbers(options, _params), do: options + @doc """ Function to handle GET requests to `/api/v2/main-page/arbitrum/batches/committed` endpoint. 
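  A hypothetical call sketch (the response body is omitted):

      iex> conn = %Plug.Conn{}
      iex> batches_committed(conn, %{})
      %Plug.Conn{status: 200}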
""" @@ -162,9 +294,8 @@ defmodule BlockScoutWeb.API.V2.ArbitrumController do batches = [] |> Keyword.put(:necessity_by_association, @batch_necessity_by_association) - |> Keyword.put(:api?, true) |> Keyword.put(:committed?, true) - |> Reader.batches() + |> SettlementReader.batches() conn |> put_status(200) @@ -182,7 +313,7 @@ defmodule BlockScoutWeb.API.V2.ArbitrumController do end defp batch_latest_number do - case Reader.batch(:latest, api?: true) do + case SettlementReader.batch(:latest) do {:ok, batch} -> batch.number {:error, :not_found} -> 0 end @@ -193,7 +324,7 @@ defmodule BlockScoutWeb.API.V2.ArbitrumController do """ @spec recent_messages_to_l2(Plug.Conn.t(), map()) :: Plug.Conn.t() def recent_messages_to_l2(conn, _params) do - messages = Reader.relayed_l1_to_l2_messages(paging_options: %PagingOptions{page_size: 6}, api?: true) + messages = MessagesReader.relayed_l1_to_l2_messages(paging_options: %PagingOptions{page_size: 6}) conn |> put_status(200) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/block_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/block_controller.ex index 2fb34766ab51..f5bef7fd0197 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/block_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/block_controller.ex @@ -31,7 +31,7 @@ defmodule BlockScoutWeb.API.V2.BlockController do } alias Explorer.Chain - alias Explorer.Chain.Arbitrum.Reader, as: ArbitrumReader + alias Explorer.Chain.Arbitrum.Reader.API.Settlement, as: ArbitrumSettlementReader alias Explorer.Chain.Celo.ElectionReward, as: CeloElectionReward alias Explorer.Chain.Celo.EpochReward, as: CeloEpochReward alias Explorer.Chain.Celo.Reader, as: CeloReader @@ -193,11 +193,10 @@ defmodule BlockScoutWeb.API.V2.BlockController do params |> select_block_type() |> Keyword.merge(paging_options(params)) - |> Keyword.merge(@api_true) {blocks, next_page} = batch_number - |> ArbitrumReader.batch_blocks(full_options) + |> ArbitrumSettlementReader.batch_blocks(full_options) |> split_list_by_page() next_page_params = next_page |> next_page_params(blocks, delete_parameters_from_next_page_params(params)) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/fallback_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/fallback_controller.ex index 127bfe7840ed..a1ef993df7c5 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/fallback_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/fallback_controller.ex @@ -33,6 +33,7 @@ defmodule BlockScoutWeb.API.V2.FallbackController do @transaction_interpreter_service_disabled "Transaction Interpretation Service is disabled" @disabled "API endpoint is disabled" @service_disabled "Service is disabled" + @not_a_smart_contract "Address is not a smart-contract" def call(conn, {:format, _params}) do Logger.error(fn -> @@ -310,6 +311,13 @@ defmodule BlockScoutWeb.API.V2.FallbackController do |> render(:message, %{message: @service_disabled}) end + def call(conn, {:not_a_smart_contract, _}) do + conn + |> put_status(:not_found) + |> put_view(ApiView) + |> render(:message, %{message: @not_a_smart_contract}) + end + def call(conn, {code, response}) when is_integer(code) do conn |> put_status(code) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/optimism_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/optimism_controller.ex index 
b74df7c614f3..ee8998b20a49 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/optimism_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/optimism_controller.ex @@ -82,17 +82,30 @@ defmodule BlockScoutWeb.API.V2.OptimismController do Task.async(fn -> l2_block_number_from = TransactionBatch.edge_l2_block_number(fs.id, :min) l2_block_number_to = TransactionBatch.edge_l2_block_number(fs.id, :max) - transaction_count = Transaction.transaction_count_for_block_range(l2_block_number_from..l2_block_number_to) + + l2_block_range = + if not is_nil(l2_block_number_from) and not is_nil(l2_block_number_to) do + l2_block_number_from..l2_block_number_to + end + + # credo:disable-for-lines:2 Credo.Check.Refactor.Nesting + transaction_count = + case l2_block_range do + nil -> 0 + range -> Transaction.transaction_count_for_block_range(range) + end + {batch_data_container, _} = FrameSequenceBlob.list(fs.id, api?: true) fs - |> Map.put(:l2_block_range, l2_block_number_from..l2_block_number_to) + |> Map.put(:l2_block_range, l2_block_range) |> Map.put(:transaction_count, transaction_count) |> Map.put(:batch_data_container, batch_data_container) end) end) |> Task.yield_many(:infinity) |> Enum.map(fn {_task, {:ok, item}} -> item end) + |> Enum.reject(&is_nil(&1.l2_block_range)) conn |> put_status(200) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/proxy/account_abstraction_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/proxy/account_abstraction_controller.ex index ce77a28fd93b..8deb4c609579 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/proxy/account_abstraction_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/proxy/account_abstraction_controller.ex @@ -181,12 +181,7 @@ defmodule BlockScoutWeb.API.V2.Proxy.AccountAbstractionController do address_hash_strings |> Enum.reject(&is_nil/1) |> Enum.uniq() - |> Enum.map(fn hash_string -> - case Chain.string_to_address_hash(hash_string) do - {:ok, hash} -> hash - _ -> nil - end - end) + |> Enum.map(&Chain.string_to_address_hash_or_nil/1) |> Enum.reject(&is_nil/1) end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/search_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/search_controller.ex index af3324ad1052..119c39aeb695 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/search_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/search_controller.ex @@ -1,7 +1,7 @@ defmodule BlockScoutWeb.API.V2.SearchController do use Phoenix.Controller - import BlockScoutWeb.Chain, only: [paging_options: 1, next_page_params: 3, split_list_by_page: 1, from_param: 1] + import BlockScoutWeb.Chain, only: [from_param: 1] import Explorer.MicroserviceInterfaces.BENS, only: [maybe_preload_ens_info_to_search_results: 1] alias Explorer.Chain.Search @@ -10,26 +10,11 @@ defmodule BlockScoutWeb.API.V2.SearchController do @api_true [api?: true] @min_query_length 3 - def search(conn, %{"q" => query}) when byte_size(query) < @min_query_length do - conn - |> put_status(200) - |> render(:search_results, %{ - search_results: [], - next_page_params: nil - }) - end - def search(conn, %{"q" => query} = params) do - [paging_options: paging_options] = paging_options(params) - offset = (max(paging_options.page_number, 1) - 1) * paging_options.page_size + [paging_options: paging_options] = Search.parse_paging_options(params) - search_results_plus_one 
= - paging_options - |> Search.joint_search(offset, query, @api_true) - - {search_results, next_page} = split_list_by_page(search_results_plus_one) - - next_page_params = next_page_params(next_page, search_results, params) + {search_results, next_page_params} = + paging_options |> Search.joint_search(query, @api_true) conn |> put_status(200) @@ -39,12 +24,6 @@ defmodule BlockScoutWeb.API.V2.SearchController do }) end - def check_redirect(conn, %{"q" => query}) when byte_size(query) < @min_query_length do - conn - |> put_status(200) - |> render(:search_results, %{result: {:error, :not_found}}) - end - def check_redirect(conn, %{"q" => query}) do result = query diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/stats_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/stats_controller.ex index 9a00ea3b7065..bfe229ba620d 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/stats_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/stats_controller.ex @@ -110,8 +110,7 @@ defmodule BlockScoutWeb.API.V2.StatsController do transaction_history_data = date_range |> Enum.map(fn row -> - # todo: keep `tx_count` for compatibility with frontend and remove when new frontend is bound to `transaction_count` property - %{date: row.date, transaction_count: row.number_of_transactions, tx_count: row.number_of_transactions} + %{date: row.date, transaction_count: row.number_of_transactions} end) json(conn, %{ diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex index 42b17d3e76a0..82d6ed0b536b 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/token_controller.ex @@ -370,6 +370,6 @@ defmodule BlockScoutWeb.API.V2.TokenController do token_instance, token ) do - %{token_instance | token: token} + %Instance{token_instance | token: token} end end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex index 668cf6fa1945..9c4f2c962421 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex @@ -40,7 +40,7 @@ defmodule BlockScoutWeb.API.V2.TransactionController do alias BlockScoutWeb.MicroserviceInterfaces.TransactionInterpretation, as: TransactionInterpretationService alias BlockScoutWeb.Models.TransactionStateHelper alias Explorer.{Chain, PagingOptions, Repo} - alias Explorer.Chain.Arbitrum.Reader, as: ArbitrumReader + alias Explorer.Chain.Arbitrum.Reader.API.Settlement, as: ArbitrumSettlementReader alias Explorer.Chain.Beacon.Reader, as: BeaconReader alias Explorer.Chain.{Hash, InternalTransaction, Transaction} alias Explorer.Chain.Optimism.TransactionBatch, as: OptimismTransactionBatch @@ -252,7 +252,7 @@ defmodule BlockScoutWeb.API.V2.TransactionController do """ @spec arbitrum_batch(Plug.Conn.t(), map()) :: Plug.Conn.t() def arbitrum_batch(conn, params) do - handle_batch_transactions(conn, params, &ArbitrumReader.batch_transactions/2) + handle_batch_transactions(conn, params, &ArbitrumSettlementReader.batch_transactions/2) end @doc """ diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/verification_controller.ex 
b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/verification_controller.ex index 89e0c722a5c1..c0ac51574435 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/verification_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/verification_controller.ex @@ -395,6 +395,8 @@ defmodule BlockScoutWeb.API.V2.VerificationController do defp validate_address(%{"address_hash" => address_hash_string} = params) do with {:format, {:ok, address_hash}} <- {:format, Chain.string_to_address_hash(address_hash_string)}, + {:not_a_smart_contract, bytecode} when bytecode != "0x" <- + {:not_a_smart_contract, Chain.smart_contract_bytecode(address_hash, @api_true)}, {:ok, false} <- AccessHelper.restricted_access?(address_hash_string, params), {:already_verified, false} <- {:already_verified, SmartContract.verified_with_full_match?(address_hash, @api_true)} do diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/chain_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/chain_controller.ex index a867058d4b5a..191515467ef0 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/chain_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/chain_controller.ex @@ -88,11 +88,10 @@ defmodule BlockScoutWeb.ChainController do def token_autocomplete(conn, %{"q" => term} = params) when is_binary(term) do [paging_options: paging_options] = paging_options(params) - offset = (max(paging_options.page_number, 1) - 1) * paging_options.page_size - results = + {results, _} = paging_options - |> Search.joint_search(offset, term) + |> Search.joint_search(term) encoded_results = results diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/search_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/search_controller.ex index a797076826b6..c8292be760f6 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/search_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/search_controller.ex @@ -1,36 +1,19 @@ defmodule BlockScoutWeb.SearchController do use BlockScoutWeb, :controller - import BlockScoutWeb.Chain, only: [paging_options: 1, next_page_params: 3, split_list_by_page: 1] - alias BlockScoutWeb.{Controller, SearchView} alias Explorer.Chain.Search alias Phoenix.View - @min_query_length 3 - - def search_results(conn, %{"q" => query, "type" => "JSON"}) when byte_size(query) < @min_query_length do - json( - conn, - %{ - items: [], - next_page_path: nil - } - ) - end - def search_results(conn, %{"q" => query, "type" => "JSON"} = params) do - [paging_options: paging_options] = paging_options(params) - offset = (max(paging_options.page_number, 1) - 1) * paging_options.page_size + [paging_options: paging_options] = Search.parse_paging_options(params) - search_results_plus_one = + {search_results, next_page_params} = paging_options - |> Search.joint_search(offset, query) - - {search_results, next_page} = split_list_by_page(search_results_plus_one) + |> Search.joint_search(query) next_page_url = - case next_page_params(next_page, search_results, params) do + case next_page_params do nil -> nil diff --git a/apps/block_scout_web/lib/block_scout_web/health_endpoint.ex b/apps/block_scout_web/lib/block_scout_web/health_endpoint.ex new file mode 100644 index 000000000000..d821ad689c85 --- /dev/null +++ b/apps/block_scout_web/lib/block_scout_web/health_endpoint.ex @@ -0,0 +1,17 @@ +defmodule BlockScoutWeb.HealthEndpoint do + @moduledoc """ + Health endpoint for health checks 
when running in an indexer-only or standalone media worker setup + """ + use Phoenix.Endpoint, otp_app: :block_scout_web + + plug(BlockScoutWeb.HealthRouter) + + def init(_key, config) do + if config[:load_from_system_env] do + port = System.get_env("PORT") || raise "expected the PORT environment variable to be set" + {:ok, Keyword.put(config, :http, [:inet6, port: port])} + else + {:ok, config} + end + end +end diff --git a/apps/block_scout_web/lib/block_scout_web/microservice_interfaces/transaction_interpretation.ex b/apps/block_scout_web/lib/block_scout_web/microservice_interfaces/transaction_interpretation.ex index a2cc867cfa5e..d02b225a860b 100644 --- a/apps/block_scout_web/lib/block_scout_web/microservice_interfaces/transaction_interpretation.ex +++ b/apps/block_scout_web/lib/block_scout_web/microservice_interfaces/transaction_interpretation.ex @@ -102,7 +102,7 @@ defmodule BlockScoutWeb.MicroserviceInterfaces.TransactionInterpretation do defp try_get_cached_value(hash) do with {:ok, %Response{body: body, status_code: 200}} <- HTTPoison.get(cache_url(hash)), {:ok, json} <- body |> Jason.decode() do - {:ok, json |> Map.get("response") |> Map.put("success", true)} |> preload_template_variables() + {:ok, json} |> preload_template_variables() else _ -> :no_cached_data @@ -178,8 +178,6 @@ value: transaction_with_meta.value, method: Transaction.method_name(transaction_with_meta, Transaction.format_decoded_input(decoded_input)), status: transaction_with_meta.status, - # todo: keep `tx_types` for compatibility with interpreter and remove when new interpreter is bound to `transaction_types` property - tx_types: TransactionView.transaction_types(transaction_with_meta), transaction_types: TransactionView.transaction_types(transaction_with_meta), raw_input: transaction_with_meta.input, decoded_input: decoded_input_data, diff --git a/apps/block_scout_web/lib/block_scout_web/paging_helper.ex b/apps/block_scout_web/lib/block_scout_web/paging_helper.ex index 77c7b5409d62..41b8bf60c60b 100644 --- a/apps/block_scout_web/lib/block_scout_web/paging_helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/paging_helper.ex @@ -8,7 +8,7 @@ defmodule BlockScoutWeb.PagingHelper do import Explorer.Chain.SmartContract.Proxy.Models.Implementation, only: [proxy_implementations_association: 0] alias Explorer.Chain.Stability.Validator, as: ValidatorStability - alias Explorer.Chain.Transaction + alias Explorer.Chain.{SmartContract, Transaction} alias Explorer.{Helper, PagingOptions, SortingHelper} @page_size 50 @@ -226,16 +226,13 @@ def delete_parameters_from_next_page_params(_), do: nil - def current_filter(%{"filter" => "solidity"}) do - [filter: :solidity] - end - - def current_filter(%{"filter" => "vyper"}) do - [filter: :vyper] - end - - def current_filter(%{"filter" => "yul"}) do - [filter: :yul] + def current_filter(%{"filter" => language_string}) do + SmartContract.language_string_to_atom() + |> Map.fetch(language_string) + |> case do + {:ok, language} -> [filter: language] + :error -> [] + end end def current_filter(_), do: [] diff --git a/apps/block_scout_web/lib/block_scout_web/routers/api_router.ex b/apps/block_scout_web/lib/block_scout_web/routers/api_router.ex index ece2cf66fb39..5e0f32ddf979 100644 --- a/apps/block_scout_web/lib/block_scout_web/routers/api_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/routers/api_router.ex @@ -423,6 +423,8 @@ defmodule BlockScoutWeb.Routers.ApiRouter do
if @chain_type == :arbitrum do get("/messages/:direction", V2.ArbitrumController, :messages) get("/messages/:direction/count", V2.ArbitrumController, :messages_count) + get("/messages/claim/:message_id", V2.ArbitrumController, :claim_message) + get("/messages/withdrawals/:transaction_hash", V2.ArbitrumController, :withdrawals) get("/batches", V2.ArbitrumController, :batches) get("/batches/count", V2.ArbitrumController, :batches_count) get("/batches/:batch_number", V2.ArbitrumController, :batch) @@ -477,8 +479,9 @@ defmodule BlockScoutWeb.Routers.ApiRouter do get("/celo-election-rewards-csv", AddressTransactionController, :celo_election_rewards_csv) end + # todo: remove it in the future. Path /api/health should be used instead. scope "/health" do - get("/", HealthController, :health) + get("/", HealthController, :health_old) get("/liveness", HealthController, :liveness) get("/readiness", HealthController, :readiness) end @@ -508,6 +511,14 @@ defmodule BlockScoutWeb.Routers.ApiRouter do end end + scope "/health" do + alias BlockScoutWeb.API.V1.HealthController + get("/", HealthController, :health) + get("/liveness", HealthController, :liveness) + get("/readiness", HealthController, :readiness) + get("/multichain-search-export", HealthController, :multichain_search_db_export) + end + # For backward compatibility. Should be removed scope "/" do pipe_through(:api) diff --git a/apps/block_scout_web/lib/block_scout_web/templates/transaction/_actions.html.eex b/apps/block_scout_web/lib/block_scout_web/templates/transaction/_actions.html.eex index 982434fcbd37..391b9127a168 100644 --- a/apps/block_scout_web/lib/block_scout_web/templates/transaction/_actions.html.eex +++ b/apps/block_scout_web/lib/block_scout_web/templates/transaction/_actions.html.eex @@ -87,7 +87,7 @@ <% address = if address_status == :ok, do: render_to_string(BlockScoutWeb.AddressView, "_link.html", address: address, contract: Address.smart_contract?(address), use_custom_tooltip: false, trimmed: false), else: render_to_string(BlockScoutWeb.TransactionView, "_actions_address.html", address_string: address_string, action: @action) %> <% to_address = Map.get(@action.data, "to") %> <% to_content = raw(render_to_string BlockScoutWeb.TransactionView, "_actions_to.html", address: to_address) %> - <% to = link to: address_path(BlockScoutWeb.Endpoint, :show, to_address), "data-test": "address_hash_link", do: to_content %> + <% to = link(to_content, to: address_path(BlockScoutWeb.Endpoint, :show, to_address), "data-test": "address_hash_link") %> <%= gettext("Mint of %{address} To %{to}", address: address, to: safe_to_string(to)) |> raw() %> diff --git a/apps/block_scout_web/lib/block_scout_web/templates/transaction/_decoded_input_body.html.eex b/apps/block_scout_web/lib/block_scout_web/templates/transaction/_decoded_input_body.html.eex index 3675d4c501a9..ad1d04b4f12f 100644 --- a/apps/block_scout_web/lib/block_scout_web/templates/transaction/_decoded_input_body.html.eex +++ b/apps/block_scout_web/lib/block_scout_web/templates/transaction/_decoded_input_body.html.eex @@ -50,9 +50,8 @@ <% end %> - <% value_with_links = BlockScoutWeb.ABIEncodedValueView.value_html(type, value, false)%> - <% string = template_to_string(value_with_no_links) %> -
<%= if String.length(string) > max_length do %>
- <% input = trim(max_length, string) %><%= input[:show] %>...<%= input[:hide] %>
- <% else %><%= value_with_links %><% end %>
+ <% value_with_links = BlockScoutWeb.ABIEncodedValueView.value_html(type, value, false) %>
+ <%= if String.length(value_with_no_links) > max_length do %>
+ <% input = trim(max_length, value_with_no_links) %><%= input[:show] %>...<%= input[:hide] %>
+ <% else %><%= value_with_links %><% end %>
<% end %> diff --git a/apps/block_scout_web/lib/block_scout_web/views/abi_encoded_value_view.ex b/apps/block_scout_web/lib/block_scout_web/views/abi_encoded_value_view.ex index 12bde160651b..8e57e280a660 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/abi_encoded_value_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/abi_encoded_value_view.ex @@ -7,9 +7,12 @@ defmodule BlockScoutWeb.ABIEncodedValueView do """ use BlockScoutWeb, :view + import Phoenix.LiveView.Helpers, only: [sigil_H: 2] + alias ABI.FunctionSelector alias Explorer.Chain.{Address, Hash} alias Phoenix.HTML + alias Phoenix.HTML.Safe require Logger @@ -68,7 +71,11 @@ defmodule BlockScoutWeb.ABIEncodedValueView do |> Enum.map(&do_copy_text(type, &1)) |> Enum.intersperse(", ") - ~E|[<%= values %>]| + assigns = %{values: values} + + ~H|[<%= @values %>]| + |> Safe.to_iodata() + |> List.to_string() end defp do_copy_text(_, {:dynamic, value}) do @@ -87,7 +94,11 @@ defmodule BlockScoutWeb.ABIEncodedValueView do |> Enum.map(fn {val, ind} -> do_copy_text(Enum.at(types, ind), val) end) |> Enum.intersperse(", ") - ~E|(<%= values %>)| + assigns = %{values: values} + + ~H|(<%= @values %>)| + |> Safe.to_iodata() + |> List.to_string() end defp do_copy_text(_type, value) do @@ -113,7 +124,23 @@ defmodule BlockScoutWeb.ABIEncodedValueView do spacing = String.duplicate(" ", depth * 2) delimited = Enum.intersperse(values, ",\n") - ~E|<%= spacing %>[<%= "\n" %><%= delimited %><%= "\n" %><%= spacing %>]| + assigns = %{spacing: spacing, delimited: delimited} + + elements = + Enum.reduce(delimited, "", fn value, acc -> + assigns = %{value: value} + + html = ~H|<%= raw(@value) %>| |> Safe.to_iodata() |> List.to_string() + acc <> html + end) + + (~H|<%= @spacing %>[<%= "\n" %>| + |> Safe.to_iodata() + |> List.to_string()) <> + elements <> + (~H|<%= "\n" %><%= @spacing %>]| + |> Safe.to_iodata() + |> List.to_string()) end defp do_value_html({:tuple, types}, values, no_links, _) do @@ -126,17 +153,28 @@ defmodule BlockScoutWeb.ABIEncodedValueView do end) delimited = Enum.intersperse(values_list, ",") - ~E|(<%= delimited %>)| + assigns = %{delimited: delimited} + + ~H|(<%= for value <- @delimited, do: raw(value) %>)| + |> Safe.to_iodata() + |> List.to_string() end defp do_value_html(type, value, no_links, depth) do spacing = String.duplicate(" ", depth * 2) - ~E|<%= spacing %><%=base_value_html(type, value, no_links)%>| - [spacing, base_value_html(type, value, no_links)] + html = base_value_html(type, value, no_links) + + assigns = %{html: html, spacing: spacing} + + ~H|<%= @spacing %><%= @html %>| + |> Safe.to_iodata() + |> List.to_string() end defp base_value_html(_, {:dynamic, value}, _no_links) do - ~E|<%= hex(value) %>| + assigns = %{value: value} + + ~H|<%= hex(@value) %>| end defp base_value_html(:address, value, no_links) do @@ -144,17 +182,24 @@ defmodule BlockScoutWeb.ABIEncodedValueView do base_value_html(:address_text, value, no_links) else address = hex(value) + path = address_path(BlockScoutWeb.Endpoint, :show, address) - ~E|<%= address %>| + assigns = %{address: address, path: path} + + ~H|<%= @address %>| end end defp base_value_html(:address_text, value, _no_links) do - ~E|<%= hex(value) %>| + assigns = %{value: value} + + ~H|<%= hex(@value) %>| end defp base_value_html(:bytes, value, _no_links) do - ~E|<%= hex(value) %>| + assigns = %{value: value} + + ~H|<%= hex(@value) %>| end defp base_value_html(_, value, _no_links), do: HTML.html_escape(value) diff --git 
a/apps/block_scout_web/lib/block_scout_web/views/access_helper.ex b/apps/block_scout_web/lib/block_scout_web/views/access_helper.ex index 440ca2efed15..c8c27436ea52 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/access_helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/access_helper.ex @@ -9,7 +9,7 @@ defmodule BlockScoutWeb.AccessHelper do alias BlockScoutWeb.API.RPC.RPCView alias BlockScoutWeb.API.V2.ApiView alias BlockScoutWeb.Routers.WebRouter.Helpers - alias Explorer.AccessHelper + alias Explorer.{AccessHelper, Chain} alias Explorer.Account.Api.Key, as: ApiKey alias Plug.Conn @@ -17,10 +17,37 @@ defmodule BlockScoutWeb.AccessHelper do require Logger + @invalid_address_hash "Invalid address hash" + @restricted_access "Restricted access" + def restricted_access?(address_hash, params) do AccessHelper.restricted_access?(address_hash, params) end + @doc """ + Checks if the given address hash string is valid and not restricted. + + ## Parameters + - address_hash_string: A string representing the address hash to be validated. + + ## Returns + - :ok if the address hash is valid and access is not restricted. + - binary with reason otherwise. + """ + @spec valid_address_hash_and_not_restricted_access?(binary()) :: :ok | binary() + def valid_address_hash_and_not_restricted_access?(address_hash_string) do + with address_hash when not is_nil(address_hash) <- Chain.string_to_address_hash_or_nil(address_hash_string), + {:ok, false} <- AccessHelper.restricted_access?(address_hash_string, %{}) do + :ok + else + nil -> + @invalid_address_hash + + {:restricted_access, true} -> + @restricted_access + end + end + def get_path(conn, path, template, address_hash) do basic_args = [conn, template, address_hash] key = get_restricted_key(conn) diff --git a/apps/block_scout_web/lib/block_scout_web/views/account/api/v2/tags_view.ex b/apps/block_scout_web/lib/block_scout_web/views/account/api/v2/tags_view.ex index 06bb0ce49fcb..50403cf0f2f1 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/account/api/v2/tags_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/account/api/v2/tags_view.ex @@ -13,8 +13,6 @@ defmodule BlockScoutWeb.Account.API.V2.TagsView do }) do %{ personal_transaction_tag: prepare_transaction_tag(personal_transaction_tag), - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `personal_transaction_tag` property - personal_tx_tag: prepare_transaction_tag(personal_transaction_tag), personal_tags: personal_tags, watchlist_names: watchlist_names, common_tags: common_tags diff --git a/apps/block_scout_web/lib/block_scout_web/views/address_contract_view.ex b/apps/block_scout_web/lib/block_scout_web/views/address_contract_view.ex index 0b91ef2cd8e6..f1fa48c76225 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/address_contract_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/address_contract_view.ex @@ -4,6 +4,7 @@ defmodule BlockScoutWeb.AddressContractView do require Logger import Explorer.Helper, only: [decode_data: 2] + import Phoenix.LiveView.Helpers, only: [sigil_H: 2] alias ABI.FunctionSelector alias Explorer.Chain @@ -11,6 +12,7 @@ defmodule BlockScoutWeb.AddressContractView do alias Explorer.Chain.SmartContract alias Explorer.Chain.SmartContract.Proxy.EIP1167 alias Explorer.SmartContract.Helper, as: SmartContractHelper + alias Phoenix.HTML.Safe def render("scripts.html", %{conn: conn}) do render_scripts(conn, "address_contract/code_highlighting.js") @@ -41,11 +43,14 @@ defmodule 
BlockScoutWeb.AddressContractView do |> Enum.zip(constructor_abi["inputs"]) |> Enum.reduce({0, "#{contract.constructor_arguments}\n\n"}, fn {val, %{"type" => type}}, {count, acc} -> formatted_val = val_to_string(val, type, conn) + assigns = %{acc: acc, count: count, type: type, formatted_val: formatted_val} {count + 1, - ~E""" - <%= acc %>Arg [<%= count %>] (<%= type %>) : <%= formatted_val %> - """} + ~H""" + <%= @acc %> Arg [<%= @count %>] (<%= @type %>) : <%= @formatted_val %> + """ + |> Safe.to_iodata() + |> List.to_string()} end) result @@ -61,7 +66,7 @@ defmodule BlockScoutWeb.AddressContractView do type =~ "address" -> address_hash = "0x" <> Base.encode16(val, case: :lower) - address = get_address(address_hash) + address = Chain.string_to_address_hash_or_nil(address_hash) get_formatted_address_data(address, address_hash, conn) @@ -82,16 +87,13 @@ defmodule BlockScoutWeb.AddressContractView do end end - defp get_address(address_hash) do - case Chain.string_to_address_hash(address_hash) do - {:ok, address} -> address - _ -> nil - end - end - defp get_formatted_address_data(address, address_hash, conn) do if address != nil do - ~E|<a href="<%= address_path(conn, :show, address) %>"><%= address_hash %></a>| + assigns = %{address: address, address_hash: address_hash, conn: conn} + + ~H""" + <a href={address_path(@conn, :show, @address)}><%= @address_hash %></a> + """ else address_hash end @@ -99,11 +101,14 @@ defmodule BlockScoutWeb.AddressContractView do def format_external_libraries(libraries, conn) do Enum.reduce(libraries, "", fn %{name: name, address_hash: address_hash}, acc -> - address = get_address(address_hash) + address = Chain.string_to_address_hash_or_nil(address_hash) + assigns = %{acc: acc, name: name, address: address, address_hash: address_hash, conn: conn} - ~E""" - <%= acc %><%= name %> : <%= get_formatted_address_data(address, address_hash, conn) %> + ~H""" + <%= @acc %><%= @name %> : <%= get_formatted_address_data(@address, @address_hash, @conn) %> """ + |> Safe.to_iodata() + |> List.to_string() end) end diff --git a/apps/block_scout_web/lib/block_scout_web/views/address_view.ex b/apps/block_scout_web/lib/block_scout_web/views/address_view.ex index fea2c32b5a2b..70a6cf5b6e76 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/address_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/address_view.ex @@ -4,6 +4,7 @@ defmodule BlockScoutWeb.AddressView do require Logger alias BlockScoutWeb.{AccessHelper, LayoutView} + alias BlockScoutWeb.API.V2.Helper, as: APIV2Helper alias Explorer.Account.CustomABI alias Explorer.{Chain, CustomContractsHelper, Repo} alias Explorer.Chain.Address.Counters @@ -177,15 +178,8 @@ defmodule BlockScoutWeb.AddressView do @doc """ Returns the primary name of an address if available. If the names association is not loaded on the address, the function preloads it.
""" - def primary_name(%Address{names: [_ | _] = address_names}) do - case Enum.find(address_names, &(&1.primary == true)) do - nil -> - %Address.Name{name: name} = Enum.at(address_names, 0) - name - - %Address.Name{name: name} -> - name - end + def primary_name(%Address{names: [_ | _]} = address) do + APIV2Helper.address_name(address) end def primary_name(%Address{names: %Ecto.Association.NotLoaded{}} = address) do diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex index bbc0d6741e12..a6c562e30835 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/address_view.ex @@ -82,8 +82,6 @@ defmodule BlockScoutWeb.API.V2.AddressView do def prepare_address({address, transaction_count}) do nil |> Helper.address_with_info(address, address.hash, true) - # todo: keep `tx_count` for compatibility with frontend and remove when new frontend is bound to `transaction_count` property - |> Map.put(:tx_count, to_string(transaction_count)) |> Map.put(:transaction_count, to_string(transaction_count)) |> Map.put(:coin_balance, if(address.fetched_coin_balance, do: address.fetched_coin_balance.value)) end @@ -107,8 +105,6 @@ defmodule BlockScoutWeb.API.V2.AddressView do Map.merge(base_info, %{ "creator_address_hash" => creator_hash && Address.checksum(creator_hash), "creation_transaction_hash" => creation_transaction_hash, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `creation_transaction_hash` property - "creation_tx_hash" => creation_transaction_hash, "token" => token, "coin_balance" => balance, "exchange_rate" => exchange_rate, @@ -123,7 +119,10 @@ defmodule BlockScoutWeb.API.V2.AddressView do }) extended_info - |> chain_type_fields(%{address: creation_transaction && creation_transaction.from_address, field_prefix: "creator"}) + |> chain_type_fields(%{ + address: address, + creation_transaction_from_address: creation_transaction && creation_transaction.from_address + }) end @spec prepare_token_balance(Chain.Address.TokenBalance.t(), boolean()) :: map() @@ -243,11 +242,24 @@ defmodule BlockScoutWeb.API.V2.AddressView do }) end + @spec chain_type_fields( + map(), + %{address: Address.t(), creation_transaction_from_address: Address.t()} + ) :: map() case @chain_type do :filecoin -> - defp chain_type_fields(result, params) do + defp chain_type_fields(result, %{creation_transaction_from_address: creation_transaction_from_address}) do + # credo:disable-for-next-line Credo.Check.Design.AliasUsage + BlockScoutWeb.API.V2.FilecoinView.put_filecoin_robust_address(result, %{ + address: creation_transaction_from_address, + field_prefix: "creator" + }) + end + + :zilliqa -> + defp chain_type_fields(result, %{address: address}) do # credo:disable-for-next-line Credo.Check.Design.AliasUsage - BlockScoutWeb.API.V2.FilecoinView.put_filecoin_robust_address(result, params) + BlockScoutWeb.API.V2.ZilliqaView.extend_address_json_response(result, address) end _ -> diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/advanced_filter_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/advanced_filter_view.ex index cfa7c1ff6215..d17f0cc8e276 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/advanced_filter_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/advanced_filter_view.ex @@ -73,7 +73,7 @@ defmodule 
BlockScoutWeb.API.V2.AdvancedFilterView do |> Stream.map(fn advanced_filter -> method_id = case advanced_filter.input do - %{bytes: <<method_id::binary-size(4), _::binary>>} -> method_id + %{bytes: <<method_id::binary-size(4), _::binary>>} -> "0x" <> Base.encode16(method_id, case: :lower) _ -> nil end @@ -87,18 +87,21 @@ defmodule BlockScoutWeb.API.V2.AdvancedFilterView do Address.checksum(advanced_filter.from_address_hash), Address.checksum(advanced_filter.to_address_hash), Address.checksum(advanced_filter.created_contract_address_hash), - advanced_filter.value, + decimal_to_string_xsd(advanced_filter.value), if(advanced_filter.type != "coin_transfer", - do: advanced_filter.token_transfer.token.contract_address_hash, + do: Address.checksum(advanced_filter.token_transfer.token.contract_address_hash), + else: nil + ), + if(advanced_filter.type != "coin_transfer", + do: decimal_to_string_xsd(advanced_filter.token_transfer.token.decimals), else: nil ), - if(advanced_filter.type != "coin_transfer", do: advanced_filter.token_transfer.token.decimals, else: nil), if(advanced_filter.type != "coin_transfer", do: advanced_filter.token_transfer.token.symbol, else: nil), advanced_filter.block_number, - advanced_filter.fee, - exchange_rate.usd_value, - opening_price, - closing_price + decimal_to_string_xsd(advanced_filter.fee), + decimal_to_string_xsd(exchange_rate.usd_value), + decimal_to_string_xsd(opening_price), + decimal_to_string_xsd(closing_price) ] end) @@ -178,4 +181,7 @@ defmodule BlockScoutWeb.API.V2.AdvancedFilterView do %{methods: method_ids, tokens: tokens_map} end + + defp decimal_to_string_xsd(nil), do: nil + defp decimal_to_string_xsd(decimal), do: Decimal.to_string(decimal, :xsd) end diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex index bdce021b179f..e113e8063255 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/arbitrum_view.ex @@ -1,9 +1,19 @@ defmodule BlockScoutWeb.API.V2.ArbitrumView do use BlockScoutWeb, :view + alias BlockScoutWeb.API.V2.ApiView alias BlockScoutWeb.API.V2.Helper, as: APIV2Helper alias Explorer.Chain.{Block, Hash, Transaction, Wei} - alias Explorer.Chain.Arbitrum.{L1Batch, LifecycleTransaction, Reader} + alias Explorer.Chain.Arbitrum.{L1Batch, LifecycleTransaction} + alias Explorer.Chain.Arbitrum.Reader.API.Settlement, as: SettlementReader + + @doc """ + Function to render error\\text responses for GET requests + to `/api/v2/arbitrum/messages/claim/:position` endpoint. + """ + def render("message.json", assigns) do + ApiView.render("message.json", assigns) + end @doc """ Function to render GET requests to `/api/v2/arbitrum/messages/:direction` endpoint. @@ -58,6 +68,40 @@ defmodule BlockScoutWeb.API.V2.ArbitrumView do count end + @doc """ + Function to render GET requests to `/api/v2/arbitrum/messages/claim/:message_id` endpoint. + """ + def render("arbitrum_claim_message.json", %{calldata: calldata, address: address}) do + %{ + "calldata" => calldata, + "outbox_address" => address + } + end + + @doc """ + Function to render GET requests to `/api/v2/arbitrum/messages/from-rollup/:msg_id/proof` endpoint.
+ """ + def render("arbitrum_withdrawals.json", %{withdrawals: withdrawals}) do + withdrawals_out = + withdrawals + |> Enum.map(fn withdraw -> + %{ + "id" => withdraw.message_id, + "status" => withdraw.status, + "caller" => withdraw.caller, + "destination" => withdraw.destination, + "arb_block_number" => withdraw.arb_block_number, + "eth_block_number" => withdraw.eth_block_number, + "l2_timestamp" => withdraw.l2_timestamp, + "callvalue" => withdraw.callvalue, + "data" => withdraw.data, + "token" => withdraw.token + } + end) + + %{items: withdrawals_out} + end + @doc """ Function to render GET requests to `/api/v2/arbitrum/batches/:batch_number` endpoint. """ @@ -359,7 +403,7 @@ defmodule BlockScoutWeb.API.V2.ArbitrumView do out = %{"batch_data_container" => "in_anytrust"} da_info = - with raw_info <- Reader.get_da_info_by_batch_number(batch_number), + with raw_info <- SettlementReader.get_da_info_by_batch_number(batch_number), false <- Enum.empty?(raw_info) do prepare_anytrust_certificate(raw_info) else @@ -386,7 +430,7 @@ defmodule BlockScoutWeb.API.V2.ArbitrumView do # members who guaranteed availability of data for the specified timeout. @spec prepare_anytrust_certificate(map()) :: map() defp prepare_anytrust_certificate(da_info) do - keyset = Reader.get_anytrust_keyset(da_info["keyset_hash"]) + keyset = SettlementReader.get_anytrust_keyset(da_info["keyset_hash"]) signers = if Enum.empty?(keyset) do @@ -414,13 +458,11 @@ defmodule BlockScoutWeb.API.V2.ArbitrumView do defp generate_celestia_da_info(batch_number) do out = %{"batch_data_container" => "in_celestia"} - da_info = Reader.get_da_info_by_batch_number(batch_number) + da_info = SettlementReader.get_da_info_by_batch_number(batch_number) out |> Map.merge(%{ "height" => Map.get(da_info, "height"), - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_count` property - "tx_commitment" => Map.get(da_info, "transaction_commitment"), "transaction_commitment" => Map.get(da_info, "transaction_commitment") }) end @@ -564,6 +606,8 @@ defmodule BlockScoutWeb.API.V2.ArbitrumView do end # Determines the associated L1 transaction and its status for the given message direction. + # TODO: take into account that a transaction on L2 may initiate several withdrawals. + # The current architecture doesn't support that. @spec l1_transaction_and_status_for_message( %{ :__struct__ => Transaction, @@ -603,7 +647,11 @@ defmodule BlockScoutWeb.API.V2.ArbitrumView do end end - %{"associated_l1_transaction" => l1_transaction, "message_status" => status} + %{ + "message_id" => APIV2Helper.get_2map_data(arbitrum_transaction, :arbitrum_message_from_l2, :message_id), + "associated_l1_transaction" => l1_transaction, + "message_status" => status + } end # Extends the output JSON with information from Arbitrum-specific fields of the transaction.
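A note on the `decimal_to_string_xsd/1` helper added to `AdvancedFilterView` above: `Decimal.to_string/2` with the `:xsd` argument emits the canonical XSD lexical form (plain, non-scientific notation with redundant zeros dropped), which keeps CSV export values stable regardless of how a decimal is stored internally. A minimal sketch with illustrative values:

```elixir
# Canonical XSD form: plain notation, trailing zeros trimmed.
Decimal.to_string(Decimal.new("-1.00"), :xsd)
#=> "-1.0"

Decimal.to_string(Decimal.new("4.50"), :xsd)
#=> "4.5"

# decimal_to_string_xsd/1 only adds a nil passthrough on top of this call,
# so empty CSV cells stay empty instead of raising on nil.
```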
diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/block_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/block_view.ex index 35152fd16588..440476f70ac2 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/block_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/block_view.ex @@ -38,8 +38,6 @@ defmodule BlockScoutWeb.API.V2.BlockView do "height" => block.number, "timestamp" => block.timestamp, "transaction_count" => count_transactions(block), - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_count` property - "tx_count" => count_transactions(block), "miner" => Helper.address_with_info(nil, block.miner, block.miner_hash, false), "size" => block.size, "hash" => block.hash, @@ -61,8 +59,6 @@ defmodule BlockScoutWeb.API.V2.BlockView do "burnt_fees_percentage" => burnt_fees_percentage(burnt_fees, transaction_fees), "type" => block |> BlockView.block_type() |> String.downcase(), "transaction_fees" => transaction_fees, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_fees` property - "tx_fees" => transaction_fees, "withdrawals_count" => count_withdrawals(block) } |> chain_type_fields(block, single_block?) diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/celo_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/celo_view.ex index 2f17c7bd84a3..abe7a9b3bd6b 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/celo_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/celo_view.ex @@ -248,15 +248,6 @@ defmodule BlockScoutWeb.API.V2.CeloView do } end - # Convert the burn fraction from FixidityLib value to decimal. - @spec burn_fraction_decimal(integer()) :: Decimal.t() - defp burn_fraction_decimal(burn_fraction_fixidity_lib) - when is_integer(burn_fraction_fixidity_lib) do - base = Decimal.new(1, 10, 24) - fraction = Decimal.new(1, burn_fraction_fixidity_lib, 0) - Decimal.div(fraction, base) - end - # Get the breakdown of the base fee for the case when FeeHandler is a contract # that receives the base fee. 
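The `burn_fraction_decimal/1` clause deleted from `CeloView` above is not gone: it moved to `CeloHelper` and is invoked below as `CeloHelper.burn_fraction_decimal/1`. For reference, a standalone sketch of the same FixidityLib conversion, reusing the constants from the removed code (the input value here is hypothetical):

```elixir
# FixidityLib represents fractions as integers scaled by a fixed base,
# so the decimal value is fraction / base.
# Decimal.new/3 takes (sign, coefficient, exponent).
burn_fraction_fixidity_lib = 8_000_000_000_000_000_000_000_000

base = Decimal.new(1, 10, 24)
fraction = Decimal.new(1, burn_fraction_fixidity_lib, 0)

Decimal.div(fraction, base)
#=> Decimal equal to 0.8
```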
@spec fee_handler_base_fee_breakdown(Wei.t(), Block.block_number()) :: @@ -280,7 +271,7 @@ defmodule BlockScoutWeb.API.V2.CeloView do {:ok, %{"value" => burn_fraction_fixidity_lib}} <- CeloCoreContracts.get_event(:fee_handler, :burn_fraction_set, block_number), {:ok, celo_token_address_hash} <- CeloCoreContracts.get_address(:celo_token, block_number) do - burn_fraction = burn_fraction_decimal(burn_fraction_fixidity_lib) + burn_fraction = CeloHelper.burn_fraction_decimal(burn_fraction_fixidity_lib) burnt_amount = Wei.mult(base_fee, burn_fraction) burnt_percentage = Decimal.mult(burn_fraction, 100) diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/ethereum_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/ethereum_view.ex index 46322420b6d5..e3a14d573fc8 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/ethereum_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/ethereum_view.ex @@ -29,9 +29,13 @@ defmodule BlockScoutWeb.API.V2.EthereumView do blob_gas_used = Map.get(block, :blob_gas_used) excess_blob_gas = Map.get(block, :excess_blob_gas) + blob_transaction_count = count_blob_transactions(block) + extended_out_json = out_json - |> Map.put("blob_tx_count", count_blob_transactions(block)) + # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `blob_transaction_count` property + |> Map.put("blob_tx_count", blob_transaction_count) + |> Map.put("blob_transaction_count", blob_transaction_count) |> Map.put("blob_gas_used", blob_gas_used) |> Map.put("excess_blob_gas", excess_blob_gas) diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex index b2752554f09d..f4c69632f691 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/helper.ex @@ -146,7 +146,8 @@ defmodule BlockScoutWeb.API.V2.Helper do def address_name(%Address{names: [_ | _] = address_names}) do case Enum.find(address_names, &(&1.primary == true)) do nil -> - %Address.Name{name: name} = Enum.at(address_names, 0) + # take last created address name, if there is no `primary` one. 
+ %Address.Name{name: name} = Enum.max_by(address_names, & &1.id) name %Address.Name{name: name} -> diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/internal_transaction_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/internal_transaction_view.ex index 6ac34a0e0bad..d67b9f55fe0d 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/internal_transaction_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/internal_transaction_view.ex @@ -48,8 +48,6 @@ defmodule BlockScoutWeb.API.V2.InternalTransactionView do false ), "value" => internal_transaction.value, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `block_number` property - "block" => internal_transaction.block_number, "block_number" => internal_transaction.block_number, "timestamp" => (block && block.timestamp) || internal_transaction.block.timestamp, "index" => internal_transaction.index, diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/mud_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/mud_view.ex index 488425cb4b11..aa66c549681a 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/mud_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/mud_view.ex @@ -83,8 +83,6 @@ defmodule BlockScoutWeb.API.V2.MudView do defp prepare_world_for_list(%Address{} = address) do %{ "address" => Helper.address_with_info(address, address.hash), - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_count` property - "tx_count" => address.transactions_count, "transaction_count" => address.transactions_count, "coin_balance" => if(address.fetched_coin_balance, do: address.fetched_coin_balance.value) } diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/optimism_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/optimism_view.ex index 2c68d4425abe..dc01ad551563 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/optimism_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/optimism_view.ex @@ -36,11 +36,7 @@ defmodule BlockScoutWeb.API.V2.OptimismView do %{ "l2_block_number" => batch.l2_block_number, "transaction_count" => transaction_count, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_count` property - "tx_count" => transaction_count, "l1_transaction_hashes" => batch.frame_sequence.l1_transaction_hashes, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l1_transaction_hashes` property - "l1_tx_hashes" => batch.frame_sequence.l1_transaction_hashes, "l1_timestamp" => batch.frame_sequence.l1_timestamp } end) @@ -97,8 +93,6 @@ defmodule BlockScoutWeb.API.V2.OptimismView do "l2_output_index" => r.l2_output_index, "l2_block_number" => r.l2_block_number, "l1_transaction_hash" => r.l1_transaction_hash, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l1_transaction_hash` property - "l1_tx_hash" => r.l1_transaction_hash, "l1_timestamp" => r.l1_timestamp, "l1_block_number" => r.l1_block_number, "output_root" => r.output_root @@ -154,16 +148,8 @@ defmodule BlockScoutWeb.API.V2.OptimismView do %{ "l1_block_number" => deposit.l1_block_number, "l2_transaction_hash" => deposit.l2_transaction_hash, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l2_transaction_hash` property - 
"l2_tx_hash" => deposit.l2_transaction_hash, "l1_block_timestamp" => deposit.l1_block_timestamp, "l1_transaction_hash" => deposit.l1_transaction_hash, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l1_transaction_hash` property - "l1_tx_hash" => deposit.l1_transaction_hash, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l1_transaction_origin` property - "l1_tx_origin" => deposit.l1_transaction_origin, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l2_transaction_gas_limit` property - "l2_tx_gas_limit" => deposit.l2_transaction.gas, "l1_transaction_origin" => deposit.l1_transaction_origin, "l2_transaction_gas_limit" => deposit.l2_transaction.gas } @@ -180,10 +166,6 @@ defmodule BlockScoutWeb.API.V2.OptimismView do %{ "l1_block_number" => deposit.l1_block_number, "l1_block_timestamp" => deposit.l1_block_timestamp, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l1_transaction_hash` property - "l1_tx_hash" => deposit.l1_transaction_hash, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l2_transaction_hash` property - "l2_tx_hash" => deposit.l2_transaction_hash, "l1_transaction_hash" => deposit.l1_transaction_hash, "l2_transaction_hash" => deposit.l2_transaction_hash } @@ -239,13 +221,9 @@ defmodule BlockScoutWeb.API.V2.OptimismView do "msg_nonce_version" => msg_nonce_version, "from" => Helper.address_with_info(conn, from_address, from_address_hash, w.from), "l2_transaction_hash" => w.l2_transaction_hash, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l2_transaction_hash` property - "l2_tx_hash" => w.l2_transaction_hash, "l2_timestamp" => w.l2_timestamp, "status" => status, "l1_transaction_hash" => w.l1_transaction_hash, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l1_transaction_hash` property - "l1_tx_hash" => w.l1_transaction_hash, "challenge_period_end" => challenge_period_end } end), @@ -288,11 +266,7 @@ defmodule BlockScoutWeb.API.V2.OptimismView do :l2_block_start => non_neg_integer(), :l2_block_end => non_neg_integer(), :transaction_count => non_neg_integer(), - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_count` property - :tx_count => non_neg_integer(), :l1_transaction_hashes => list(), - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l1_transaction_hashes` property - :l1_tx_hashes => list(), :batch_data_container => :in_blob4844 | :in_celestia | :in_calldata | nil } defp render_base_info_for_batch(internal_id, l2_block_number_from, l2_block_number_to, transaction_count, batch) do @@ -336,8 +310,6 @@ defmodule BlockScoutWeb.API.V2.OptimismView do "internal_id" => frame_sequence.id, "l1_timestamp" => frame_sequence.l1_timestamp, "l1_transaction_hashes" => frame_sequence.l1_transaction_hashes, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l1_transaction_hashes` property - "l1_tx_hashes" => frame_sequence.l1_transaction_hashes, "batch_data_container" => batch_data_container } |> extend_batch_info_by_blobs(blobs, "blobs") diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/polygon_zkevm_view.ex 
b/apps/block_scout_web/lib/block_scout_web/views/api/v2/polygon_zkevm_view.ex index 0960ca7943ae..5a9f68acc0ca 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/polygon_zkevm_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/polygon_zkevm_view.ex @@ -33,10 +33,6 @@ defmodule BlockScoutWeb.API.V2.PolygonZkevmView do "acc_input_hash" => batch.acc_input_hash, "sequence_transaction_hash" => sequence_transaction_hash, "verify_transaction_hash" => verify_transaction_hash, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `sequence_transaction_hash` property - "sequence_tx_hash" => sequence_transaction_hash, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `verify_transaction_hash` property - "verify_tx_hash" => verify_transaction_hash, "state_root" => batch.state_root } end @@ -160,12 +156,6 @@ defmodule BlockScoutWeb.API.V2.PolygonZkevmView do "status" => batch_status(batch), "timestamp" => batch.timestamp, "transaction_count" => batch.l2_transactions_count, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_count` property - "tx_count" => batch.l2_transactions_count, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `sequence_transaction_hash` property - "sequence_tx_hash" => sequence_transaction_hash, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `verify_transaction_hash` property - "verify_tx_hash" => verify_transaction_hash, "sequence_transaction_hash" => sequence_transaction_hash, "verify_transaction_hash" => verify_transaction_hash } diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/search_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/search_view.ex index dbda49f01570..b9c3034e2bf4 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/search_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/search_view.ex @@ -5,11 +5,12 @@ defmodule BlockScoutWeb.API.V2.SearchView do alias BlockScoutWeb.{BlockView, Endpoint} alias Explorer.Chain alias Explorer.Chain.{Address, Beacon.Blob, Block, Hash, Transaction, UserOperation} + alias Plug.Conn.Query def render("search_results.json", %{search_results: search_results, next_page_params: next_page_params}) do %{ "items" => search_results |> Enum.map(&prepare_search_result/1) |> chain_type_fields(), - "next_page_params" => next_page_params + "next_page_params" => next_page_params |> encode_next_page_params() } end @@ -41,7 +42,7 @@ defmodule BlockScoutWeb.API.V2.SearchView do "circulating_market_cap" => search_result.circulating_market_cap && to_string(search_result.circulating_market_cap), "is_verified_via_admin_panel" => search_result.is_verified_via_admin_panel, - "certified" => if(search_result.certified, do: search_result.certified, else: false), + "certified" => search_result.certified || false, "priority" => search_result.priority } end @@ -101,8 +102,6 @@ defmodule BlockScoutWeb.API.V2.SearchView do %{ "type" => search_result.type, "transaction_hash" => transaction_hash, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_hash` property - "tx_hash" => transaction_hash, "url" => transaction_path(Endpoint, :show, transaction_hash), "timestamp" => search_result.timestamp, "priority" => search_result.priority @@ -178,4 +177,21 @@ defmodule 
BlockScoutWeb.API.V2.SearchView do result end end + + defp encode_next_page_params(next_page_params) when is_map(next_page_params) do + result = + next_page_params + |> Query.encode() + |> URI.decode_query() + |> Enum.map(fn {k, v} -> + {k, unless(v == "", do: v)} + end) + |> Enum.into(%{}) + + unless result == %{} do + result + end + end + + defp encode_next_page_params(next_page_params), do: next_page_params end diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex index 7bf5670579e5..5cbb42885aef 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/smart_contract_view.ex @@ -231,11 +231,14 @@ defmodule BlockScoutWeb.API.V2.SmartContractView do "is_blueprint" => if(smart_contract.is_blueprint, do: smart_contract.is_blueprint, else: false) } |> Map.merge(bytecode_info(address)) - |> chain_type_fields(%{ - address_hash: verified_twin_address_hash, - field_prefix: "verified_twin", - target_contract: target_contract - }) + |> chain_type_fields( + %{ + address_hash: verified_twin_address_hash, + field_prefix: "verified_twin", + target_contract: target_contract + }, + true + ) end def prepare_smart_contract(%Address{proxy_implementations: implementations} = address, conn) do @@ -347,8 +350,6 @@ defmodule BlockScoutWeb.API.V2.SmartContractView do "compiler_version" => smart_contract.compiler_version, "optimization_enabled" => smart_contract.optimization, "transaction_count" => smart_contract.address.transactions_count, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_count` property - "tx_count" => smart_contract.address.transactions_count, "language" => smart_contract_language(smart_contract), "verified_at" => smart_contract.inserted_at, "market_cap" => token && token.circulating_market_cap, @@ -360,9 +361,10 @@ defmodule BlockScoutWeb.API.V2.SmartContractView do } smart_contract_info - |> chain_type_fields(%{ - target_contract: smart_contract - }) + |> chain_type_fields( + %{target_contract: smart_contract}, + false + ) end defp smart_contract_language(smart_contract) do @@ -446,26 +448,29 @@ defmodule BlockScoutWeb.API.V2.SmartContractView do case @chain_type do :filecoin -> - defp chain_type_fields(result, params) do + defp chain_type_fields(result, params, true) do # credo:disable-for-next-line Credo.Check.Design.AliasUsage BlockScoutWeb.API.V2.FilecoinView.preload_and_put_filecoin_robust_address(result, params) end + defp chain_type_fields(result, _params, false), + do: result + :arbitrum -> - defp chain_type_fields(result, %{target_contract: target_contract}) do + defp chain_type_fields(result, %{target_contract: target_contract}, _single?) do result |> Map.put("package_name", target_contract.package_name) |> Map.put("github_repository_metadata", target_contract.github_repository_metadata) end :zksync -> - defp chain_type_fields(result, %{target_contract: target_contract}) do + defp chain_type_fields(result, %{target_contract: target_contract}, _single?) do result |> Map.put("zk_compiler_version", target_contract.zk_compiler_version) end _ -> - defp chain_type_fields(result, _address) do + defp chain_type_fields(result, _params, _single?) 
do result end end diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/token_transfer_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/token_transfer_view.ex index 6d4ec1d001ef..4e530664e6e4 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/token_transfer_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/token_transfer_view.ex @@ -46,8 +46,6 @@ defmodule BlockScoutWeb.API.V2.TokenTransferView do def prepare_token_transfer(token_transfer, _conn, decoded_input) do %{ "transaction_hash" => token_transfer.transaction_hash, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_hash` property - "tx_hash" => token_transfer.transaction_hash, "from" => Helper.address_with_info(nil, token_transfer.from_address, token_transfer.from_address_hash, false), "to" => Helper.address_with_info(nil, token_transfer.to_address, token_transfer.to_address_hash, false), "total" => prepare_token_transfer_total(token_transfer), diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/token_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/token_view.ex index 72b1ad715199..143a82aa7f08 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/token_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/token_view.ex @@ -114,7 +114,10 @@ defmodule BlockScoutWeb.API.V2.TokenView do "external_app_url" => NFTHelper.external_url(instance), "animation_url" => instance.metadata && NFTHelper.retrieve_image(instance.metadata["animation_url"]), "image_url" => instance.metadata && NFTHelper.get_media_src(instance.metadata, false), - "is_unique" => instance.is_unique + "is_unique" => instance.is_unique, + "thumbnails" => instance.thumbnails, + "media_type" => instance.media_type, + "media_url" => Instance.get_media_url_from_metadata_for_nft_media_handler(instance.metadata) } end diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/transaction_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/transaction_view.ex index 739271dbbc2b..27040ea6cfcb 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/transaction_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/transaction_view.ex @@ -258,8 +258,6 @@ defmodule BlockScoutWeb.API.V2.TransactionView do %{ "transaction_hash" => get_transaction_hash(transaction_or_hash), - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_hash` property - "tx_hash" => get_transaction_hash(transaction_or_hash), "address" => Helper.address_with_info(nil, log.address, log.address_hash, tags_for_address_needed?), "topics" => [ log.first_topic, @@ -374,8 +372,6 @@ defmodule BlockScoutWeb.API.V2.TransactionView do "result" => status, "status" => transaction.status, "block_number" => transaction.block_number, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `block_number` property - "block" => transaction.block_number, "timestamp" => block_timestamp(transaction), "from" => Helper.address_with_info( @@ -413,7 +409,9 @@ defmodule BlockScoutWeb.API.V2.TransactionView do "max_priority_fee_per_gas" => transaction.max_priority_fee_per_gas, "base_fee_per_gas" => base_fee_per_gas, "priority_fee" => priority_fee_per_gas && Wei.mult(priority_fee_per_gas, transaction.gas_used), + # todo: keep next line for compatibility with frontend and remove when new frontend is bound to 
`transaction_burnt_fee` property "tx_burnt_fee" => burnt_fees, + "transaction_burnt_fee" => burnt_fees, "nonce" => transaction.nonce, "position" => transaction.index, "revert_reason" => revert_reason, @@ -425,14 +423,8 @@ defmodule BlockScoutWeb.API.V2.TransactionView do "exchange_rate" => Market.get_coin_exchange_rate().usd_value, "method" => Transaction.method_name(transaction, decoded_input), "transaction_types" => transaction_types(transaction), - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_types` property - "tx_types" => transaction_types(transaction), "transaction_tag" => GetTransactionTags.get_transaction_tags(transaction.hash, current_user(single_transaction? && conn)), - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_tag` property - "tx_tag" => GetTransactionTags.get_transaction_tags(transaction.hash, current_user(single_transaction? && conn)), - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `has_error_in_internal_transactions` property - "has_error_in_internal_txs" => transaction.has_error_in_internal_transactions, "has_error_in_internal_transactions" => transaction.has_error_in_internal_transactions, "authorization_list" => authorization_list(transaction.signed_authorizations), "near_receipt_hash" => transaction.near_receipt_hash, @@ -917,6 +909,16 @@ defmodule BlockScoutWeb.API.V2.TransactionView do BlockScoutWeb.API.V2.CeloView.extend_transaction_json_response(result, transaction) end + :zilliqa -> + defp chain_type_transformations(transactions) do + transactions + end + + defp chain_type_fields(result, transaction, _single_tx?, _conn, _watchlist_names) do + # credo:disable-for-next-line Credo.Check.Design.AliasUsage + BlockScoutWeb.API.V2.ZilliqaView.extend_transaction_json_response(result, transaction) + end + _ -> defp chain_type_transformations(transactions) do transactions diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/zilliqa_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/zilliqa_view.ex index 238d2c72ab3c..07c941acab97 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/zilliqa_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/zilliqa_view.ex @@ -6,7 +6,8 @@ defmodule BlockScoutWeb.API.V2.ZilliqaView do if @chain_type == :zilliqa do # TODO: remove when https://github.com/elixir-lang/elixir/issues/13975 comes to elixir release - alias Explorer.Chain.Block, warn: false + import Explorer.Chain.Zilliqa.Helper, only: [scilla_transaction?: 1], warn: false + alias Explorer.Chain.{Address, Block, Transaction}, warn: false alias Explorer.Chain.Zilliqa.{AggregateQuorumCertificate, QuorumCertificate}, warn: false @doc """ @@ -34,6 +35,55 @@ defmodule BlockScoutWeb.API.V2.ZilliqaView do Map.put(out_json, :zilliqa, zilliqa_json) end + @doc """ + Extends the JSON output with a sub-map containing information related to Zilliqa, + such as if the transaction is a Scilla transaction. + + ## Parameters + - `out_json`: A map defining the output JSON which will be extended. + - `transaction`: The transaction structure. + + ## Returns + - A map extended with data related to Zilliqa. 
+ """ + @spec extend_transaction_json_response(map(), Transaction.t()) :: map() + def extend_transaction_json_response(out_json, %Transaction{} = transaction) do + Map.put(out_json, :zilliqa, %{ + is_scilla: scilla_transaction?(transaction) + }) + end + + @doc """ + Extends the JSON output with a sub-map containing information related to + Zilliqa, such as if the address is a Scilla smart contract. + + ## Parameters + - `out_json`: A map defining the output JSON which will be extended. + - `address`: The address structure. + + ## Returns + - A map extended with data related to Zilliqa. + """ + @spec extend_address_json_response(map(), Address.t()) :: map() + def extend_address_json_response(out_json, %Address{} = address) do + is_scilla_contract = + case address do + %Address{ + contracts_creation_transaction: transaction + } -> + scilla_transaction?(transaction) + + _ -> + false + end + + Map.put(out_json, :zilliqa, %{ + is_scilla_contract: is_scilla_contract + }) + end + + def extend_address_json_response(out_json, _address), do: out_json + @spec add_quorum_certificate(map(), Block.t()) :: map() defp add_quorum_certificate( zilliqa_json, diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/zksync_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/zksync_view.ex index b2eeb59c7b03..764f23f5600e 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/zksync_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/zksync_view.ex @@ -17,10 +17,6 @@ defmodule BlockScoutWeb.API.V2.ZkSyncView do "root_hash" => batch.root_hash, "l1_transaction_count" => batch.l1_transaction_count, "l2_transaction_count" => batch.l2_transaction_count, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l1_transaction_count` property - "l1_tx_count" => batch.l1_transaction_count, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l2_transaction_count` property - "l2_tx_count" => batch.l2_transaction_count, "l1_gas_price" => batch.l1_gas_price, "l2_fair_gas_price" => batch.l2_fair_gas_price, "start_block" => batch.start_block, @@ -68,8 +64,6 @@ defmodule BlockScoutWeb.API.V2.ZkSyncView do %{ "number" => batch.number, "timestamp" => batch.timestamp, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_count` property - "tx_count" => batch.l1_transaction_count + batch.l2_transaction_count, "transaction_count" => batch.l1_transaction_count + batch.l2_transaction_count } |> add_l1_transactions_info_and_status(batch) diff --git a/apps/block_scout_web/lib/block_scout_web/views/script_helper.ex b/apps/block_scout_web/lib/block_scout_web/views/script_helper.ex index 7c98de57c7b9..f3d0c6cc43e8 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/script_helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/script_helper.ex @@ -3,16 +3,22 @@ defmodule BlockScoutWeb.Views.ScriptHelper do Helper for rendering view specific script tags. 
""" - import Phoenix.HTML, only: [sigil_E: 2] + import Phoenix.LiveView.Helpers, only: [sigil_H: 2] import BlockScoutWeb.Router.Helpers, only: [static_path: 2] + alias Phoenix.HTML.Safe + def render_scripts(conn, file_names) do conn |> files(file_names) |> Enum.map(fn file -> - ~E""" - + assigns = %{file: file} + + ~H""" + """ + |> Safe.to_iodata() + |> List.to_string() end) end diff --git a/apps/block_scout_web/lib/block_scout_web/views/tokens/instance/overview_view.ex b/apps/block_scout_web/lib/block_scout_web/views/tokens/instance/overview_view.ex index 640b34fa0d82..923dc7777abb 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/tokens/instance/overview_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/tokens/instance/overview_view.ex @@ -5,6 +5,7 @@ defmodule BlockScoutWeb.Tokens.Instance.OverviewView do alias Explorer.Chain alias Explorer.Chain.{Address, CurrencyHelper, SmartContract, Token} alias Explorer.SmartContract.Helper + alias Utils.TokenInstanceHelper import BlockScoutWeb.APIDocsView, only: [blockscout_url: 1] import BlockScoutWeb.NFTHelper, only: [external_url: 1] @@ -28,54 +29,13 @@ defmodule BlockScoutWeb.Tokens.Instance.OverviewView do NFTHelper.get_media_src(instance.metadata, high_quality_media?) || media_src(nil) end - def media_type("data:image/" <> _data) do - "image" - end - - def media_type("data:video/" <> _data) do - "video" - end - - def media_type("data:" <> _data) do - nil - end - - def media_type(media_src) when not is_nil(media_src) do - ext = media_src |> Path.extname() |> String.trim() - - mime_type = - if ext == "" do - process_missing_extension(media_src) - else - ext_with_dot = - media_src - |> Path.extname() - - "." <> ext = ext_with_dot - - ext - |> MIME.type() - end - - if mime_type do - basic_mime_type = mime_type |> String.split("/") |> Enum.at(0) - - basic_mime_type - else - nil - end - end - - def media_type(nil), do: nil - - defp process_missing_extension(media_src) do - case HTTPoison.head(media_src, [], follow_redirect: true) do - {:ok, %HTTPoison.Response{status_code: 200, headers: headers}} -> - headers_map = Map.new(headers, fn {key, value} -> {String.downcase(key), value} end) - headers_map["content-type"] + def media_type(media_src) do + case TokenInstanceHelper.media_type(media_src) do + {type, _} -> + type - _ -> - nil + other -> + other end end diff --git a/apps/block_scout_web/lib/block_scout_web/views/transaction_view.ex b/apps/block_scout_web/lib/block_scout_web/views/transaction_view.ex index 81b1b06ad148..d2bd66961a1f 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/transaction_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/transaction_view.ex @@ -599,14 +599,6 @@ defmodule BlockScoutWeb.TransactionView do %{show: String.slice(string, 0..length), hide: String.slice(string, (length + 1)..-1//1)} end - defp template_to_string(template) when is_list(template) do - template_to_string(Enum.at(template, 1)) - end - - defp template_to_string(template) when is_tuple(template) do - safe_to_string(template) - end - # Function decodes revert reason of the transaction @spec decode_revert_reason_as_utf8(binary() | nil) :: binary() | nil def decode_revert_reason_as_utf8(revert_reason) do diff --git a/apps/block_scout_web/mix.exs b/apps/block_scout_web/mix.exs index 412358ec2388..e74ac35caa3b 100644 --- a/apps/block_scout_web/mix.exs +++ b/apps/block_scout_web/mix.exs @@ -14,7 +14,7 @@ defmodule BlockScoutWeb.Mixfile do plt_add_deps: :app_tree, ignore_warnings: "../../.dialyzer-ignore" ], - elixir: 
"~> 1.13", + elixir: "~> 1.17", elixirc_paths: elixirc_paths(Mix.env(), Application.get_env(:block_scout_web, :disable_api?)), lockfile: "../../mix.lock", package: package(), @@ -23,7 +23,7 @@ defmodule BlockScoutWeb.Mixfile do dialyzer: :test ], start_permanent: Mix.env() == :prod, - version: "6.9.2", + version: "6.10.1", xref: [ exclude: [ Explorer.Chain.PolygonZkevm.Reader, @@ -119,8 +119,9 @@ defmodule BlockScoutWeb.Mixfile do {:number, "~> 1.0.1"}, {:phoenix, "== 1.5.14"}, {:phoenix_ecto, "~> 4.1"}, - {:phoenix_html, "== 3.0.4"}, + {:phoenix_html, "== 3.3.4"}, {:phoenix_live_reload, "~> 1.2", only: [:dev]}, + {:phoenix_live_view, "~> 0.17"}, {:phoenix_pubsub, "~> 2.0"}, {:prometheus_ex, git: "https://github.com/lanodan/prometheus.ex", branch: "fix/elixir-1.14", override: true}, # use `:cowboy` for WebServer with `:plug` diff --git a/apps/block_scout_web/test/block_scout_web/controllers/address_transaction_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/address_transaction_controller_test.exs index 873597f0b0c6..afd0f420acb7 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/address_transaction_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/address_transaction_controller_test.exs @@ -186,7 +186,7 @@ defmodule BlockScoutWeb.AddressTransactionControllerTest do "to_period" => to_period }) - assert conn.status == 404 + assert conn.status == 403 end test "do not export token transfers to csv without recaptcha passed", %{ @@ -221,7 +221,7 @@ defmodule BlockScoutWeb.AddressTransactionControllerTest do "recaptcha_response" => "123" }) - assert conn.status == 404 + assert conn.status == 403 end test "exports token transfers to csv without recaptcha if recaptcha is disabled", %{conn: conn} do diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/rpc/contract_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/rpc/contract_controller_test.exs index 94e217d62efd..6565795d9fa4 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/rpc/contract_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/rpc/contract_controller_test.exs @@ -1,9 +1,12 @@ defmodule BlockScoutWeb.API.RPC.ContractControllerTest do use BlockScoutWeb.ConnCase - alias Explorer.{Chain, TestHelper} - alias Explorer.Chain.{Address, SmartContract} import Mox + import Ecto.Query + + alias Explorer.{Repo, TestHelper} + alias Explorer.Chain.SmartContract.Proxy.Models.Implementation + alias Explorer.Chain.{Address, SmartContract} setup :verify_on_exit! 
@@ -732,23 +735,13 @@ defmodule BlockScoutWeb.API.RPC.ContractControllerTest do abi: proxy_abi ) - transaction = - insert(:transaction, - created_contract_address_hash: proxy_address.hash, - input: proxy_transaction_input - ) - |> with_block(status: :ok) + insert(:transaction, + created_contract_address_hash: proxy_address.hash, + input: proxy_transaction_input + ) + |> with_block(status: :ok) name = implementation_contract.name - from = Address.checksum(transaction.from_address_hash) - transaction_hash = to_string(transaction.hash) - address_hash = Address.checksum(proxy_address.hash) - - {:ok, implementation_contract_address_hash} = - Chain.string_to_address_hash("0x" <> implementation_contract_address_hash_string) - - checksummed_implementation_contract_address_hash = - implementation_contract_address_hash && Address.checksum(implementation_contract_address_hash) insert(:proxy_implementation, proxy_address_hash: proxy_address.hash, @@ -1184,6 +1177,198 @@ defmodule BlockScoutWeb.API.RPC.ContractControllerTest do end end + describe "verifyproxycontract & checkproxyverification" do + setup do + %{params: %{"module" => "contract"}} + end + + @proxy_abi [ + %{ + "type" => "function", + "stateMutability" => "nonpayable", + "payable" => false, + "outputs" => [%{"type" => "bool", "name" => ""}], + "name" => "upgradeTo", + "inputs" => [%{"type" => "address", "name" => "newImplementation"}], + "constant" => false + }, + %{ + "type" => "function", + "stateMutability" => "view", + "payable" => false, + "outputs" => [%{"type" => "uint256", "name" => ""}], + "name" => "version", + "inputs" => [], + "constant" => true + }, + %{ + "type" => "function", + "stateMutability" => "view", + "payable" => false, + "outputs" => [%{"type" => "address", "name" => ""}], + "name" => "implementation", + "inputs" => [], + "constant" => true + }, + %{ + "type" => "function", + "stateMutability" => "nonpayable", + "payable" => false, + "outputs" => [], + "name" => "renounceOwnership", + "inputs" => [], + "constant" => false + }, + %{ + "type" => "function", + "stateMutability" => "view", + "payable" => false, + "outputs" => [%{"type" => "address", "name" => ""}], + "name" => "getOwner", + "inputs" => [], + "constant" => true + }, + %{ + "type" => "function", + "stateMutability" => "view", + "payable" => false, + "outputs" => [%{"type" => "address", "name" => ""}], + "name" => "getProxyStorage", + "inputs" => [], + "constant" => true + }, + %{ + "type" => "function", + "stateMutability" => "nonpayable", + "payable" => false, + "outputs" => [], + "name" => "transferOwnership", + "inputs" => [%{"type" => "address", "name" => "_newOwner"}], + "constant" => false + }, + %{ + "type" => "constructor", + "stateMutability" => "nonpayable", + "payable" => false, + "inputs" => [ + %{"type" => "address", "name" => "_proxyStorage"}, + %{"type" => "address", "name" => "_implementationAddress"} + ] + }, + %{"type" => "fallback", "stateMutability" => "nonpayable", "payable" => false}, + %{ + "type" => "event", + "name" => "Upgraded", + "inputs" => [ + %{"type" => "uint256", "name" => "version", "indexed" => false}, + %{"type" => "address", "name" => "implementation", "indexed" => true} + ], + "anonymous" => false + }, + %{ + "type" => "event", + "name" => "OwnershipRenounced", + "inputs" => [%{"type" => "address", "name" => "previousOwner", "indexed" => true}], + "anonymous" => false + }, + %{ + "type" => "event", + "name" => "OwnershipTransferred", + "inputs" => [ + %{"type" => "address", "name" => "previousOwner", "indexed" => 
true}, + %{"type" => "address", "name" => "newOwner", "indexed" => true} + ], + "anonymous" => false + } + ] + @implementation_abi [ + %{ + "constant" => false, + "inputs" => [%{"name" => "x", "type" => "uint256"}], + "name" => "set", + "outputs" => [], + "payable" => false, + "stateMutability" => "nonpayable", + "type" => "function" + }, + %{ + "constant" => true, + "inputs" => [], + "name" => "get", + "outputs" => [%{"name" => "", "type" => "uint256"}], + "payable" => false, + "stateMutability" => "view", + "type" => "function" + } + ] + test "verify", %{conn: conn, params: params} do + proxy_contract_address = insert(:contract_address) + + insert(:smart_contract, address_hash: proxy_contract_address.hash, abi: @proxy_abi, contract_code_md5: "123") + + implementation_contract_address = insert(:contract_address) + + insert(:smart_contract, + address_hash: implementation_contract_address.hash, + abi: @implementation_abi, + contract_code_md5: "123" + ) + + implementation_contract_address_hash_string = + Base.encode16(implementation_contract_address.hash.bytes, case: :lower) + + TestHelper.get_eip1967_implementation_zero_addresses() + + expect( + EthereumJSONRPC.Mox, + :json_rpc, + fn [%{id: id, method: _, params: [%{data: _, to: _}, _]}], _options -> + {:ok, + [ + %{ + id: id, + jsonrpc: "2.0", + result: "0x000000000000000000000000" <> implementation_contract_address_hash_string + } + ]} + end + ) + + %{ + "message" => "OK", + "result" => uid, + "status" => "1" + } = + conn + |> get( + "/api", + Map.merge(params, %{"action" => "verifyproxycontract", "address" => to_string(proxy_contract_address.hash)}) + ) + |> json_response(200) + + :timer.sleep(333) + + result = + "The proxy's (#{to_string(proxy_contract_address.hash)}) implementation contract is found at #{to_string(implementation_contract_address.hash)} and is successfully updated." + + %{ + "message" => "OK", + "result" => ^result, + "status" => "1" + } = + conn + |> get("/api", Map.merge(params, %{"action" => "checkproxyverification", "guid" => uid})) + |> json_response(200) + + assert %Implementation{address_hashes: implementations} = + Implementation + |> where([i], i.proxy_address_hash == ^proxy_contract_address.hash) + |> Repo.one() + + assert implementations == [implementation_contract_address.hash] + end + end + defp listcontracts_schema do resolve_schema(%{ "type" => ["array", "null"], diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v1/health_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v1/health_controller_test.exs index 54197c5866e1..3f7cb670738b 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v1/health_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v1/health_controller_test.exs @@ -12,30 +12,42 @@ defmodule BlockScoutWeb.API.V1.HealthControllerTest do describe "GET last_block_status/0" do test "returns error when there are no blocks in db", %{conn: conn} do - request = get(conn, api_v1_health_path(conn, :health)) + request = get(conn, api_health_path(conn, :health)) assert request.status == 500 - assert request.resp_body == - "{\"error_code\":5002,\"error_description\":\"There are no blocks in the DB\",\"error_title\":\"no blocks in db\",\"healthy\":false}" + expected_response = + %{ + healthy: false, + error: %{ + code: 5002, + message: "There are no blocks in the DB." 
+ } + } + |> Jason.encode!() + + assert request.resp_body == expected_response end test "returns error when last block is stale", %{conn: conn} do insert(:block, consensus: true, timestamp: Timex.shift(DateTime.utc_now(), hours: -50)) - request = get(conn, api_v1_health_path(conn, :health)) + request = get(conn, api_health_path(conn, :health)) assert request.status == 500 assert %{ "healthy" => false, - "error_code" => 5001, - "error_title" => "blocks fetching is stuck", - "error_description" => - "There are no new blocks in the DB for the last 5 mins. Check the healthiness of Ethereum archive node or the Blockscout DB instance", - "data" => %{ - "latest_block_number" => _, - "latest_block_inserted_at" => _ + "error" => %{ + "code" => 5001, + "message" => + "There are no new blocks in the DB for the last 5 mins. Check the healthiness of the JSON RPC archive node or the DB." + }, + "metadata" => %{ + "latest_block" => %{ + "number" => _, + "timestamp" => _ + } } } = Poison.decode!(request.resp_body) end @@ -44,7 +56,7 @@ defmodule BlockScoutWeb.API.V1.HealthControllerTest do block1 = insert(:block, consensus: true, timestamp: DateTime.utc_now(), number: 2) insert(:block, consensus: true, timestamp: DateTime.utc_now(), number: 1) - request = get(conn, api_v1_health_path(conn, :health)) + request = get(conn, api_health_path(conn, :health)) assert request.status == 200 @@ -53,11 +65,17 @@ defmodule BlockScoutWeb.API.V1.HealthControllerTest do assert result["healthy"] == true assert %{ - "latest_block_number" => to_string(block1.number), - "latest_block_inserted_at" => to_string(block1.timestamp), - "cache_latest_block_number" => to_string(block1.number), - "cache_latest_block_inserted_at" => to_string(block1.timestamp) - } == result["data"] + "latest_block" => %{ + "db" => %{ + "number" => to_string(block1.number), + "timestamp" => to_string(block1.timestamp) + }, + "cache" => %{ + "number" => to_string(block1.number), + "timestamp" => to_string(block1.timestamp) + } + } + } == result["metadata"] end end @@ -71,24 +89,27 @@ defmodule BlockScoutWeb.API.V1.HealthControllerTest do assert [%{hash: ^state_block_hash}] = Chain.list_blocks(paging_options: %PagingOptions{page_size: 1}) - request = get(conn, api_v1_health_path(conn, :health)) + request = get(conn, api_health_path(conn, :health)) assert request.status == 500 assert %{ "healthy" => false, - "error_code" => 5001, - "error_title" => "blocks fetching is stuck", - "error_description" => - "There are no new blocks in the DB for the last 5 mins. Check the healthiness of Ethereum archive node or the Blockscout DB instance", - "data" => %{ - "latest_block_number" => _, - "latest_block_inserted_at" => _ + "error" => %{ + "code" => 5001, + "message" => + "There are no new blocks in the DB for the last 5 mins. Check the healthiness of the JSON RPC archive node or the DB." 
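# For downstream consumers, the reworked payload is easier to destructure than
# the old flat error fields. An illustrative (not prescriptive) client-side
# match against the decoded body, assuming the response was already read:
#
#   {:ok, %{"healthy" => false, "error" => %{"code" => code, "message" => message}}} =
#     Jason.decode(request.resp_body)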
+ }, + "metadata" => %{ + "latest_block" => %{ + "number" => _, + "timestamp" => _ + } } } = Poison.decode!(request.resp_body) end - defp api_v1_health_path(conn, action) do - "/api" <> ApiRoutes.api_v1_health_path(conn, action) + defp api_health_path(conn, action) do + "/api" <> ApiRoutes.health_path(conn, action) end end diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/search_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/search_controller_test.exs index 2f66211bb46f..d567aa4edc1a 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/search_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/search_controller_test.exs @@ -2,19 +2,8 @@ defmodule BlockScoutWeb.API.V2.SearchControllerTest do use BlockScoutWeb.ConnCase alias Explorer.Chain.{Address, Block} - alias Explorer.Repo alias Explorer.Tags.AddressTag - - setup do - insert(:block) - insert(:unique_smart_contract) - insert(:unique_token) - insert(:transaction) - address = insert(:address) - insert(:unique_address_name, address: address) - - :ok - end + alias Plug.Conn.Query describe "/search" do test "search block", %{conn: conn} do @@ -49,6 +38,24 @@ defmodule BlockScoutWeb.API.V2.SearchControllerTest do assert item["timestamp"] == block.timestamp |> to_string() |> String.replace(" ", "T") end + test "search block with small and short number", %{conn: conn} do + block = insert(:block, number: 1) + + request = get(conn, "/api/v2/search?q=#{block.number}") + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 1 + assert response["next_page_params"] == nil + + item = Enum.at(response["items"], 0) + + assert item["type"] == "block" + assert item["block_number"] == block.number + assert item["block_hash"] == to_string(block.hash) + assert item["url"] =~ to_string(block.hash) + assert item["timestamp"] == block.timestamp |> to_string() |> String.replace(" ", "T") + end + test "search reorg", %{conn: conn} do block = insert(:block, consensus: false) @@ -133,7 +140,7 @@ defmodule BlockScoutWeb.API.V2.SearchControllerTest do assert item["type"] == "contract" assert item["name"] == name - request_2 = get(conn, "/api/v2/search", response["next_page_params"]) + request_2 = get(conn, "/api/v2/search", response["next_page_params"] |> Query.encode() |> Query.decode()) assert response_2 = json_response(request_2, 200) assert Enum.count(response_2["items"]) == 1 @@ -147,6 +154,152 @@ defmodule BlockScoutWeb.API.V2.SearchControllerTest do assert item not in response["items"] end + test "check pagination #1", %{conn: conn} do + name = "contract" + contracts = for(i <- 0..50, do: insert(:smart_contract, name: "#{name} #{i}")) |> Enum.sort_by(fn x -> x.name end) + + tokens = + for i <- 0..50, do: insert(:token, name: "#{name} #{i}", circulating_market_cap: 10000 - i, holder_count: 0) + + labels = + for(i <- 0..50, do: insert(:address_to_tag, tag: build(:address_tag, display_name: "#{name} #{i}"))) + |> Enum.sort_by(fn x -> x.tag.display_name end) + + request = get(conn, "/api/v2/search?q=#{name}") + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 50 + assert response["next_page_params"] != nil + assert Enum.at(response["items"], 0)["type"] == "label" + assert Enum.at(response["items"], 49)["type"] == "label" + + request_2 = get(conn, "/api/v2/search", response["next_page_params"] |> Query.encode() |> Query.decode()) + assert response_2 = json_response(request_2, 
200) + + assert Enum.count(response_2["items"]) == 50 + assert response_2["next_page_params"] != nil + assert Enum.at(response_2["items"], 0)["type"] == "label" + assert Enum.at(response_2["items"], 1)["type"] == "token" + assert Enum.at(response_2["items"], 49)["type"] == "token" + + request_3 = get(conn, "/api/v2/search", response_2["next_page_params"] |> Query.encode() |> Query.decode()) + assert response_3 = json_response(request_3, 200) + + assert Enum.count(response_3["items"]) == 50 + assert response_3["next_page_params"] != nil + assert Enum.at(response_3["items"], 0)["type"] == "token" + assert Enum.at(response_3["items"], 1)["type"] == "token" + assert Enum.at(response_3["items"], 2)["type"] == "contract" + assert Enum.at(response_3["items"], 49)["type"] == "contract" + + request_4 = get(conn, "/api/v2/search", response_3["next_page_params"] |> Query.encode() |> Query.decode()) + assert response_4 = json_response(request_4, 200) + + assert Enum.count(response_4["items"]) == 3 + assert response_4["next_page_params"] == nil + assert Enum.all?(response_4["items"], fn x -> x["type"] == "contract" end) + + labels_from_api = response["items"] ++ [Enum.at(response_2["items"], 0)] + + assert labels + |> Enum.zip(labels_from_api) + |> Enum.all?(fn {label, item} -> + label.tag.display_name == item["name"] && item["type"] == "label" && + item["address"] == Address.checksum(label.address_hash) + end) + + tokens_from_api = Enum.slice(response_2["items"], 1, 49) ++ Enum.slice(response_3["items"], 0, 2) + + assert tokens + |> Enum.zip(tokens_from_api) + |> Enum.all?(fn {token, item} -> + token.name == item["name"] && item["type"] == "token" && + item["address"] == Address.checksum(token.contract_address_hash) + end) + + contracts_from_api = Enum.slice(response_3["items"], 2, 48) ++ response_4["items"] + + assert contracts + |> Enum.zip(contracts_from_api) + |> Enum.all?(fn {contract, item} -> + contract.name == item["name"] && item["type"] == "contract" && + item["address"] == Address.checksum(contract.address_hash) + end) + end + + test "check pagination #2 (token should be ranged by fiat_value)", %{conn: conn} do + name = "contract" + contracts = for(i <- 0..50, do: insert(:smart_contract, name: "#{name} #{i}")) |> Enum.sort_by(fn x -> x.name end) + + tokens = + for i <- 0..50, do: insert(:token, name: "#{name} #{i}", fiat_value: 10000 - i, holder_count: 0) + + labels = + for(i <- 0..50, do: insert(:address_to_tag, tag: build(:address_tag, display_name: "#{name} #{i}"))) + |> Enum.sort_by(fn x -> x.tag.display_name end) + + request = get(conn, "/api/v2/search?q=#{name}") + assert response = json_response(request, 200) + + assert Enum.count(response["items"]) == 50 + assert response["next_page_params"] != nil + assert Enum.at(response["items"], 0)["type"] == "label" + assert Enum.at(response["items"], 49)["type"] == "label" + + request_2 = get(conn, "/api/v2/search", response["next_page_params"] |> Query.encode() |> Query.decode()) + assert response_2 = json_response(request_2, 200) + + assert Enum.count(response_2["items"]) == 50 + assert response_2["next_page_params"] != nil + assert Enum.at(response_2["items"], 0)["type"] == "label" + assert Enum.at(response_2["items"], 1)["type"] == "token" + assert Enum.at(response_2["items"], 49)["type"] == "token" + + request_3 = get(conn, "/api/v2/search", response_2["next_page_params"] |> Query.encode() |> Query.decode()) + assert response_3 = json_response(request_3, 200) + + assert Enum.count(response_3["items"]) == 50 + assert 
response_3["next_page_params"] != nil + assert Enum.at(response_3["items"], 0)["type"] == "token" + assert Enum.at(response_3["items"], 1)["type"] == "token" + assert Enum.at(response_3["items"], 2)["type"] == "contract" + assert Enum.at(response_3["items"], 49)["type"] == "contract" + + request_4 = get(conn, "/api/v2/search", response_3["next_page_params"] |> Query.encode() |> Query.decode()) + assert response_4 = json_response(request_4, 200) + + assert Enum.count(response_4["items"]) == 3 + assert response_4["next_page_params"] == nil + assert Enum.all?(response_4["items"], fn x -> x["type"] == "contract" end) + + labels_from_api = response["items"] ++ [Enum.at(response_2["items"], 0)] + + assert labels + |> Enum.zip(labels_from_api) + |> Enum.all?(fn {label, item} -> + label.tag.display_name == item["name"] && item["type"] == "label" && + item["address"] == Address.checksum(label.address_hash) + end) + + tokens_from_api = Enum.slice(response_2["items"], 1, 49) ++ Enum.slice(response_3["items"], 0, 2) + + assert tokens + |> Enum.zip(tokens_from_api) + |> Enum.all?(fn {token, item} -> + token.name == item["name"] && item["type"] == "token" && + item["address"] == Address.checksum(token.contract_address_hash) + end) + + contracts_from_api = Enum.slice(response_3["items"], 2, 48) ++ response_4["items"] + + assert contracts + |> Enum.zip(contracts_from_api) + |> Enum.all?(fn {contract, item} -> + contract.name == item["name"] && item["type"] == "contract" && + item["address"] == Address.checksum(contract.address_hash) + end) + end + test "search token", %{conn: conn} do token = insert(:unique_token) @@ -219,22 +372,30 @@ defmodule BlockScoutWeb.API.V2.SearchControllerTest do end test "search transaction with timestamp", %{conn: conn} do - transaction = :transaction |> insert() |> with_block() + transaction = :transaction |> insert() + block = insert(:block, hash: transaction.hash) + transaction |> with_block(block) request = get(conn, "/api/v2/search?q=#{transaction.hash}") assert response = json_response(request, 200) - assert Enum.count(response["items"]) == 1 + assert Enum.count(response["items"]) == 2 assert response["next_page_params"] == nil - item = Enum.at(response["items"], 0) + transaction_item = Enum.find(response["items"], fn x -> x["type"] == "transaction" end) - assert item["type"] == "transaction" - assert item["transaction_hash"] == to_string(transaction.hash) - assert item["url"] =~ to_string(transaction.hash) + assert transaction_item["type"] == "transaction" + assert transaction_item["transaction_hash"] == to_string(transaction.hash) + assert transaction_item["url"] =~ to_string(transaction.hash) + + assert transaction_item["timestamp"] == + block.timestamp |> to_string() |> String.replace(" ", "T") - assert item["timestamp"] == - Repo.preload(transaction, [:block]).block.timestamp |> to_string() |> String.replace(" ", "T") + block_item = Enum.find(response["items"], fn x -> x["type"] == "block" end) + assert block_item["type"] == "block" + assert block_item["block_hash"] == to_string(block.hash) + assert block_item["url"] =~ to_string(block.hash) + assert transaction_item["timestamp"] == block_item["timestamp"] end test "search tags", %{conn: conn} do @@ -256,7 +417,7 @@ defmodule BlockScoutWeb.API.V2.SearchControllerTest do end test "check that simultaneous search of ", %{conn: conn} do - block = insert(:block) + block = insert(:block, number: 10000) insert(:smart_contract, name: to_string(block.number)) insert(:token, name: to_string(block.number)) diff --git 
a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs index c7a25a5d2dc7..407d50026717 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/smart_contract_controller_test.exs @@ -3502,6 +3502,41 @@ defmodule BlockScoutWeb.API.V2.SmartContractControllerTest do assert sc["address"]["is_contract"] == true end + test "get filtered smart contracts when flags are set", %{conn: conn} do + smart_contract = insert(:smart_contract, abi: nil, language: :yul) + insert(:smart_contract) + request = get(conn, "/api/v2/smart-contracts", %{"filter" => "yul"}) + + assert %{"items" => [sc], "next_page_params" => nil} = json_response(request, 200) + compare_item(smart_contract, sc) + assert sc["address"]["is_verified"] == true + assert sc["address"]["is_contract"] == true + end + + test "get filtered smart contracts when language is set", %{conn: conn} do + smart_contract = insert(:smart_contract, is_vyper_contract: true, language: :vyper) + insert(:smart_contract, is_vyper_contract: false) + request = get(conn, "/api/v2/smart-contracts", %{"filter" => "vyper"}) + + assert %{"items" => [sc], "next_page_params" => nil} = json_response(request, 200) + compare_item(smart_contract, sc) + assert sc["address"]["is_verified"] == true + assert sc["address"]["is_contract"] == true + end + + if Application.compile_env(:explorer, :chain_type) == :zilliqa do + test "get filtered scilla smart contracts when language is set", %{conn: conn} do + smart_contract = insert(:smart_contract, language: :scilla) + insert(:smart_contract) + request = get(conn, "/api/v2/smart-contracts", %{"filter" => "scilla"}) + + assert %{"items" => [sc], "next_page_params" => nil} = json_response(request, 200) + compare_item(smart_contract, sc) + assert sc["address"]["is_verified"] == true + assert sc["address"]["is_contract"] == true + end + end + test "check pagination", %{conn: conn} do smart_contracts = for _ <- 0..50 do @@ -3638,7 +3673,7 @@ defmodule BlockScoutWeb.API.V2.SmartContractControllerTest do assert smart_contract.optimization == json["optimization_enabled"] - assert json["language"] == if(smart_contract.is_vyper_contract, do: "vyper", else: "solidity") + assert json["language"] == smart_contract_language(smart_contract) assert json["verified_at"] assert !is_nil(smart_contract.constructor_arguments) == json["has_constructor_args"] assert Address.checksum(smart_contract.address_hash) == json["address"]["hash"] @@ -3680,8 +3715,11 @@ defmodule BlockScoutWeb.API.V2.SmartContractControllerTest do is_nil(smart_contract.abi) -> "yul" - true -> + is_nil(smart_contract.language) -> "solidity" + + true -> + to_string(smart_contract.language) end end diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/verification_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/verification_controller_test.exs index f8ff110a5fe7..102f1b28b8f5 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/verification_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/verification_controller_test.exs @@ -105,7 +105,7 @@ defmodule BlockScoutWeb.API.V2.VerificationControllerTest do Application.put_env(:explorer, :solc_bin_api_url, "https://solc-bin.ethereum.org") - contract_address = 
insert(:contract_address, contract_code: "0x")
+    contract_address = insert(:contract_address, contract_code: "0x01")
 
     :transaction
     |> insert(
diff --git a/apps/block_scout_web/test/block_scout_web/views/abi_encoded_value_view_test.exs b/apps/block_scout_web/test/block_scout_web/views/abi_encoded_value_view_test.exs
index 2953bb801ebe..9d29afe2449e 100644
--- a/apps/block_scout_web/test/block_scout_web/views/abi_encoded_value_view_test.exs
+++ b/apps/block_scout_web/test/block_scout_web/views/abi_encoded_value_view_test.exs
@@ -13,8 +13,6 @@ defmodule BlockScoutWeb.ABIEncodedValueViewTest do
         other -> other
       end
-      |> Phoenix.HTML.Safe.to_iodata()
-      |> IO.iodata_to_binary()
   end
 
   defp copy_text(type, value) do
diff --git a/apps/ethereum_jsonrpc/README.md b/apps/ethereum_jsonrpc/README.md
index 945c793da83e..704470fc3a3b 100644
--- a/apps/ethereum_jsonrpc/README.md
+++ b/apps/ethereum_jsonrpc/README.md
@@ -15,7 +15,7 @@ config :ethereum_jsonrpc,
 ```
 
 Note: the tracing node URL is provided separately from `:url`,
-via `:trace_url`. The trace URL and is used for
+via `:trace_url`. The trace URL is used for
 `fetch_internal_transactions`, which is only a supported method
 on tracing nodes. The `:http` option is passed directly to the HTTP
 library (`HTTPoison`), which forwards the options down to `:hackney`.
diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc.ex
index 69012ce9f1be..0503964b8d77 100644
--- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc.ex
+++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc.ex
@@ -292,13 +292,15 @@ defmodule EthereumJSONRPC do
   @doc """
   Fetches blocks by block hashes.
 
-  Transaction data is included for each block.
+  Transaction data is included for each block by default.
+  Set the `with_transactions?` parameter to `false` to exclude transaction data.
   """
-  @spec fetch_blocks_by_hash([hash()], json_rpc_named_arguments) :: {:ok, Blocks.t()} | {:error, reason :: term}
-  def fetch_blocks_by_hash(block_hashes, json_rpc_named_arguments) do
+  @spec fetch_blocks_by_hash([hash()], json_rpc_named_arguments, boolean()) ::
+          {:ok, Blocks.t()} | {:error, reason :: term}
+  def fetch_blocks_by_hash(block_hashes, json_rpc_named_arguments, with_transactions? \\ true) do
     block_hashes
     |> Enum.map(fn block_hash -> %{hash: block_hash} end)
-    |> fetch_blocks_by_params(&Block.ByHash.request/1, json_rpc_named_arguments)
+    |> fetch_blocks_by_params(&Block.ByHash.request(&1, with_transactions?), json_rpc_named_arguments)
   end
 
   @doc """
@@ -364,16 +366,6 @@ defmodule EthereumJSONRPC do
     * `{:error, reason}` - other JSONRPC error.
""" - @spec fetch_block_number_by_tag_op_version(tag(), json_rpc_named_arguments) :: - {:ok, non_neg_integer()} | {:error, reason :: :invalid_tag | :not_found | term()} - def fetch_block_number_by_tag_op_version(tag, json_rpc_named_arguments) - when tag in ~w(earliest latest pending safe) do - %{id: 0, tag: tag} - |> Block.ByTag.request() - |> json_rpc(json_rpc_named_arguments) - |> Block.ByTag.number_from_result() - end - @spec fetch_block_number_by_tag(tag(), json_rpc_named_arguments) :: {:ok, non_neg_integer()} | {:error, reason :: :invalid_tag | :not_found | term()} def fetch_block_number_by_tag(tag, json_rpc_named_arguments) when tag in ~w(earliest latest pending safe) do @@ -458,6 +450,11 @@ defmodule EthereumJSONRPC do iex> id_to_params([%{block: 1}, %{block: 2}]) %{0 => %{block: 1}, 1 => %{block: 2}} """ + @spec id_to_params([]) :: %{} + def id_to_params([]) do + %{} + end + @spec id_to_params([params]) :: %{id => params} when id: non_neg_integer(), params: any() def id_to_params(params_list) do params_list diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/application.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/application.ex index 7a9e1dae1369..a03f80e27603 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/application.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/application.ex @@ -14,15 +14,19 @@ defmodule EthereumJSONRPC.Application do rolling_window_opts = Keyword.fetch!(config, :rolling_window_opts) - [ - :hackney_pool.child_spec(:ethereum_jsonrpc, recv_timeout: 60_000, timeout: 60_000, max_connections: 1000), - Supervisor.child_spec({RollingWindow, [rolling_window_opts]}, id: RollingWindow.ErrorThrottle), - {EndpointAvailabilityObserver, []}, - {EndpointAvailabilityChecker, []} - ] - |> add_throttle_rolling_window(config) - |> add_ipc_client() - |> Supervisor.start_link(strategy: :one_for_one, name: EthereumJSONRPC.Supervisor) + if Application.get_env(:nft_media_handler, :standalone_media_worker?) 
do
+      Supervisor.start_link([], strategy: :one_for_one, name: EthereumJSONRPC.Supervisor)
+    else
+      [
+        :hackney_pool.child_spec(:ethereum_jsonrpc, recv_timeout: 60_000, timeout: 60_000, max_connections: 1000),
+        Supervisor.child_spec({RollingWindow, [rolling_window_opts]}, id: RollingWindow.ErrorThrottle),
+        {EndpointAvailabilityObserver, []},
+        {EndpointAvailabilityChecker, []}
+      ]
+      |> add_throttle_rolling_window(config)
+      |> add_ipc_client()
+      |> Supervisor.start_link(strategy: :one_for_one, name: EthereumJSONRPC.Supervisor)
+    end
   end
 
   defp add_throttle_rolling_window(children, config) do
diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/arbitrum.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/arbitrum.ex
new file mode 100644
index 000000000000..fc66abe73535
--- /dev/null
+++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/arbitrum.ex
@@ -0,0 +1,383 @@
+defmodule EthereumJSONRPC.Arbitrum do
+  @moduledoc """
+    Arbitrum-specific routines used to fetch and process
+    data from the associated JSONRPC endpoint.
+  """
+
+  import EthereumJSONRPC
+
+  alias EthereumJSONRPC.Arbitrum.Constants.Contracts, as: ArbitrumContracts
+  alias EthereumJSONRPC.Arbitrum.Constants.Events, as: ArbitrumEvents
+
+  require Logger
+  alias ABI.TypeDecoder
+
+  @typedoc """
+    This type describes the significant fields which can be extracted from
+    the L2ToL1Tx event emitted by the ArbSys contract:
+
+    * `"message_id"` - The message identifier
+    * `"caller"` - `t:EthereumJSONRPC.address/0` of the message initiator
+    * `"destination"` - `t:EthereumJSONRPC.address/0` to which the message should be sent after the claiming
+    * `"arb_block_number"` - Rollup block number in which the message was initiated
+    * `"eth_block_number"` - An associated parent chain block number
+    * `"timestamp"` - When the message was initiated
+    * `"callvalue"` - Amount of ETH which should be transferred to the `destination` address on message execution
+    * `"data"` - Raw calldata which should be set for the execution transaction (usually contains bridge interaction calldata)
+  """
+  @type l2_to_l1_event :: %{
+          :message_id => non_neg_integer(),
+          :caller => EthereumJSONRPC.address(),
+          :destination => EthereumJSONRPC.address(),
+          :arb_block_number => non_neg_integer(),
+          :eth_block_number => non_neg_integer(),
+          :timestamp => non_neg_integer(),
+          :callvalue => non_neg_integer(),
+          :data => binary()
+        }
+
+  @doc """
+    Retrieves specific contract addresses associated with the Arbitrum rollup contract.
+
+    This function fetches the addresses of the bridge, sequencer inbox, and outbox
+    contracts related to the specified Arbitrum rollup address. It invokes one of
+    the contract methods `bridge()`, `sequencerInbox()`, or `outbox()` based on
+    the `contracts_set` parameter to obtain the required information.
+
+    ## Parameters
+    - `rollup_address`: The address of the Arbitrum rollup contract from which
+      information is being retrieved.
+    - `contracts_set`: A symbol indicating the set of contracts to retrieve (`:bridge`
+      for the bridge contract, `:inbox_outbox` for the sequencer inbox and outbox
+      contracts).
+    - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection.
+
+    ## Returns
+    - A map with keys corresponding to the contract types (`:bridge`, `:sequencer_inbox`,
+      `:outbox`) and values representing the contract addresses.
+  """
+  @spec get_contracts_for_rollup(
+          EthereumJSONRPC.address(),
+          :bridge | :inbox_outbox,
+          EthereumJSONRPC.json_rpc_named_arguments()
+        ) :: %{(:bridge | :sequencer_inbox | :outbox) => binary()}
+  def get_contracts_for_rollup(rollup_address, contracts_set, json_rpc_named_arguments)
+
+  def get_contracts_for_rollup(rollup_address, :bridge, json_rpc_named_arguments) do
+    call_simple_getters_in_rollup_contract(
+      rollup_address,
+      [ArbitrumContracts.bridge_selector()],
+      json_rpc_named_arguments
+    )
+  end
+
+  def get_contracts_for_rollup(rollup_address, :inbox_outbox, json_rpc_named_arguments) do
+    call_simple_getters_in_rollup_contract(
+      rollup_address,
+      [ArbitrumContracts.sequencer_inbox_selector(), ArbitrumContracts.outbox_selector()],
+      json_rpc_named_arguments
+    )
+  end
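Resolving the satellite contract addresses is then a one-call affair once a JSON RPC transport is configured. A minimal usage sketch, not part of the patch: the zero address stands in for a real rollup address, and the transport options only mirror the shape of the app's dev config.

```elixir
alias EthereumJSONRPC.Arbitrum

# Placeholder transport config; in practice this comes from the
# :ethereum_jsonrpc application environment.
json_rpc_named_arguments = [
  transport: EthereumJSONRPC.HTTP,
  transport_options: [
    http: EthereumJSONRPC.HTTP.HTTPoison,
    urls: ["http://localhost:8545"],
    http_options: []
  ]
]

# Resolve the sequencer inbox and outbox addresses in a single pass.
%{sequencer_inbox: sequencer_inbox, outbox: outbox} =
  Arbitrum.get_contracts_for_rollup(
    "0x0000000000000000000000000000000000000000",
    :inbox_outbox,
    json_rpc_named_arguments
  )
```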
+  # Calls getter functions on a rollup contract and collects their return values.
+  #
+  # This function is designed to interact with a rollup contract and invoke specified getter methods.
+  # It creates a list of requests for each method ID, executes these requests with retries as needed,
+  # and then maps the results to the corresponding method IDs.
+  #
+  # ## Parameters
+  # - `rollup_address`: The address of the rollup contract to interact with.
+  # - `method_ids`: A list of method identifiers representing the getter functions to be called.
+  # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection.
+  #
+  # ## Returns
+  # - A map where each key is a method identifier converted to an atom, and each value is the
+  #   response from calling the respective method on the contract.
+  defp call_simple_getters_in_rollup_contract(rollup_address, method_ids, json_rpc_named_arguments) do
+    method_ids
+    |> Enum.map(fn method_id ->
+      %{
+        contract_address: rollup_address,
+        method_id: method_id,
+        args: []
+      }
+    end)
+    |> EthereumJSONRPC.execute_contract_functions(ArbitrumContracts.rollup_contract_abi(), json_rpc_named_arguments)
+    |> Enum.zip(method_ids)
+    |> Enum.reduce(%{}, fn {{:ok, [response]}, method_id}, retval ->
+      Map.put(retval, ArbitrumContracts.atomized_rollup_contract_selector(method_id), response)
+    end)
+  end
+
+  @doc """
+    Retrieves the latest confirmed node index for the withdrawals Merkle tree.
+
+    This function fetches the latest confirmed L2->L1 node from the Arbitrum rollup contract.
+    It invokes the contract method `latestConfirmed()` to obtain the required information.
+
+    ## Parameters
+    - `rollup_address`: The address of the Arbitrum rollup contract from which
+      information is being retrieved.
+    - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection (L1 chain).
+
+    ## Returns
+    - `{:ok, number}` - where `number` is a positive integer representing the latest confirmed node index
+    - `{:error, _}` - in case of any failure
+  """
+  @spec get_latest_confirmed_node_index(
+          EthereumJSONRPC.address(),
+          EthereumJSONRPC.json_rpc_named_arguments()
+        ) :: {:ok, non_neg_integer()} | {:error, any()}
+  def get_latest_confirmed_node_index(rollup_address, json_rpc_l1_named_arguments) do
+    case read_contract(
+           rollup_address,
+           ArbitrumContracts.latest_confirmed_selector(),
+           [],
+           ArbitrumContracts.rollup_contract_abi(),
+           json_rpc_l1_named_arguments
+         ) do
+      {:ok, [value]} ->
+        {:ok, value}
+
+      {:error, err} ->
+        Logger.error("rollup_contract.latestConfirmed() error occurred: #{inspect(err)}")
+        {:error, err}
+    end
+  end
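This getter composes naturally with `get_node_creation_block_number/3` below when a caller needs to know which L1 block the most recently confirmed node landed in. A sketch under stated assumptions: `rollup_address` and the L1 connection arguments are supplied by the caller, and `MyApp.ArbitrumHelper` is a hypothetical wrapper, not part of this changeset.

```elixir
defmodule MyApp.ArbitrumHelper do
  alias EthereumJSONRPC.Arbitrum

  # Returns {:ok, %{node_index: ..., created_at_l1_block: ...}} on success,
  # or the first {:error, _} tuple encountered.
  def latest_confirmed_node_origin(rollup_address, json_rpc_l1_named_arguments) do
    with {:ok, node_index} <-
           Arbitrum.get_latest_confirmed_node_index(rollup_address, json_rpc_l1_named_arguments),
         {:ok, l1_block} <-
           Arbitrum.get_node_creation_block_number(rollup_address, node_index, json_rpc_l1_named_arguments) do
      {:ok, %{node_index: node_index, created_at_l1_block: l1_block}}
    end
  end
end
```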
+  @doc """
+    Retrieves the L1 block number in which the rollup node with the provided index was created.
+
+    This function fetches node information for the specified node index.
+    It invokes the Rollup contract method `getNode(nodeNum)` to obtain the required data.
+
+    ## Parameters
+    - `rollup_address`: The address of the Arbitrum rollup contract from which
+      information is being retrieved.
+    - `node_index`: The index of the requested rollup node.
+    - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection (L1).
+
+    ## Returns
+    - `{:ok, number}` - where `number` is the L1 block number in which the rollup node was created
+    - `{:error, _}` - in case of any failure
+  """
+  @spec get_node_creation_block_number(
+          EthereumJSONRPC.address(),
+          non_neg_integer(),
+          EthereumJSONRPC.json_rpc_named_arguments()
+        ) :: {:ok, non_neg_integer()} | {:error, any()}
+  def get_node_creation_block_number(rollup_address, node_index, json_rpc_l1_named_arguments) do
+    case read_contract(
+           rollup_address,
+           ArbitrumContracts.get_node_selector(),
+           [node_index],
+           ArbitrumContracts.rollup_contract_abi(),
+           json_rpc_l1_named_arguments
+         ) do
+      # `createdAtBlock` property of the node tuple
+      {:ok, [fields]} -> {:ok, fields |> Kernel.elem(10)}
+      {:error, err} -> {:error, err}
+    end
+  end
+
+  @doc """
+    Parses an L2-to-L1 event, extracting relevant information from the event's data.
+
+    This function takes an L2ToL1Tx event emitted by the ArbSys contract and parses
+    its fields to extract the needed message properties.
+
+    ## Parameters
+    - `event`: A log entry representing an L2-to-L1 message event.
+
+    ## Returns
+    - A map describing the L2-to-L1 message.
+  """
+  @spec l2_to_l1_event_parse(%{
+          :data => binary(),
+          :second_topic => binary(),
+          :fourth_topic => binary(),
+          optional(atom()) => any()
+        }) :: l2_to_l1_event()
+  def l2_to_l1_event_parse(event) do
+    [
+      caller,
+      arb_block_number,
+      eth_block_number,
+      timestamp,
+      callvalue,
+      data
+    ] =
+      event.data
+      |> decode_data(ArbitrumEvents.l2_to_l1_unindexed_params())
+
+    position =
+      case quantity_to_integer(event.fourth_topic) do
+        nil -> 0
+        number -> number
+      end
+
+    caller_string = value_to_address(caller)
+    destination_string = value_to_address(event.second_topic)
+
+    %{
+      :message_id => position,
+      :caller => caller_string,
+      :destination => destination_string,
+      :arb_block_number => arb_block_number,
+      :eth_block_number => eth_block_number,
+      :timestamp => timestamp,
+      :callvalue => callvalue,
+      :data => data
+    }
+  end
+
+  # Decodes ABI-encoded data in accordance with the provided types
+  @spec decode_data(binary() | map(), list()) :: list() | nil
+  defp decode_data("0x", types) do
+    for _ <- types, do: nil
+  end
+
+  defp decode_data("0x" <> encoded_data, types) do
+    decode_data(encoded_data, types)
+  end
+
+  defp decode_data(encoded_data, types) do
+    encoded_data
+    |> Base.decode16!(case: :mixed)
+    |> TypeDecoder.decode_raw(types)
+  end
+
+  # Casts a value into an Ethereum address (hex-string, 0x-prefixed)
+  @spec value_to_address(binary()) :: String.t()
+  defp value_to_address(value) do
+    hex =
+      cond do
+        is_binary(value) and String.starts_with?(value, "0x") -> String.trim_leading(value, "0x")
+        is_binary(value) -> Base.encode16(value, case: :lower)
+        true -> raise ArgumentError, "Unsupported address format"
+      end
+
+    padded_hex =
+      hex
+      |> String.trim_leading("0")
+      |> String.pad_leading(40, "0")
+
+    "0x" <> padded_hex
+  end
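`l2_to_l1_event_parse/1` expects raw log fields (`data`, `second_topic`, `fourth_topic`) rather than a decoded struct, so a typical caller first filters fetched logs by the `L2ToL1Tx` topic. A sketch, assuming `logs` is a list of log maps carrying a `first_topic` field (an assumption about the caller's input shape, not something defined in this module); `MyApp.L2ToL1Messages` is hypothetical.

```elixir
defmodule MyApp.L2ToL1Messages do
  alias EthereumJSONRPC.Arbitrum
  alias EthereumJSONRPC.Arbitrum.Constants.Events, as: ArbitrumEvents

  # Keeps only L2ToL1Tx events and parses each one into an l2_to_l1_event() map.
  def parse_all(logs) do
    logs
    |> Enum.filter(&(&1.first_topic == ArbitrumEvents.l2_to_l1()))
    |> Enum.map(&Arbitrum.l2_to_l1_event_parse/1)
  end
end
```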
+  @doc """
+    Calculates the proof needed to claim an L2->L1 message.
+
+    Calls the `constructOutboxProof` method of the Node Interface contract on the
+    rollup to obtain the data needed for an L2->L1 message claim.
+
+    ## Parameters
+    - `node_interface_address`: Address of the node interface contract
+    - `size`: Index of the latest confirmed node (cumulative number of confirmed
+      L2->L1 transactions)
+    - `leaf`: Position of the L2->L1 message (`position` field of the associated
+      `L2ToL1Tx` event). Must be less than `size`
+    - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC
+      connection
+
+    ## Returns
+    - `{:ok, [send, root, proof]}` where `proof` is an array of 32-byte values
+      needed to execute messages
+    - `{:error, _}` if size is less than or equal to leaf, or if an RPC error
+      occurs
+  """
+  @spec construct_outbox_proof(
+          EthereumJSONRPC.address(),
+          non_neg_integer(),
+          non_neg_integer(),
+          EthereumJSONRPC.json_rpc_named_arguments()
+        ) :: {:ok, any()} | {:error, :invalid}
+  def construct_outbox_proof(_, size, leaf, _) when size <= leaf do
+    {:error, :invalid}
+  end
+
+  def construct_outbox_proof(node_interface_address, size, leaf, json_rpc_named_arguments) do
+    case read_contract(
+           node_interface_address,
+           ArbitrumContracts.construct_outbox_proof_selector(),
+           [size, leaf],
+           ArbitrumContracts.node_interface_contract_abi(),
+           json_rpc_named_arguments
+         ) do
+      {:ok, proof} ->
+        {:ok, proof}
+
+      {:error, err} ->
+        Logger.error("node_interface_contract.constructOutboxProof error occurred: #{inspect(err)}")
+        {:error, :invalid}
+    end
+  end
+
+  @doc """
+    Checks whether an outgoing L2->L1 message was spent.
+
+    To do that, the `isSpent(uint256 index)` method of the `Outbox` contract
+    deployed on the L1 chain is invoked.
+
+    ## Parameters
+    - `outbox_contract`: Address of the Outbox contract (L1 chain)
+    - `index`: Position (index) of the requested L2->L1 message.
+    - `json_l1_rpc_named_arguments`: Configuration parameters for the JSON RPC
+      connection for the L1 chain.
+
+    ## Returns
+    - `{:ok, is_spent}`, where `is_spent` equals `true` if the message was created, confirmed, and claimed on L1
+    - `{:error, _}` in case of any failure
+  """
+  @spec withdrawal_spent?(
+          EthereumJSONRPC.address(),
+          non_neg_integer(),
+          EthereumJSONRPC.json_rpc_named_arguments()
+        ) :: {:ok, boolean()} | {:error, any()}
+  def withdrawal_spent?(outbox_contract, position, json_l1_rpc_named_arguments) do
+    case read_contract(
+           outbox_contract,
+           ArbitrumContracts.is_spent_selector(),
+           [position],
+           ArbitrumContracts.outbox_contract_abi(),
+           json_l1_rpc_named_arguments
+         ) do
+      {:ok, [value]} ->
+        {:ok, value}
+
+      {:error, err} ->
+        Logger.error("outbox_contract.isSpent(position) error occurred: #{inspect(err)}")
+        {:error, err}
+    end
+  end
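Together, these two functions form the read side of the claim flow: check that the withdrawal has not been executed yet, then build the Merkle proof. A sketch under stated assumptions: the addresses, `size`, `position`, and both connection argument sets come from the caller, and `MyApp.Claims` is a hypothetical module, not part of this PR.

```elixir
defmodule MyApp.Claims do
  alias EthereumJSONRPC.Arbitrum

  # Returns {:ok, proof} when the message is still claimable, or
  # {:error, :already_spent} when Outbox.isSpent reports it executed.
  def prepare(node_interface, outbox, size, position, json_rpc_l2, json_rpc_l1) do
    with {:ok, false} <- Arbitrum.withdrawal_spent?(outbox, position, json_rpc_l1),
         {:ok, [_send, _root, proof]} <-
           Arbitrum.construct_outbox_proof(node_interface, size, position, json_rpc_l2) do
      {:ok, proof}
    else
      {:ok, true} -> {:error, :already_spent}
      error -> error
    end
  end
end
```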
+  # Reads the specified contract by the provided selector and parameters from the RPC node
+  #
+  # ## Parameters
+  # - `contract_address`: The address of the contract to interact with.
+  # - `contract_selector`: Selector in the form of a 4-byte hex-string without the 0x prefix
+  # - `call_arguments`: List of the contract function parameters ([] if there are no parameters for the function)
+  # - `contract_abi`: The contract ABI which contains the invoked function description
+  # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection.
+  #
+  # ## Returns
+  # - `{:ok, term()}` in case of a successful call or `{:error, String.t()}` on error
+  @spec read_contract(
+          EthereumJSONRPC.address(),
+          String.t(),
+          [any()],
+          [map()],
+          EthereumJSONRPC.json_rpc_named_arguments()
+        ) :: EthereumJSONRPC.Contract.call_result()
+  defp read_contract(contract_address, contract_selector, call_arguments, contract_abi, json_rpc_named_arguments) do
+    [
+      %{
+        contract_address: contract_address,
+        method_id: contract_selector,
+        args: call_arguments
+      }
+    ]
+    |> EthereumJSONRPC.execute_contract_functions(contract_abi, json_rpc_named_arguments)
+    |> List.first()
+  end
+end
diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/arbitrum/constants/contracts.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/arbitrum/constants/contracts.ex
new file mode 100644
index 000000000000..37dcdcfe1007
--- /dev/null
+++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/arbitrum/constants/contracts.ex
@@ -0,0 +1,396 @@
+defmodule EthereumJSONRPC.Arbitrum.Constants.Contracts do
+  @moduledoc """
+    Provides constants and ABI definitions for Arbitrum-specific smart contracts.
+
+    This module contains function selectors, contract ABIs, and helper functions for
+    interacting with core Arbitrum protocol contracts, including the Rollup,
+    SequencerInbox, Outbox, and NodeInterface contracts.
+  """
+
+  @selector_outbox "ce11e6ab"
+  @selector_sequencer_inbox "ee35f327"
+  @selector_bridge "e78cea92"
+
+  @doc """
+    Returns selector of the `outbox()` function
+  """
+  @spec outbox_selector() :: <<_::64>>
+  def outbox_selector, do: @selector_outbox
+
+  @doc """
+    Returns selector of the `sequencerInbox()` function
+  """
+  @spec sequencer_inbox_selector() :: <<_::64>>
+  def sequencer_inbox_selector, do: @selector_sequencer_inbox
+
+  @doc """
+    Returns selector of the `bridge()` function
+  """
+  @spec bridge_selector() :: <<_::64>>
+  def bridge_selector, do: @selector_bridge
+
+  @doc """
+    Returns the atomized selector of a Rollup contract method
+
+    ## Parameters
+    - `selector`: The selector of the Rollup contract method
+
+    ## Returns
+    - One of the following atoms: `:outbox`, `:sequencer_inbox`, `:bridge`
+  """
+  @spec atomized_rollup_contract_selector(<<_::64>>) :: atom()
+  def atomized_rollup_contract_selector(@selector_outbox), do: :outbox
+  def atomized_rollup_contract_selector(@selector_sequencer_inbox), do: :sequencer_inbox
+  def atomized_rollup_contract_selector(@selector_bridge), do: :bridge
+
+  @doc """
+    Returns selector of the `latestConfirmed()` function
+  """
+  @spec latest_confirmed_selector() :: <<_::64>>
+  def latest_confirmed_selector, do: "65f7f80d"
+
+  @doc """
+    Returns selector of the `getNode(uint64 nodeNum)` function
+  """
+  @spec get_node_selector() :: <<_::64>>
+  def get_node_selector, do: "92c8134c"
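These selector/ABI pairs plug straight into `EthereumJSONRPC.execute_contract_functions/3`, which is exactly how `EthereumJSONRPC.Arbitrum.read_contract/5` uses them. A standalone sketch of reading `latestConfirmed()` this way; `rollup_address` and `json_rpc_named_arguments` are placeholders supplied by the caller, and `MyApp.RollupReader` is hypothetical.

```elixir
defmodule MyApp.RollupReader do
  alias EthereumJSONRPC.Arbitrum.Constants.Contracts, as: ArbitrumContracts

  # Returns {:ok, [node_index]} on success, {:error, reason} otherwise.
  def read_latest_confirmed(rollup_address, json_rpc_named_arguments) do
    [
      %{
        contract_address: rollup_address,
        method_id: ArbitrumContracts.latest_confirmed_selector(),
        args: []
      }
    ]
    |> EthereumJSONRPC.execute_contract_functions(
      ArbitrumContracts.rollup_contract_abi(),
      json_rpc_named_arguments
    )
    |> List.first()
  end
end
```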
+  @doc """
+    Returns ABI of the rollup contract
+  """
+  @spec rollup_contract_abi() :: [map()]
+  def rollup_contract_abi,
+    do: [
+      %{
+        "inputs" => [],
+        "name" => "outbox",
+        "outputs" => [
+          %{
+            "internalType" => "address",
+            "name" => "",
+            "type" => "address"
+          }
+        ],
+        "stateMutability" => "view",
+        "type" => "function"
+      },
+      %{
+        "inputs" => [],
+        "name" => "sequencerInbox",
+        "outputs" => [
+          %{
+            "internalType" => "address",
+            "name" => "",
+            "type" => "address"
+          }
+        ],
+        "stateMutability" => "view",
+        "type" => "function"
+      },
+      %{
+        "inputs" => [],
+        "name" => "bridge",
+        "outputs" => [
+          %{
+            "internalType" => "address",
+            "name" => "",
+            "type" => "address"
+          }
+        ],
+        "stateMutability" => "view",
+        "type" => "function"
+      },
+      %{
+        "inputs" => [],
+        "name" => "latestConfirmed",
+        "outputs" => [
+          %{
+            "internalType" => "uint64",
+            "name" => "",
+            "type" => "uint64"
+          }
+        ],
+        "stateMutability" => "view",
+        "type" => "function"
+      },
+      %{
+        "inputs" => [
+          %{
+            "internalType" => "uint64",
+            "name" => "",
+            "type" => "uint64"
+          }
+        ],
+        "name" => "getNode",
+        "outputs" => [
+          %{
+            "type" => "tuple",
+            "name" => "",
+            "internalType" => "struct Node",
+            "components" => [
+              %{"type" => "bytes32", "name" => "stateHash", "internalType" => "bytes32"},
+              %{"type" => "bytes32", "name" => "challengeHash", "internalType" => "bytes32"},
+              %{"type" => "bytes32", "name" => "confirmData", "internalType" => "bytes32"},
+              %{"type" => "uint64", "name" => "prevNum", "internalType" => "uint64"},
+              %{"type" => "uint64", "name" => "deadlineBlock", "internalType" => "uint64"},
+              %{"type" => "uint64", "name" => "noChildConfirmedBeforeBlock", "internalType" => "uint64"},
+              %{"type" => "uint64", "name" => "stakerCount", "internalType" => "uint64"},
+              %{"type" => "uint64", "name" => "childStakerCount", "internalType" => "uint64"},
+              %{"type" => "uint64", "name" => "firstChildBlock", "internalType" => "uint64"},
+              %{"type" => "uint64", "name" => "latestChildNumber", "internalType" => "uint64"},
+              %{"type" => "uint64", "name" => "createdAtBlock", "internalType" => "uint64"},
+              %{"type" => "bytes32", "name" => "nodeHash", "internalType" => "bytes32"}
+            ]
+          }
+        ],
+        "stateMutability" => "view",
+        "type" => "function"
+      }
+    ]
+
+  @doc """
+    Returns the address of the NodeInterface precompile on the Arbitrum chain
+  """
+  @spec node_interface_contract_address() :: <<_::336>>
+  def node_interface_contract_address, do: "0x00000000000000000000000000000000000000c8"
+
+  @doc """
+    Returns selector of the `constructOutboxProof(uint64 size, uint64 leaf)` function
+  """
+  @spec construct_outbox_proof_selector() :: <<_::64>>
+  def construct_outbox_proof_selector, do: "42696350"
+
+  @doc """
+    Returns selector of the `findBatchContainingBlock(uint64 blockNum)` function
+  """
+  @spec find_batch_containing_block_selector() :: <<_::64>>
+  def find_batch_containing_block_selector, do: "81f1adaf"
+
+  @doc """
+    Returns ABI of the node interface contract
+  """
+  @spec node_interface_contract_abi() :: [map()]
+  def node_interface_contract_abi,
+    do: [
+      %{
+        "inputs" => [
+          %{
+            "internalType" => "uint64",
+            "name" => "size",
+            "type" => "uint64"
+          },
+          %{
+            "internalType" => "uint64",
+            "name" => "leaf",
+            "type" => "uint64"
+          }
+        ],
+        "name" => "constructOutboxProof",
+        "outputs" => [
+          %{
+            "internalType" => "bytes32",
+            "name" => "send",
+            "type" => "bytes32"
+          },
+          %{
+            "internalType" => "bytes32",
+            "name" => "root",
+            "type" => "bytes32"
+          },
+          %{
+            "internalType" => "bytes32[]",
+            "name" => "proof",
+            "type" => "bytes32[]"
+          }
+        ],
+        "stateMutability" => "view",
+        "type" => "function"
+      },
+      %{
+        "inputs" => [
+          %{
+            "internalType" => "uint64",
+            "name" => "blockNum",
+            "type" => "uint64"
+          }
+        ],
+        "name" => "findBatchContainingBlock",
+        "outputs" => [
+          %{
+            "internalType" => "uint64",
+            "name" => "batch",
+            "type" => "uint64"
+          }
+        ],
+        "stateMutability" => "view",
+        "type" => "function"
+      }
+    ]
+
+  @doc """
+    Returns selector of the `isSpent(uint256 index)` function
+  """
+  @spec is_spent_selector() :: <<_::64>>
+  def is_spent_selector, do: "5a129efe"
+  # credo:disable-for-previous-line Credo.Check.Readability.PredicateFunctionNames
+
+  @doc """
+    Returns ABI of the outbox contract
+  """
+  @spec outbox_contract_abi() :: [map()]
+  def outbox_contract_abi,
+    do: [
+      %{
+        "inputs" => [
+          %{
+            "internalType" => "uint256",
+            "name" => "index",
"type" => "uint256" + } + ], + "name" => "isSpent", + "outputs" => [ + %{ + "internalType" => "bool", + "name" => "", + "type" => "bool" + } + ], + "stateMutability" => "view", + "type" => "function" + } + ] + + @doc """ + Returns selector with ABI (object of `ABI.FunctionSelector`) of the `finalizeInboundTransfer(...)` function + """ + def finalize_inbound_transfer_selector_with_abi, + do: %ABI.FunctionSelector{ + function: "finalizeInboundTransfer", + returns: [], + types: [ + # _token + :address, + # _from + :address, + # _to + :address, + # _amount + {:uint, 256}, + # data + :bytes + ] + } + + @doc """ + Returns selector with ABI (object of `ABI.FunctionSelector`) of the `executeTransaction(...)` function + """ + def execute_transaction_selector_with_abi, + do: %ABI.FunctionSelector{ + function: "executeTransaction", + returns: [], + types: [ + # proof + {:array, {:bytes, 32}}, + # index + {:uint, 256}, + # l2Sender + :address, + # to + :address, + # l2Block + {:uint, 256}, + # l1Block + {:uint, 256}, + # l2Timestamp + {:uint, 256}, + # value + {:uint, 256}, + # data + :bytes + ], + type: :function, + inputs_indexed: [] + } + + @doc """ + Returns selector of the `getKeysetCreationBlock(bytes32 ksHash)` function + """ + @spec get_keyset_creation_block_selector() :: <<_::64>> + def get_keyset_creation_block_selector, do: "258f0495" + + @doc """ + Returns ABI of the sequencer inbox contract + """ + @spec sequencer_inbox_contract_abi() :: [map()] + def sequencer_inbox_contract_abi, + do: [ + %{ + "inputs" => [%{"internalType" => "bytes32", "name" => "ksHash", "type" => "bytes32"}], + "name" => "getKeysetCreationBlock", + "outputs" => [%{"internalType" => "uint256", "name" => "", "type" => "uint256"}], + "stateMutability" => "view", + "type" => "function" + } + ] + + @doc """ + Returns selector with ABI (object of `ABI.FunctionSelector`) of the `addSequencerL2BatchFromBlobs(...)` function + """ + def add_sequencer_l2_batch_from_blobs_selector_with_abi, + do: %ABI.FunctionSelector{ + function: "addSequencerL2BatchFromBlobs", + types: [ + {:uint, 256}, + {:uint, 256}, + :address, + {:uint, 256}, + {:uint, 256} + ] + } + + @doc """ + Returns selector with ABI (object of `ABI.FunctionSelector`) of the function: + + addSequencerL2BatchFromOrigin( + uint256 sequenceNumber, + bytes calldata data, + uint256 afterDelayedMessagesRead, + address gasRefunder, + uint256 prevMessageCount, + uint256 newMessageCount + ) + """ + def add_sequencer_l2_batch_from_origin_8f111f3c_selector_with_abi, + do: %ABI.FunctionSelector{ + function: "addSequencerL2BatchFromOrigin", + types: [ + {:uint, 256}, + :bytes, + {:uint, 256}, + :address, + {:uint, 256}, + {:uint, 256} + ] + } + + @doc """ + Returns selector with ABI (object of `ABI.FunctionSelector`) of the function: + + addSequencerL2BatchFromOrigin( + uint256 sequenceNumber, + bytes calldata data, + uint256 afterDelayedMessagesRead, + address gasRefunder + ) + """ + def add_sequencer_l2_batch_from_origin_6f12b0c9_selector_with_abi, + do: %ABI.FunctionSelector{ + function: "addSequencerL2BatchFromOrigin", + types: [ + {:uint, 256}, + :bytes, + {:uint, 256}, + :address + ] + } +end diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/arbitrum/constants/events.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/arbitrum/constants/events.ex new file mode 100644 index 000000000000..1a18f7548691 --- /dev/null +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/arbitrum/constants/events.ex @@ -0,0 +1,155 @@ +defmodule EthereumJSONRPC.Arbitrum.Constants.Events do + 
@moduledoc """ + Provides constant values for Arbitrum-specific event signatures and their parameter types. + + This module contains keccak256 hashes of event signatures and their corresponding unindexed + parameter types for various Arbitrum protocol events, including: + - L2ToL1Tx + - NodeCreated + - SetValidKeyset + - SequencerBatchDelivered + - SendRootUpdated + - OutBoxTransactionExecuted + - MessageDelivered + + Each event signature is stored as a 32-byte string and is accompanied by helper functions + to access both the signature and, where applicable, the unindexed parameter types used + in event decoding. + """ + + # keccak256("L2ToL1Tx(address,address,uint256,uint256,uint256,uint256,uint256,uint256,bytes)") + @l2_to_l1 "0x3e7aafa77dbf186b7fd488006beff893744caa3c4f6f299e8a709fa2087374fc" + @l2_to_l1_unindexed_params [ + :address, + {:uint, 256}, + {:uint, 256}, + {:uint, 256}, + {:uint, 256}, + :bytes + ] + + @doc """ + Returns 32-byte signature of the event `L2ToL1Tx` + """ + @spec l2_to_l1() :: <<_::528>> + def l2_to_l1, do: @l2_to_l1 + + @spec l2_to_l1_unindexed_params() :: [atom() | {atom(), non_neg_integer()}] + def l2_to_l1_unindexed_params, do: @l2_to_l1_unindexed_params + + # keccak256("NodeCreated(uint64,bytes32,bytes32,bytes32,(((bytes32[2],uint64[2]),uint8),((bytes32[2],uint64[2]),uint8),uint64),bytes32,bytes32,uint256)") + @node_created "0x4f4caa9e67fb994e349dd35d1ad0ce23053d4323f83ce11dc817b5435031d096" + @node_created_unindexed_params [ + {:bytes, 32}, + # Assertion assertion + {:tuple, + [ + # ExecutionState beforeState + {:tuple, + [ + # GlobalState globalState + {:tuple, + [ + # bytes32[2] bytes32Values + {:array, {:bytes, 32}, 2}, + # uint64[2] u64Values + {:array, {:uint, 64}, 2} + ]}, + # MachineStatus machineStatus: enum MachineStatus {RUNNING, FINISHED, ERRORED, TOO_FAR} + {:uint, 256} + ]}, + # ExecutionState afterState + {:tuple, + [ + # GlobalState globalState + {:tuple, + [ + # bytes32[2] bytes32Values + {:array, {:bytes, 32}, 2}, + # uint64[2] u64Values + {:array, {:uint, 64}, 2} + ]}, + # MachineStatus machineStatus: enum MachineStatus {RUNNING, FINISHED, ERRORED, TOO_FAR} + {:uint, 256} + ]}, + # uint64 numBlocks + {:uint, 64} + ]}, + {:bytes, 32}, + {:bytes, 32}, + {:uint, 256} + ] + + @doc """ + Returns 32-byte signature of the event `NodeCreated` + """ + @spec node_created() :: <<_::528>> + def node_created, do: @node_created + + @spec node_created_unindexed_params() :: [atom() | {atom(), non_neg_integer()}] + def node_created_unindexed_params, do: @node_created_unindexed_params + + # keccak256("SetValidKeyset(bytes32,bytes)") + @set_valid_keyset "0xabca9b7986bc22ad0160eb0cb88ae75411eacfba4052af0b457a9335ef655722" + @set_valid_keyset_unindexed_params [:bytes] + + @doc """ + Returns 32-byte signature of the event `SetValidKeyset` + """ + @spec set_valid_keyset() :: <<_::528>> + def set_valid_keyset, do: @set_valid_keyset + + @spec set_valid_keyset_unindexed_params() :: [atom() | {atom(), non_neg_integer()}] + def set_valid_keyset_unindexed_params, do: @set_valid_keyset_unindexed_params + + # keccak256("SequencerBatchDelivered(uint256,bytes32,bytes32,bytes32,uint256,(uint64,uint64,uint64,uint64),uint8)") + @sequencer_batch_delivered "0x7394f4a19a13c7b92b5bb71033245305946ef78452f7b4986ac1390b5df4ebd7" + + @doc """ + Returns 32-byte signature of the event `SequencerBatchDelivered` + """ + @spec sequencer_batch_delivered() :: <<_::528>> + def sequencer_batch_delivered, do: @sequencer_batch_delivered + + # keccak256("SendRootUpdated(bytes32,bytes32)") + 
@send_root_updated "0xb4df3847300f076a369cd76d2314b470a1194d9e8a6bb97f1860aee88a5f6748" + + @doc """ + Returns 32-byte signature of the event `SendRootUpdated` + """ + @spec send_root_updated() :: <<_::528>> + def send_root_updated, do: @send_root_updated + + # keccak256("OutBoxTransactionExecuted(address,address,uint256,uint256)") + @outbox_transaction_executed "0x20af7f3bbfe38132b8900ae295cd9c8d1914be7052d061a511f3f728dab18964" + @outbox_transaction_executed_unindexed_params [{:uint, 256}] + + @doc """ + Returns 32-byte signature of the event `OutBoxTransactionExecuted` + """ + @spec outbox_transaction_executed() :: <<_::528>> + def outbox_transaction_executed, do: @outbox_transaction_executed + + @spec outbox_transaction_executed_unindexed_params() :: [atom() | {atom(), non_neg_integer()}] + def outbox_transaction_executed_unindexed_params, do: @outbox_transaction_executed_unindexed_params + + # keccak256("MessageDelivered(uint256,bytes32,address,uint8,address,bytes32,uint256,uint64)") + @message_delivered "0x5e3c1311ea442664e8b1611bfabef659120ea7a0a2cfc0667700bebc69cbffe1" + @message_delivered_unindexed_params [ + :address, + {:uint, 8}, + :address, + {:bytes, 32}, + {:uint, 256}, + {:uint, 64} + ] + + @doc """ + Returns 32-byte signature of the event `MessageDelivered` + """ + @spec message_delivered() :: <<_::528>> + def message_delivered, do: @message_delivered + + @spec message_delivered_unindexed_params() :: [atom() | {atom(), non_neg_integer()}] + def message_delivered_unindexed_params, do: @message_delivered_unindexed_params +end diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/besu.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/besu.ex index 0c54fecd5848..45b8d545e3ac 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/besu.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/besu.ex @@ -3,7 +3,9 @@ defmodule EthereumJSONRPC.Besu do @moduledoc """ Ethereum JSONRPC methods that are only supported by [Besu](https://besu.hyperledger.org/en/stable/Reference/API-Methods). 
""" - import EthereumJSONRPC, only: [id_to_params: 1, integer_to_quantity: 1, json_rpc: 2] + require Logger + + import EthereumJSONRPC, only: [id_to_params: 1, integer_to_quantity: 1, json_rpc: 2, request: 1] alias EthereumJSONRPC.Besu.Traces alias EthereumJSONRPC.{FetchedBeneficiaries, PendingTransaction, TraceReplayBlockTransactions, Transaction} @@ -56,11 +58,24 @@ defmodule EthereumJSONRPC.Besu do end @impl EthereumJSONRPC.Variant - def fetch_transaction_raw_traces(transaction_params, json_rpc_named_arguments) do - TraceReplayBlockTransactions.fetch_transaction_raw_traces(transaction_params, json_rpc_named_arguments) + def fetch_transaction_raw_traces(%{hash: transaction_hash}, json_rpc_named_arguments) do + request = trace_transaction_request(%{id: 0, hash_data: to_string(transaction_hash)}) + + case json_rpc(request, json_rpc_named_arguments) do + {:ok, response} -> + {:ok, response} + + {:error, error} -> + Logger.error(inspect(error)) + {:error, error} + end end defp block_numbers_to_params_list(block_numbers) when is_list(block_numbers) do Enum.map(block_numbers, &%{block_quantity: integer_to_quantity(&1)}) end + + defp trace_transaction_request(%{id: id, hash_data: hash_data}) do + request(%{id: id, method: "trace_transaction", params: [hash_data]}) + end end diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/decode_error.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/decode_error.ex index 390f1fcd5f1e..57c62262dd48 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/decode_error.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/decode_error.ex @@ -11,8 +11,8 @@ defmodule EthereumJSONRPC.DecodeError do Ethereum JSONRPC request whose `EthereumJSONRPC.DecodeError.Response` had a decode error. """ - @enforce_keys [:url, :body] - defstruct [:url, :body] + @enforce_keys [:url, :body, :headers] + defstruct [:url, :body, :headers] end defmodule Response do @@ -40,7 +40,7 @@ defmodule EthereumJSONRPC.DecodeError do @impl Exception def message( %EthereumJSONRPC.DecodeError{ - request: %EthereumJSONRPC.DecodeError.Request{url: request_url, body: request_body}, + request: %EthereumJSONRPC.DecodeError.Request{url: request_url, body: request_body, headers: headers}, response: %EthereumJSONRPC.DecodeError.Response{status_code: response_status_code, body: response_body} } = decode_error ) do @@ -65,6 +65,8 @@ defmodule EthereumJSONRPC.DecodeError do body: #{truncated_request_body} + headers: #{inspect(headers)} + response: status code: #{response_status_code} diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/http.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/http.ex index 0335c5e9af12..41e0023e7737 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/http.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/http.ex @@ -28,7 +28,8 @@ defmodule EthereumJSONRPC.HTTP do http_options = Keyword.fetch!(options, :http_options) with {:ok, %{body: body, status_code: code}} <- http.json_rpc(url, json, headers(), http_options), - {:ok, json} <- decode_json(request: [url: url, body: json], response: [status_code: code, body: body]), + {:ok, json} <- + decode_json(request: [url: url, body: json, headers: headers()], response: [status_code: code, body: body]), {:ok, response} <- handle_response(json, code) do {:ok, response} else @@ -77,7 +78,10 @@ defmodule EthereumJSONRPC.HTTP do {:ok, %{body: body, status_code: status_code}} -> with {:ok, decoded_body} <- - decode_json(request: [url: url, body: json], response: [status_code: status_code, body: body]) do + decode_json( + request: [url: 
url, body: json, headers: headers()], + response: [status_code: status_code, body: body] + ) do chunked_json_rpc(tail, options, [decoded_body | decoded_response_bodies]) end diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex index 2b7410ebe9a4..64c3ea373c3c 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/receipt.ex @@ -431,43 +431,7 @@ defmodule EthereumJSONRPC.Receipt do end end - # fixes for latest ganache JSON RPC - defp entry_to_elixir({key, _}) when key in ~w(r s v) do + defp entry_to_elixir({_, _}) do :ignore end - - # Nethermind field - defp entry_to_elixir({"error", _}) do - :ignore - end - - # Arbitrum fields - defp entry_to_elixir({key, _}) when key in ~w(returnData returnCode feeStats l1BlockNumber) do - :ignore - end - - # Metis fields - defp entry_to_elixir({key, _}) when key in ~w(l1GasUsed l1GasPrice l1FeeScalar l1Fee) do - :ignore - end - - # GoQuorum specific transaction receipt fields - defp entry_to_elixir({key, _}) when key in ~w(isPrivacyMarkerTransaction) do - :ignore - end - - # Optimism specific transaction receipt fields - defp entry_to_elixir({key, _}) when key in ~w(depositNonce depositReceiptVersion) do - :ignore - end - - # zkSync specific transaction receipt fields - defp entry_to_elixir({key, _}) - when key in ~w(l1BatchNumber l1BatchTxIndex l2ToL1Logs) do - :ignore - end - - defp entry_to_elixir({key, value}) do - {:error, {:unknown_key, %{key: key, value: value}}} - end end diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/trace_replay_block_transactions.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/trace_replay_block_transactions.ex index 79748a2aee69..c3aeb291ed44 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/trace_replay_block_transactions.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/trace_replay_block_transactions.ex @@ -258,6 +258,26 @@ defmodule EthereumJSONRPC.TraceReplayBlockTransactions do {:error, annotated_error} end + defp trace_replay_transaction_response_to_first_trace(%{id: id, result: error_result}, id_to_params) + when is_map(id_to_params) do + %{ + block_hash: block_hash, + block_number: block_number, + hash_data: transaction_hash, + transaction_index: transaction_index + } = Map.fetch!(id_to_params, id) + + annotated_error = %{ + "blockHash" => block_hash, + "blockNumber" => block_number, + "transactionIndex" => transaction_index, + "transactionHash" => transaction_hash, + "result" => error_result + } + + {:error, annotated_error} + end + defp trace_replay_transaction_requests(id_to_params) when is_map(id_to_params) do Enum.map(id_to_params, fn {id, %{hash_data: hash_data}} -> trace_replay_transaction_request(%{id: id, hash_data: hash_data}) diff --git a/apps/ethereum_jsonrpc/mix.exs b/apps/ethereum_jsonrpc/mix.exs index cbc0b829abe9..47eb43d15642 100644 --- a/apps/ethereum_jsonrpc/mix.exs +++ b/apps/ethereum_jsonrpc/mix.exs @@ -15,7 +15,7 @@ defmodule EthereumJSONRPC.MixProject do plt_add_apps: [:mix], ignore_warnings: "../../.dialyzer-ignore" ], - elixir: "~> 1.13", + elixir: "~> 1.17", elixirc_paths: elixirc_paths(Mix.env()), lockfile: "../../mix.lock", preferred_cli_env: [ @@ -23,7 +23,7 @@ defmodule EthereumJSONRPC.MixProject do dialyzer: :test ], start_permanent: Mix.env() == :prod, - version: "6.9.2" + version: "6.10.1" ] end diff --git a/apps/ethereum_jsonrpc/test/ethereum_jsonrpc/receipt_test.exs b/apps/ethereum_jsonrpc/test/ethereum_jsonrpc/receipt_test.exs index 
0c7feed17f29..763c4b62d1eb 100644 --- a/apps/ethereum_jsonrpc/test/ethereum_jsonrpc/receipt_test.exs +++ b/apps/ethereum_jsonrpc/test/ethereum_jsonrpc/receipt_test.exs @@ -6,23 +6,13 @@ defmodule EthereumJSONRPC.ReceiptTest do doctest Receipt describe "to_elixir/1" do - test "with new key raise ArgumentError with full receipt" do - assert_raise ArgumentError, - """ - Could not convert receipt to elixir - - Receipt: - %{"new_key" => "new_value", "transactionHash" => "0x5c504ed432cb51138bcf09aa5e8a410dd4a1e204ef84bfed1be16dfba1b22060"} - - Errors: - {:unknown_key, %{value: "new_value", key: "new_key"}} - """, - fn -> - Receipt.to_elixir(%{ - "new_key" => "new_value", - "transactionHash" => "0x5c504ed432cb51138bcf09aa5e8a410dd4a1e204ef84bfed1be16dfba1b22060" - }) - end + test "ignores new key" do + assert Receipt.to_elixir(%{ + "new_key" => "new_value", + "transactionHash" => "0x5c504ed432cb51138bcf09aa5e8a410dd4a1e204ef84bfed1be16dfba1b22060" + }) == %{ + "transactionHash" => "0x5c504ed432cb51138bcf09aa5e8a410dd4a1e204ef84bfed1be16dfba1b22060" + } end # Regression test for https://github.com/poanetwork/blockscout/issues/638 diff --git a/apps/explorer/config/config.exs b/apps/explorer/config/config.exs index a1977ac738c1..ffb83351059e 100644 --- a/apps/explorer/config/config.exs +++ b/apps/explorer/config/config.exs @@ -133,6 +133,7 @@ config :explorer, Explorer.Migrator.SanitizeReplacedTransactions, enabled: true config :explorer, Explorer.Migrator.ReindexInternalTransactionsWithIncompatibleStatus, enabled: true config :explorer, Explorer.Migrator.SanitizeDuplicatedLogIndexLogs, enabled: true config :explorer, Explorer.Migrator.RefetchContractCodes, enabled: true +config :explorer, Explorer.Migrator.BackfillMultichainSearchDB, enabled: true config :explorer, Explorer.Chain.Fetcher.CheckBytecodeMatchingOnDemand, enabled: true diff --git a/apps/explorer/config/dev/besu.exs b/apps/explorer/config/dev/besu.exs index 598ce1459b19..851c422bd51f 100644 --- a/apps/explorer/config/dev/besu.exs +++ b/apps/explorer/config/dev/besu.exs @@ -12,29 +12,12 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL", "http://localhost:8545"), - trace_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_TRACE_URLS", - "ETHEREUM_JSONRPC_TRACE_URL", - "http://localhost:8545" - ), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:8545" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:8545"), + trace_urls: ConfigHelper.parse_urls_list(:trace, "http://localhost:8545"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:8545"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git 
a/apps/explorer/config/dev/erigon.exs b/apps/explorer/config/dev/erigon.exs index ce6108e4543f..fc3faf87094c 100644 --- a/apps/explorer/config/dev/erigon.exs +++ b/apps/explorer/config/dev/erigon.exs @@ -12,29 +12,12 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL", "http://localhost:8545"), - trace_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_TRACE_URLS", - "ETHEREUM_JSONRPC_TRACE_URL", - "http://localhost:8545" - ), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:8545" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:8545"), + trace_urls: ConfigHelper.parse_urls_list(:trace, "http://localhost:8545"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:8545"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/explorer/config/dev/filecoin.exs b/apps/explorer/config/dev/filecoin.exs index 2ebd3941ec36..9fcf631e1b29 100644 --- a/apps/explorer/config/dev/filecoin.exs +++ b/apps/explorer/config/dev/filecoin.exs @@ -12,33 +12,12 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_HTTP_URLS", - "ETHEREUM_JSONRPC_HTTP_URL", - "http://localhost:1234/rpc/v1" - ), - trace_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_TRACE_URLS", - "ETHEREUM_JSONRPC_TRACE_URL", - "http://localhost:1234/rpc/v1" - ), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:1234/rpc/v1" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:1234/rpc/v1"), + trace_urls: ConfigHelper.parse_urls_list(:trace, "http://localhost:1234/rpc/v1"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:1234/rpc/v1"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, trace_block: :trace diff --git a/apps/explorer/config/dev/ganache.exs b/apps/explorer/config/dev/ganache.exs index 8c3c678311eb..37f087fdfe29 100644 --- 
a/apps/explorer/config/dev/ganache.exs +++ b/apps/explorer/config/dev/ganache.exs @@ -12,21 +12,10 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL", "http://localhost:7545"), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:7545" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:7545"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:7545"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call ], diff --git a/apps/explorer/config/dev/geth.exs b/apps/explorer/config/dev/geth.exs index 6210a08d583c..ebf1a7aa9536 100644 --- a/apps/explorer/config/dev/geth.exs +++ b/apps/explorer/config/dev/geth.exs @@ -12,29 +12,12 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL", "http://localhost:8545"), - trace_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_TRACE_URLS", - "ETHEREUM_JSONRPC_TRACE_URL", - "http://localhost:8545" - ), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:8545" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:8545"), + trace_urls: ConfigHelper.parse_urls_list(:trace, "http://localhost:8545"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:8545"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, debug_traceTransaction: :trace, diff --git a/apps/explorer/config/dev/nethermind.exs b/apps/explorer/config/dev/nethermind.exs index 83bdbc3a899c..deaa3bcbd420 100644 --- a/apps/explorer/config/dev/nethermind.exs +++ b/apps/explorer/config/dev/nethermind.exs @@ -12,29 +12,12 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL", "http://localhost:8545"), - trace_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_TRACE_URLS", - "ETHEREUM_JSONRPC_TRACE_URL", - "http://localhost:8545" - ), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:8545" - ), - 
fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:8545"), + trace_urls: ConfigHelper.parse_urls_list(:trace, "http://localhost:8545"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:8545"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/explorer/config/dev/rsk.exs b/apps/explorer/config/dev/rsk.exs index 6e2d4f391222..afdb6e17627e 100644 --- a/apps/explorer/config/dev/rsk.exs +++ b/apps/explorer/config/dev/rsk.exs @@ -12,29 +12,12 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL", "http://localhost:8545"), - trace_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_TRACE_URLS", - "ETHEREUM_JSONRPC_TRACE_URL", - "http://localhost:8545" - ), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:8545" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:8545"), + trace_urls: ConfigHelper.parse_urls_list(:trace, "http://localhost:8545"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:8545"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/explorer/config/prod/besu.exs b/apps/explorer/config/prod/besu.exs index a7b8c6ff6e73..2493d3021123 100644 --- a/apps/explorer/config/prod/besu.exs +++ b/apps/explorer/config/prod/besu.exs @@ -12,18 +12,12 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"), - trace_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_TRACE_URLS", "ETHEREUM_JSONRPC_TRACE_URL"), - eth_call_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - 
"ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http), + trace_urls: ConfigHelper.parse_urls_list(:trace), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/explorer/config/prod/erigon.exs b/apps/explorer/config/prod/erigon.exs index d2a8a51e188c..0275a0a1067e 100644 --- a/apps/explorer/config/prod/erigon.exs +++ b/apps/explorer/config/prod/erigon.exs @@ -12,18 +12,12 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"), - trace_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_TRACE_URLS", "ETHEREUM_JSONRPC_TRACE_URL"), - eth_call_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http), + trace_urls: ConfigHelper.parse_urls_list(:trace), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/explorer/config/prod/filecoin.exs b/apps/explorer/config/prod/filecoin.exs index a47c57517a1a..22c3862f7482 100644 --- a/apps/explorer/config/prod/filecoin.exs +++ b/apps/explorer/config/prod/filecoin.exs @@ -12,18 +12,12 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"), - trace_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_TRACE_URLS", "ETHEREUM_JSONRPC_TRACE_URL"), - eth_call_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http), + trace_urls: ConfigHelper.parse_urls_list(:trace), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, trace_block: :trace diff --git 
a/apps/explorer/config/prod/ganache.exs b/apps/explorer/config/prod/ganache.exs index d4a0aff64a78..032d301c1ba4 100644 --- a/apps/explorer/config/prod/ganache.exs +++ b/apps/explorer/config/prod/ganache.exs @@ -12,15 +12,10 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"), - eth_call_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call ], diff --git a/apps/explorer/config/prod/geth.exs b/apps/explorer/config/prod/geth.exs index 2630943455df..745a74453dc6 100644 --- a/apps/explorer/config/prod/geth.exs +++ b/apps/explorer/config/prod/geth.exs @@ -12,18 +12,12 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"), - trace_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_TRACE_URLS", "ETHEREUM_JSONRPC_TRACE_URL"), - eth_call_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http), + trace_urls: ConfigHelper.parse_urls_list(:trace), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, debug_traceTransaction: :trace, diff --git a/apps/explorer/config/prod/nethermind.exs b/apps/explorer/config/prod/nethermind.exs index 7232bad8c2c7..3147355e90e3 100644 --- a/apps/explorer/config/prod/nethermind.exs +++ b/apps/explorer/config/prod/nethermind.exs @@ -12,18 +12,12 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"), - trace_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_TRACE_URLS", "ETHEREUM_JSONRPC_TRACE_URL"), - eth_call_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - 
fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http), + trace_urls: ConfigHelper.parse_urls_list(:trace), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/explorer/config/prod/rsk.exs b/apps/explorer/config/prod/rsk.exs index 4f63fa6d4dee..71782fd05901 100644 --- a/apps/explorer/config/prod/rsk.exs +++ b/apps/explorer/config/prod/rsk.exs @@ -12,18 +12,12 @@ config :explorer, transport: EthereumJSONRPC.HTTP, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"), - trace_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_TRACE_URLS", "ETHEREUM_JSONRPC_TRACE_URL"), - eth_call_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http), + trace_urls: ConfigHelper.parse_urls_list(:trace), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/explorer/config/runtime/test.exs b/apps/explorer/config/runtime/test.exs index f4be94ed7293..a6777d33ef2d 100644 --- a/apps/explorer/config/runtime/test.exs +++ b/apps/explorer/config/runtime/test.exs @@ -54,6 +54,7 @@ config :explorer, Explorer.Migrator.SanitizeReplacedTransactions, enabled: false config :explorer, Explorer.Migrator.ReindexInternalTransactionsWithIncompatibleStatus, enabled: false config :explorer, Explorer.Migrator.SanitizeDuplicatedLogIndexLogs, enabled: false config :explorer, Explorer.Migrator.RefetchContractCodes, enabled: false +config :explorer, Explorer.Migrator.BackfillMultichainSearchDB, enabled: false config :explorer, realtime_events_sender: Explorer.Chain.Events.SimpleSender diff --git a/apps/explorer/lib/explorer/access_helper.ex b/apps/explorer/lib/explorer/access_helper.ex index a0794046cdd0..5a0ae37001ce 100644 --- a/apps/explorer/lib/explorer/access_helper.ex +++ b/apps/explorer/lib/explorer/access_helper.ex @@ -3,29 +3,55 @@ defmodule Explorer.AccessHelper do Helper to restrict access to some pages filtering by address """ - def restricted_access?(address_hash, params) do - restricted_list_var = Application.get_env(:explorer, :restricted_list) - restricted_list = (restricted_list_var && String.split(restricted_list_var, ",")) || [] + alias Explorer.Chain + alias Explorer.Chain.Fetcher.AddressesBlacklist - if Enum.empty?(restricted_list) do - {:ok, false} - else - formatted_restricted_list = - restricted_list - |> 
Enum.map(fn addr -> - String.downcase(addr) - end) + @doc """ + Checks if access is restricted based on the provided address_hash_string and a map of request params. - formatted_address_hash = String.downcase(address_hash) + ## Parameters + - `address_hash_string`: The address hash string (binary) to check for restricted access. + - `params`: An optional map of request params that may contain an admin key to bypass access restrictions. - address_restricted = - formatted_restricted_list - |> Enum.member?(formatted_address_hash) + ## Returns + - `{:ok, false}`: If access is not restricted. + - `{:restricted_access, true}`: If access is restricted. + """ + @spec restricted_access?(binary(), nil | map()) :: {:ok, false} | {:restricted_access, true} + def restricted_access?("", _), do: {:ok, false} + + # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity + def restricted_access?(address_hash_string, params) do + restricted_list_var = Application.get_env(:explorer, :addresses_blacklist) + addresses_blacklist = (restricted_list_var && String.split(restricted_list_var, ",")) || [] + + key = if params && Map.has_key?(params, "key"), do: Map.get(params, "key"), else: nil + correct_key = key && key == Application.get_env(:explorer, :addresses_blacklist_key) + + {:ok, address_hash} = Chain.string_to_address_hash(address_hash_string) + blacklisted? = AddressesBlacklist.blacklisted?(address_hash) + + cond do + blacklisted? -> + if correct_key, do: {:ok, false}, else: {:restricted_access, true} + + Enum.empty?(addresses_blacklist) -> + {:ok, false} + + true -> + formatted_restricted_list = + addresses_blacklist + |> Enum.map(fn addr -> + String.downcase(addr) + end) + + formatted_address_hash = String.downcase(address_hash_string) - key = if params && Map.has_key?(params, "key"), do: Map.get(params, "key"), else: nil - correct_key = key && key == Application.get_env(:explorer, :restricted_list_key) + address_restricted = + formatted_restricted_list + |> Enum.member?(formatted_address_hash) - if address_restricted && !correct_key, do: {:restricted_access, true}, else: {:ok, false} + if address_restricted && !correct_key, do: {:restricted_access, true}, else: {:ok, false} end end end diff --git a/apps/explorer/lib/explorer/account/api/key.ex b/apps/explorer/lib/explorer/account/api/key.ex index 8b20283a16d7..2f86c1fabde2 100644 --- a/apps/explorer/lib/explorer/account/api/key.ex +++ b/apps/explorer/lib/explorer/account/api/key.ex @@ -64,31 +64,27 @@ defmodule Explorer.Account.Api.Key do @spec create(map()) :: {:ok, t()} | {:error, Changeset.t()} def create(%{identity_id: identity_id} = attrs) do Multi.new() - |> Multi.run(:acquire_identity, fn repo, _changes -> - identity_query = from(identity in Identity, where: identity.id == ^identity_id, lock: "FOR UPDATE") - - case repo.one(identity_query) do - nil -> - {:error, - %__MODULE__{} - |> changeset(Map.put(attrs, :value, generate_api_key())) - |> add_error(:identity_id, @user_not_found, - constraint: :foreign, - constraint_name: "account_api_keys_identity_id_fkey" - )} - - identity -> - {:ok, identity} - end - end) + |> Identity.acquire_with_lock(identity_id) |> Multi.insert(:api_key, fn _ -> %__MODULE__{} |> changeset(Map.put(attrs, :value, generate_api_key())) end) |> Repo.account_repo().transaction() |> case do - {:ok, %{api_key: api_key}} -> {:ok, api_key} - {:error, _failed_operation, error, _changes} -> {:error, error} + {:ok, %{api_key: api_key}} -> + {:ok, api_key} + + {:error, :acquire_identity, :not_found, _changes} -> + {:error, 
%__MODULE__{} + |> changeset(Map.put(attrs, :value, generate_api_key())) + |> add_error(:identity_id, @user_not_found, + constraint: :foreign, + constraint_name: "account_api_keys_identity_id_fkey" + )} + + {:error, _failed_operation, error, _changes} -> + {:error, error} end end diff --git a/apps/explorer/lib/explorer/account/custom_abi.ex b/apps/explorer/lib/explorer/account/custom_abi.ex index 56f8b4bfad93..3cc9eae51c93 100644 --- a/apps/explorer/lib/explorer/account/custom_abi.ex +++ b/apps/explorer/lib/explorer/account/custom_abi.ex @@ -15,6 +15,8 @@ defmodule Explorer.Account.CustomABI do @max_abis_per_account 15 + @user_not_found "User not found" + typed_schema "account_custom_abis" do field(:abi, {:array, :map}, null: false) field(:given_abi, :string, virtual: true) @@ -37,7 +39,7 @@ defmodule Explorer.Account.CustomABI do |> validate_required(@attrs, message: "Required") |> validate_custom_abi() |> check_smart_contract_address() - |> foreign_key_constraint(:identity_id, message: "User not found") + |> foreign_key_constraint(:identity_id, message: @user_not_found) |> put_hashed_fields() |> unique_constraint([:identity_id, :address_hash_hash], message: "Custom ABI for this address has already been added before" @@ -149,10 +151,62 @@ defmodule Explorer.Account.CustomABI do def custom_abi_count_constraint(%Changeset{} = custom_abi), do: custom_abi + @doc """ + Creates a new custom ABI entry for a smart contract address. + + The function performs several validations including checking the ABI format, + verifying the smart contract address, and ensuring the user hasn't exceeded their + ABI limit. The operation is executed within a database transaction that includes + identity verification. + + ## Parameters + - `attrs`: A map containing: + - `identity_id`: The ID of the user creating the ABI + - `abi`: The ABI specification as a JSON string or list of maps + - `name`: The name for this custom ABI entry + - `address_hash`: The smart contract address this ABI corresponds to + + ## Returns + - `{:ok, custom_abi}` if the creation is successful + - `{:error, changeset}` if: + - The identity doesn't exist + - The ABI format is invalid + - The address is not a smart contract + - The user has reached their ABI limit + - The ABI already exists for this address + - Required fields are missing + """ + @spec create(map()) :: {:ok, t()} | {:error, Changeset.t()} + def create(%{identity_id: identity_id} = attrs) do + Multi.new() + |> Identity.acquire_with_lock(identity_id) + |> Multi.insert(:custom_abi, fn _ -> + %__MODULE__{} + |> changeset(attrs) + end) + |> Repo.account_repo().transaction() + |> case do + {:ok, %{custom_abi: custom_abi}} -> + {:ok, custom_abi} + + {:error, :acquire_identity, :not_found, _changes} -> + {:error, + %__MODULE__{} + |> changeset(attrs) + |> add_error(:identity_id, @user_not_found, + constraint: :foreign, + constraint_name: "account_custom_abis_identity_id_fkey" + )} + + {:error, _failed_operation, error, _changes} -> + {:error, error} + end + end + def create(attrs) do - %__MODULE__{} - |> changeset(attrs) - |> Repo.account_repo().insert() + {:error, + %__MODULE__{} + |> changeset(attrs)} end def custom_abis_by_identity_id_query(id) when not is_nil(id) do diff --git a/apps/explorer/lib/explorer/account/identity.ex b/apps/explorer/lib/explorer/account/identity.ex index 7f3739dc6557..c5ab81b9c339 100644 --- a/apps/explorer/lib/explorer/account/identity.ex +++ b/apps/explorer/lib/explorer/account/identity.ex @@ -224,6 +224,38 @@ defmodule Explorer.Account.Identity do end) 
end + @doc """ + Adds an operation to acquire and lock an account identity record in the database. + + This operation performs a SELECT FOR UPDATE on the identity record, which prevents + concurrent modifications of the record until the transaction is committed or rolled + back. + + ## Parameters + - `multi`: An Ecto.Multi struct representing a series of database operations + - `identity_id`: The ID of the account identity to lock + + ## Returns + - An updated Ecto.Multi struct with the `:acquire_identity` operation added. The + operation will return: + - `{:ok, identity}` if the identity is found and locked successfully + - `{:error, :not_found}` if no identity exists with the given ID + """ + @spec acquire_with_lock(Multi.t(), integer()) :: Multi.t() + def acquire_with_lock(multi, identity_id) do + Multi.run(multi, :acquire_identity, fn repo, _ -> + identity_query = from(identity in __MODULE__, where: identity.id == ^identity_id, lock: "FOR UPDATE") + + case repo.one(identity_query) do + nil -> + {:error, :not_found} + + identity -> + {:ok, identity} + end + end) + end + defp session_info(auth, identity) do if email_verified_from_auth(auth) do %{watchlists: [watchlist | _]} = Repo.account_repo().preload(identity, :watchlists) diff --git a/apps/explorer/lib/explorer/account/notifier/email.ex b/apps/explorer/lib/explorer/account/notifier/email.ex index 8f48c05b6e73..b1f9606936bf 100644 --- a/apps/explorer/lib/explorer/account/notifier/email.ex +++ b/apps/explorer/lib/explorer/account/notifier/email.ex @@ -35,8 +35,6 @@ defmodule Explorer.Account.Notifier.Email do |> add_dynamic_field("block_number", notification.block_number) |> add_dynamic_field("amount", amount(notification)) |> add_dynamic_field("name", notification.name) - # todo: keep next line for compatibility with old version of SendGrid template. Remove it when the changes released and Sendgrid template updated. 
- |> add_dynamic_field("tx_fee", notification.transaction_fee) |> add_dynamic_field("transaction_fee", notification.transaction_fee) |> add_dynamic_field("direction", direction(notification)) |> add_dynamic_field("method", notification.method) diff --git a/apps/explorer/lib/explorer/account/notifier/notify.ex b/apps/explorer/lib/explorer/account/notifier/notify.ex index 1214b865d51b..d2a41d422e97 100644 --- a/apps/explorer/lib/explorer/account/notifier/notify.ex +++ b/apps/explorer/lib/explorer/account/notifier/notify.ex @@ -5,7 +5,7 @@ defmodule Explorer.Account.Notifier.Notify do alias Explorer.Account.Notifier.{Email, ForbiddenAddress, Summary} alias Explorer.Account.{WatchlistAddress, WatchlistNotification} - alias Explorer.Chain.{TokenTransfer, Transaction} + alias Explorer.Chain.Transaction alias Explorer.{Mailer, Repo} require Logger @@ -20,20 +20,16 @@ defmodule Explorer.Account.Notifier.Notify do Enum.map(transactions, fn transaction -> process(transaction) end) end - defp process(%TokenTransfer{} = transfer) do - Logger.debug(transfer, fetcher: :account) - - transfer - |> Summary.process() - |> Enum.map(fn summary -> notify_watchlists(summary) end) - end - defp process(%Transaction{} = transaction) do - Logger.debug(transaction, fetcher: :account) - - transaction - |> Summary.process() - |> Enum.map(fn summary -> notify_watchlists(summary) end) + if DateTime.after?(transaction.block_timestamp, DateTime.add(DateTime.utc_now(), -1, :day)) do + Logger.debug(transaction, fetcher: :account) + + transaction + |> Summary.process() + |> Enum.map(fn summary -> notify_watchlists(summary) end) + else + nil + end end defp process(_), do: nil diff --git a/apps/explorer/lib/explorer/account/notifier/summary.ex b/apps/explorer/lib/explorer/account/notifier/summary.ex index d6231cfe62b6..5ccfef8e6a1b 100644 --- a/apps/explorer/lib/explorer/account/notifier/summary.ex +++ b/apps/explorer/lib/explorer/account/notifier/summary.ex @@ -45,18 +45,6 @@ defmodule Explorer.Account.Notifier.Summary do end) end - def process(%Chain.TokenTransfer{} = transfer) do - preloaded_transfer = preload(transfer) - - summary = fetch_summary(preloaded_transfer.transaction, preloaded_transfer) - - if summary != :nothing do - [summary] - else - [] - end - end - def process(_), do: nil def handle_collection(_transaction, []), do: [] @@ -228,13 +216,7 @@ defmodule Explorer.Account.Notifier.Summary do fee end - def preload(%Chain.Transaction{} = transaction) do - Repo.preload(transaction, [:internal_transactions, token_transfers: :token]) - end - - def preload(%Chain.TokenTransfer{} = transfer) do - Repo.preload(transfer, [:transaction, :token]) + defp preload(%Chain.Transaction{} = transaction) do + Repo.preload(transaction, token_transfers: :token) end - - def preload(_), do: nil end diff --git a/apps/explorer/lib/explorer/account/public_tags_request.ex b/apps/explorer/lib/explorer/account/public_tags_request.ex index 9d38c07a297c..a0e37a5ead7c 100644 --- a/apps/explorer/lib/explorer/account/public_tags_request.ex +++ b/apps/explorer/lib/explorer/account/public_tags_request.ex @@ -19,6 +19,8 @@ defmodule Explorer.Account.PublicTagsRequest do @max_tags_per_request 2 @max_tag_length 35 + @user_not_found "User not found" + typed_schema "account_public_tags_requests" do field(:company, :string) field(:website, :string) @@ -60,7 +62,7 @@ defmodule Explorer.Account.PublicTagsRequest do |> validate_format(:email, ~r/^[A-Z0-9._%+-]+@[A-Z0-9-]+.+.[A-Z]{2,4}$/i, message: "is invalid") |> validate_length(:addresses, min: 1, max: 
@max_addresses_per_request) |> extract_and_validate_addresses() - |> foreign_key_constraint(:identity_id) + |> foreign_key_constraint(:identity_id, message: @user_not_found) |> public_tags_request_count_constraint() |> public_tags_request_time_interval_uniqueness() end @@ -70,11 +72,64 @@ defmodule Explorer.Account.PublicTagsRequest do |> cast(attrs, @attrs ++ @required_attrs) end + + @doc """ + Creates a new public tags request within a database transaction. + + The creation process involves verifying the existence of the associated identity + and ensuring data consistency through a database lock. The transaction prevents + concurrent modifications of the same identity record. + + ## Parameters + - `attrs`: Map containing the following fields: + - `:identity_id`: Required. The ID of the identity associated with the request + - `:company`: Optional. The company name + - `:website`: Optional. The company's website + - `:tags`: Required. The requested tags + - `:addresses`: Required. List of blockchain addresses + - `:description`: Optional. Description of the request + - `:additional_comment`: Required. Additional information about the request + - `:request_type`: Required. The type of the request + - `:is_owner`: Optional. Boolean indicating ownership (defaults to true) + - `:remove_reason`: Optional. Reason for tag removal if applicable + - `:request_id`: Optional. External request identifier + - `:full_name`: Required. Encrypted full name of the requester + - `:email`: Required. Encrypted email of the requester + + ## Returns + - `{:ok, public_tags_request}` - Returns the created public tags request + - `{:error, changeset}` - Returns a changeset with errors if: + - The identity doesn't exist + - The provided data is invalid + - Required fields are missing + """ + @spec create(map()) :: {:ok, t()} | {:error, Changeset.t()} + def create(%{identity_id: identity_id} = attrs) do + Multi.new() + |> Identity.acquire_with_lock(identity_id) + |> Multi.insert(:public_tags_request, fn _ -> + %__MODULE__{} + |> changeset(Map.put(attrs, :request_type, "add")) + end) + |> Repo.account_repo().transaction() + |> case do + {:ok, %{public_tags_request: public_tags_request}} -> + {:ok, public_tags_request} |> AirTable.submit() + + {:error, :acquire_identity, :not_found, _changes} -> + {:error, + %__MODULE__{} + |> changeset(Map.put(attrs, :request_type, "add")) + |> add_error(:identity_id, @user_not_found, + constraint: :foreign, + constraint_name: "account_public_tags_requests_identity_id_fkey" + )} + + {:error, _failed_operation, error, _changes} -> + {:error, error} + end + end + def create(attrs) do - %__MODULE__{} - |> changeset(Map.put(attrs, :request_type, "add")) - |> Repo.account_repo().insert() - |> AirTable.submit() + {:error, + %__MODULE__{} + |> changeset(Map.put(attrs, :request_type, "add"))} end defp trim_empty_addresses(%{addresses: addresses} = attrs) when is_list(addresses) do diff --git a/apps/explorer/lib/explorer/account/tag_address.ex b/apps/explorer/lib/explorer/account/tag_address.ex index c89bfb97d71e..caea1f365f13 100644 --- a/apps/explorer/lib/explorer/account/tag_address.ex +++ b/apps/explorer/lib/explorer/account/tag_address.ex @@ -14,6 +14,8 @@ defmodule Explorer.Account.TagAddress do import Explorer.Chain, only: [hash_to_lower_case_string: 1] + @user_not_found "User not found" + typed_schema "account_tag_addresses" do field(:address_hash_hash, Cloak.Ecto.SHA256) :: binary() | nil field(:name, Explorer.Encrypted.Binary, null: false) @@ -39,15 +41,61 @@ defmodule Explorer.Account.TagAddress do |> validate_required(@attrs, message: "Required") |> 
validate_length(:name, min: 1, max: 35) |> put_hashed_fields() + |> foreign_key_constraint(:identity_id, message: @user_not_found) |> unique_constraint([:identity_id, :address_hash_hash], message: "Address tag already exists") |> check_existence_or_create_address() |> tag_address_count_constraint() end + @doc """ + Creates a new tag address record in a transactional context. + + Attempts to create a tag address while ensuring the referenced identity exists by + acquiring a database lock. The function handles both the case where the identity + ID is provided and where it is missing. + + ## Parameters + - `attrs`: A map of attributes that must include: + - `:identity_id`: The ID of the associated identity + + ## Returns + - `{:ok, tag_address}` - The created tag address record + - `{:error, changeset}` - A changeset with validation errors if: + - The identity doesn't exist + - The identity ID is missing + - Other validation constraints are not met + """ + @spec create(map()) :: {:ok, t()} | {:error, Changeset.t()} + def create(%{identity_id: identity_id} = attrs) do + Multi.new() + |> Identity.acquire_with_lock(identity_id) + |> Multi.insert(:tag_address, fn _ -> + %__MODULE__{} + |> changeset(attrs) + end) + |> Repo.account_repo().transaction() + |> case do + {:ok, %{tag_address: tag_address}} -> + {:ok, tag_address} + + {:error, :acquire_identity, :not_found, _changes} -> + {:error, + %__MODULE__{} + |> changeset(attrs) + |> add_error(:identity_id, @user_not_found, + constraint: :foreign, + constraint_name: "account_tag_addresses_identity_id_fkey" + )} + + {:error, _failed_operation, error, _changes} -> + {:error, error} + end + end + def create(attrs) do - %__MODULE__{} - |> changeset(attrs) - |> Repo.account_repo().insert() + {:error, + %__MODULE__{} + |> changeset(attrs)} end defp put_hashed_fields(changeset) do diff --git a/apps/explorer/lib/explorer/account/tag_transaction.ex b/apps/explorer/lib/explorer/account/tag_transaction.ex index 24b14fd5c996..893475894765 100644 --- a/apps/explorer/lib/explorer/account/tag_transaction.ex +++ b/apps/explorer/lib/explorer/account/tag_transaction.ex @@ -13,6 +13,8 @@ defmodule Explorer.Account.TagTransaction do alias Explorer.Chain.Hash import Explorer.Chain, only: [hash_to_lower_case_string: 1] + @user_not_found "User not found" + typed_schema "account_tag_transactions" do field(:transaction_hash_hash, Cloak.Ecto.SHA256) :: binary() | nil field(:name, Explorer.Encrypted.Binary, null: false) @@ -37,16 +39,62 @@ defmodule Explorer.Account.TagTransaction do |> cast(attrs, @attrs) |> validate_required(@attrs, message: "Required") |> validate_length(:name, min: 1, max: 35) + |> foreign_key_constraint(:identity_id, message: @user_not_found) |> put_hashed_fields() |> unique_constraint([:identity_id, :transaction_hash_hash], message: "Transaction tag already exists") |> tag_transaction_count_constraint() |> check_transaction_existence() end + @doc """ + Creates a new tag transaction record in a transactional context. + + Ensures data consistency by acquiring a lock on the associated identity record + before creating the tag transaction. The operation either succeeds completely or + fails without side effects. 
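(Editor's illustration, not part of the original diff.) Every Account `create/1` in this change follows the same lock-then-insert shape; sketched here with a hypothetical `MySchema` and `:record` step name:

    Multi.new()
    # SELECT ... FOR UPDATE on the identity row; a missing user surfaces as
    # {:error, :acquire_identity, :not_found, _changes} before the INSERT runs
    |> Identity.acquire_with_lock(identity_id)
    |> Multi.insert(:record, MySchema.changeset(%MySchema{}, attrs))
    |> Repo.account_repo().transaction()
    |> case do
      {:ok, %{record: record}} -> {:ok, record}
      {:error, :acquire_identity, :not_found, _changes} -> {:error, :user_not_found}
      {:error, _failed_operation, error, _changes} -> {:error, error}
    end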
+ + ## Parameters + - `attrs`: A map of attributes that must include: + - `:identity_id`: The ID of the associated identity + + ## Returns + - `{:ok, tag_transaction}` - Successfully created tag transaction record + - `{:error, changeset}` - A changeset with errors if: + - The identity doesn't exist + - The identity ID is missing from the attributes + - The changeset validation fails + """ + @spec create(map()) :: {:ok, t()} | {:error, Changeset.t()} + def create(%{identity_id: identity_id} = attrs) do + Multi.new() + |> Identity.acquire_with_lock(identity_id) + |> Multi.insert(:tag_transaction, fn _ -> + %__MODULE__{} + |> changeset(attrs) + end) + |> Repo.account_repo().transaction() + |> case do + {:ok, %{tag_transaction: tag_transaction}} -> + {:ok, tag_transaction} + + {:error, :acquire_identity, :not_found, _changes} -> + {:error, + %__MODULE__{} + |> changeset(attrs) + |> add_error(:identity_id, @user_not_found, + constraint: :foreign, + constraint_name: "account_tag_transactions_identity_id_fkey" + )} + + {:error, _failed_operation, error, _changes} -> + {:error, error} + end + end + def create(attrs) do - %__MODULE__{} - |> changeset(attrs) - |> Repo.account_repo().insert() + {:error, + %__MODULE__{} + |> changeset(attrs)} end defp put_hashed_fields(changeset) do diff --git a/apps/explorer/lib/explorer/account/watchlist.ex b/apps/explorer/lib/explorer/account/watchlist.ex index 86bc8141983a..bb32b0244b82 100644 --- a/apps/explorer/lib/explorer/account/watchlist.ex +++ b/apps/explorer/lib/explorer/account/watchlist.ex @@ -60,4 +60,34 @@ defmodule Explorer.Account.Watchlist do {:ok, repo.all(from(watchlist in __MODULE__, where: watchlist.identity_id in ^ids_to_merge))} end) end + + @doc """ + Adds an operation to acquire and lock a watchlist record in a database transaction. + + Performs a SELECT FOR UPDATE on the watchlist record to prevent concurrent + modifications until the transaction is committed or rolled back. 
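(Editor's note; this reads the intent into the lock rather than documented behavior: serializing on the watchlist row means a concurrent writer, e.g. an account-deletion flow, blocks until this transaction finishes, so a dependent INSERT either sees the row or fails cleanly with `{:error, :not_found}` instead of a late foreign-key violation.)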
+ + ## Parameters + - `multi`: An Ecto.Multi struct containing a series of database operations + - `watchlist_id`: The ID of the watchlist to lock + + ## Returns + An updated Ecto.Multi struct with the `:acquire_watchlist` operation that will: + - Return `{:ok, watchlist}` if the watchlist is found and locked + - Return `{:error, :not_found}` if no watchlist exists with the given ID + """ + @spec acquire_with_lock(Multi.t(), integer()) :: Multi.t() + def acquire_with_lock(multi, watchlist_id) do + Multi.run(multi, :acquire_watchlist, fn repo, _ -> + watchlist_query = from(watchlist in __MODULE__, where: watchlist.id == ^watchlist_id, lock: "FOR UPDATE") + + case repo.one(watchlist_query) do + nil -> + {:error, :not_found} + + watchlist -> + {:ok, watchlist} + end + end) + end end diff --git a/apps/explorer/lib/explorer/account/watchlist_address.ex b/apps/explorer/lib/explorer/account/watchlist_address.ex index 1dddc02b41c3..356734713057 100644 --- a/apps/explorer/lib/explorer/account/watchlist_address.ex +++ b/apps/explorer/lib/explorer/account/watchlist_address.ex @@ -15,6 +15,8 @@ defmodule Explorer.Account.WatchlistAddress do import Explorer.Chain, only: [hash_to_lower_case_string: 1] + @watchlist_not_found "Watchlist not found" + typed_schema "account_watchlist_addresses" do field(:address_hash_hash, Cloak.Ecto.SHA256) :: binary() | nil field(:name, Explorer.Encrypted.Binary, null: false) @@ -59,6 +61,7 @@ defmodule Explorer.Account.WatchlistAddress do |> cast(attrs, @attrs) |> validate_length(:name, min: 1, max: 35) |> validate_required([:name, :address_hash, :watchlist_id], message: "Required") + |> foreign_key_constraint(:watchlist_id, message: @watchlist_not_found) |> put_hashed_fields() |> unique_constraint([:watchlist_id, :address_hash_hash], name: "unique_watchlist_id_address_hash_hash_index", @@ -74,10 +77,55 @@ defmodule Explorer.Account.WatchlistAddress do |> force_change(:address_hash_hash, hash_to_lower_case_string(get_field(changeset, :address_hash))) end + @doc """ + Creates a new watchlist address record in a transactional context. + + Ensures data consistency by acquiring a lock on the associated watchlist record + before creating the watchlist address. The operation either succeeds completely + or fails without side effects. 
+ + ## Parameters + - `attrs`: A map of attributes that must include: + - `:watchlist_id`: The ID of the associated watchlist + + ## Returns + - `{:ok, watchlist_address}` - Successfully created watchlist address record + - `{:error, changeset}` - A changeset with errors if: + - The watchlist doesn't exist + - The watchlist ID is missing from the attributes + - The changeset validation fails + """ + @spec create(map()) :: {:ok, t()} | {:error, Changeset.t()} + def create(%{watchlist_id: watchlist_id} = attrs) do + Multi.new() + |> Watchlist.acquire_with_lock(watchlist_id) + |> Multi.insert(:watchlist_address, fn _ -> + %__MODULE__{} + |> changeset(attrs) + end) + |> Repo.account_repo().transaction() + |> case do + {:ok, %{watchlist_address: watchlist_address}} -> + {:ok, watchlist_address} + + {:error, :acquire_watchlist, :not_found, _changes} -> + {:error, + %__MODULE__{} + |> changeset(attrs) + |> add_error(:watchlist_id, @watchlist_not_found, + constraint: :foreign, + constraint_name: "account_watchlist_addresses_watchlist_id_fkey" + )} + + {:error, _failed_operation, changeset, _changes} -> + {:error, changeset} + end + end + def create(attrs) do - %__MODULE__{} - |> changeset(attrs) - |> Repo.account_repo().insert() + {:error, + %__MODULE__{} + |> changeset(attrs)} end def watchlist_address_count_constraint(%Changeset{changes: %{watchlist_id: watchlist_id}} = watchlist_address) do diff --git a/apps/explorer/lib/explorer/application.ex b/apps/explorer/lib/explorer/application.ex index 99350f00877a..3e672f329482 100644 --- a/apps/explorer/lib/explorer/application.ex +++ b/apps/explorer/lib/explorer/application.ex @@ -31,6 +31,7 @@ defmodule Explorer.Application do alias Explorer.Chain.Supply.RSK alias Explorer.Market.MarketHistoryCache + alias Explorer.MicroserviceInterfaces.MultichainSearch alias Explorer.Repo.PrometheusLogger @impl Application @@ -90,7 +91,11 @@ defmodule Explorer.Application do opts = [strategy: :one_for_one, name: Explorer.Supervisor, max_restarts: 1_000] - Supervisor.start_link(children, opts) + if Application.get_env(:nft_media_handler, :standalone_media_worker?) 
do + Supervisor.start_link([], opts) + else + Supervisor.start_link(children, opts) + end end defp configurable_children do @@ -130,7 +135,7 @@ defmodule Explorer.Application do configure(Explorer.Chain.Fetcher.CheckBytecodeMatchingOnDemand), configure(Explorer.Chain.Fetcher.FetchValidatorInfoOnDemand), configure(Explorer.TokenInstanceOwnerAddressMigration.Supervisor), - sc_microservice_configure(Explorer.Chain.Fetcher.LookUpSmartContractSourcesOnDemand), + configure_sc_microservice(Explorer.Chain.Fetcher.LookUpSmartContractSourcesOnDemand), configure(Explorer.Chain.Cache.RootstockLockedBTC), configure(Explorer.Chain.Cache.OptimismFinalizationPeriod), configure(Explorer.Migrator.TransactionsDenormalization), @@ -144,6 +149,10 @@ defmodule Explorer.Application do configure(Explorer.Migrator.TokenTransferBlockConsensus), configure(Explorer.Migrator.RestoreOmittedWETHTransfers), configure(Explorer.Migrator.FilecoinPendingAddressOperations), + Explorer.Migrator.BackfillMultichainSearchDB + |> configure_mode_dependent_process(:indexer) + |> configure_multichain_search_microservice(), + configure_mode_dependent_process(Explorer.Migrator.ArbitrumDaRecordsNormalization, :indexer), configure_mode_dependent_process(Explorer.Migrator.ShrinkInternalTransactions, :indexer), configure_chain_type_dependent_process(Explorer.Chain.Cache.BlackfortValidatorsCounters, :blackfort), configure_chain_type_dependent_process(Explorer.Chain.Cache.StabilityValidatorsCounters, :stability), @@ -157,7 +166,8 @@ defmodule Explorer.Application do configure_mode_dependent_process(Explorer.Migrator.SanitizeMissingTokenBalances, :indexer), configure_mode_dependent_process(Explorer.Migrator.SanitizeReplacedTransactions, :indexer), configure_mode_dependent_process(Explorer.Migrator.ReindexInternalTransactionsWithIncompatibleStatus, :indexer), - Explorer.Migrator.RefetchContractCodes |> configure() |> configure_chain_type_dependent_process(:zksync) + Explorer.Migrator.RefetchContractCodes |> configure() |> configure_chain_type_dependent_process(:zksync), + configure(Explorer.Chain.Fetcher.AddressesBlacklist) ] |> List.flatten() @@ -242,7 +252,7 @@ defmodule Explorer.Application do end end - defp sc_microservice_configure(process) do + defp configure_sc_microservice(process) do if Application.get_env(:explorer, Explorer.SmartContract.RustVerifierInterfaceBehaviour)[:eth_bytecode_db?] 
do process else @@ -250,6 +260,14 @@ defmodule Explorer.Application do end end + defp configure_multichain_search_microservice(process) do + if MultichainSearch.enabled?() do + process + else + [] + end + end + defp datadog_port do Application.get_env(:explorer, :datadog)[:port] end diff --git a/apps/explorer/lib/explorer/application/constants.ex b/apps/explorer/lib/explorer/application/constants.ex index 09787b9e18c7..d5e2413d90ea 100644 --- a/apps/explorer/lib/explorer/application/constants.ex +++ b/apps/explorer/lib/explorer/application/constants.ex @@ -100,14 +100,6 @@ defmodule Explorer.Application.Constants do """ @spec get_last_processed_token_address_hash(keyword()) :: nil | Explorer.Chain.Hash.t() def get_last_processed_token_address_hash(options \\ []) do - result = get_constant_by_key(@last_processed_erc_721_token, options) - - case Chain.string_to_address_hash(result) do - {:ok, address_hash} -> - address_hash - - _ -> - nil - end + @last_processed_erc_721_token |> get_constant_by_key(options) |> Chain.string_to_address_hash_or_nil() end end diff --git a/apps/explorer/lib/explorer/arbitrum/claim_rollup_message.ex b/apps/explorer/lib/explorer/arbitrum/claim_rollup_message.ex new file mode 100644 index 000000000000..f68b5b04def8 --- /dev/null +++ b/apps/explorer/lib/explorer/arbitrum/claim_rollup_message.ex @@ -0,0 +1,685 @@ +defmodule Explorer.Arbitrum.ClaimRollupMessage do + @moduledoc """ + Provides functionality to read L2->L1 messages and prepare withdrawal claims in the Arbitrum protocol. + + This module allows: + - Retrieving L2->L1 messages from a transaction's logs and determining their current + status. This is used when a user has a transaction hash and needs to identify + which messages from this transaction can be claimed on L1. + - Generating calldata for claiming confirmed withdrawals through the L1 Outbox + contract using a specific message ID. This is typically used when the message ID + is already known (e.g., from transaction details or L2->L1 messages list in the UI). + + For detailed information about Arbitrum's L2->L1 messaging system, see: + https://docs.arbitrum.io/how-arbitrum-works/arbos/l2-l1-messaging + """ + + alias ABI.TypeDecoder + alias EthereumJSONRPC + alias EthereumJSONRPC.Arbitrum, as: ArbitrumRpc + alias EthereumJSONRPC.Arbitrum.Constants.Contracts, as: ArbitrumContracts + alias EthereumJSONRPC.Arbitrum.Constants.Events, as: ArbitrumEvents + alias EthereumJSONRPC.Encoder + alias Explorer.Chain + alias Explorer.Chain.Arbitrum.Reader.API.General, as: GeneralReader + alias Explorer.Chain.Arbitrum.Reader.API.Messages, as: MessagesReader + alias Explorer.Chain.Arbitrum.Reader.API.Settlement, as: SettlementReader + alias Explorer.Chain.{Data, Hash} + alias Explorer.Chain.Hash.Address + alias Indexer.Helper, as: IndexerHelper + + require Logger + + @doc """ + Retrieves all L2->L1 messages initiated by a transaction. + + This function scans the transaction logs for L2ToL1Tx events and converts them + into withdrawal objects. For each event, it attempts to find a corresponding + message record in the database to determine the message status. If a message + record is not found (e.g., due to database inconsistency or fetcher issues), + the function attempts to restore the message status through requests to the RPC + node. 
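(Editor's illustration, not part of the original diff; the transaction hash below is a hypothetical placeholder.) A typical call pairs this function with a filter on the claimable statuses:

    {:ok, transaction_hash} =
      Explorer.Chain.Hash.Full.cast("0x" <> String.duplicate("ab", 32))

    transaction_hash
    |> Explorer.Arbitrum.ClaimRollupMessage.transaction_to_withdrawals()
    # only :confirmed messages can be claimed through the L1 Outbox
    |> Enum.filter(&(&1.status == :confirmed))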
+ + ## Parameters + - `transaction_hash`: The hash of the transaction to scan for L2ToL1Tx events + + ## Returns + - A list of `Explorer.Arbitrum.Withdraw.t()` objects, each representing a single + L2->L1 message initiated by the transaction. The list may be empty if no + L2ToL1Tx events are found. + """ + @spec transaction_to_withdrawals(Hash.Full.t()) :: [Explorer.Arbitrum.Withdraw.t()] + def transaction_to_withdrawals(transaction_hash) do + # request messages initiated by the provided transaction from the database + messages = MessagesReader.l2_to_l1_messages_by_transaction_hash(transaction_hash) + + # request associated logs from the database + logs = GeneralReader.transaction_to_logs_by_topic0(transaction_hash, ArbitrumEvents.l2_to_l1()) + + logs + |> Enum.map(fn log -> + msg = Enum.find(messages, fn msg -> msg.message_id == Hash.to_integer(log.fourth_topic) end) + # `msg` is needed to retrieve the message status + # Normally the message should be found, but in rare cases (database inconsistency, fetcher issues) it may be missing. + # In this case log_to_withdrawal/1 will be used to retrieve the L2->L1 message status from the RPC node + log_to_withdrawal(log, msg) + end) + end + + @doc """ + Constructs calldata for claiming an L2->L1 message on the L1 chain. + + This function retrieves the L2->L1 message record from the database by the given + message ID and generates the proof and calldata needed for executing the message + through the Outbox contract on L1. Only messages with :confirmed status can be + claimed. + + ## Parameters + - `message_id`: The unique identifier of the L2->L1 message (`position` field of + the associated `L2ToL1Tx` event) + + ## Returns + - `{:ok, [contract_address: String.t(), calldata: String.t()]}` where: + * `contract_address` is the L1 Outbox contract address + * `calldata` is the ABI-encoded executeTransaction function call + - `{:error, :not_found}` if either: + * the message with the given ID cannot be found in the database + * the associated L2ToL1Tx event log cannot be found + - `{:error, :initiated}` if the message is not yet confirmed + - `{:error, :sent}` if the message is not yet confirmed + - `{:error, :relayed}` if the message has already been claimed + - `{:error, :internal_error}` if the message status is unknown + """ + @spec claim(non_neg_integer()) :: {:ok, [contract_address: String.t(), calldata: String.t()]} | {:error, term()} + def claim(message_id) do + case MessagesReader.l2_to_l1_message_by_id(message_id) do + nil -> + Logger.error("Unable to find withdrawal with id #{message_id}") + {:error, :not_found} + + message -> + claim_message(message) + end + end + + # Constructs calldata for claiming an L2->L1 message on L1. + # + # This function retrieves the L2ToL1Tx event log associated with the message and + # verifies the message status. Only messages with :confirmed status can be claimed. + # For confirmed messages, it generates calldata with the proof needed for executing + # the message through the Outbox contract on L1. 
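# (Editor's illustration, not part of the original diff.) A caller could map a
# successful claim/1 result onto an L1 transaction payload roughly like this;
# the %{to:, data:, value:} shape is an assumption, not an API of this module:
#
#     case Explorer.Arbitrum.ClaimRollupMessage.claim(message_id) do
#       {:ok, [contract_address: outbox, calldata: calldata]} ->
#         %{to: outbox, data: calldata, value: 0}
#
#       {:error, :relayed} ->
#         :already_claimed
#
#       {:error, _not_claimable_yet_or_not_found} ->
#         :cannot_claim
#     end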
+ # + # ## Parameters + # - `message`: The L2->L1 message record containing transaction details and status + # + # ## Returns + # - `{:ok, [contract_address: binary(), calldata: binary()]}` where: + # * `contract_address` is the L1 Outbox contract address + # * `calldata` is the ABI-encoded executeTransaction function call + # - `{:error, :not_found}` if either: + # * the associated L2ToL1Tx event log cannot be found + # * the withdrawal cannot be found in the transaction logs + # - `{:error, :initiated}` if the message is not yet confirmed + # - `{:error, :sent}` if the message is not yet confirmed + # - `{:error, :relayed}` if the message has already been claimed + # - `{:error, :internal_error}` if the message status is unknown + @spec claim_message(Explorer.Chain.Arbitrum.Message.t()) :: + {:ok, list({:contract_address, binary()} | {:calldata, binary()})} + | {:error, :initiated | :sent | :relayed | :internal_error} + defp claim_message(message) do + # request associated log from the database + case message.originating_transaction_hash + |> GeneralReader.transaction_to_logs_by_topic0(ArbitrumEvents.l2_to_l1()) + |> Enum.find(fn log -> Hash.to_integer(log.fourth_topic) == message.message_id end) do + nil -> + Logger.error("Unable to find log with message_id #{message.message_id}") + {:error, :not_found} + + log -> + case log_to_withdrawal(log, message) do + nil -> + Logger.error( + "Unable to find withdrawal with id #{message.message_id} in transaction #{Hash.to_string(message.originating_transaction_hash)}" + ) + + {:error, :not_found} + + withdrawal when withdrawal.status == :confirmed -> + construct_claim(withdrawal) + + w when w.status == :initiated -> + {:error, :initiated} + + w when w.status == :sent -> + {:error, :sent} + + w when w.status == :relayed -> + {:error, :relayed} + + w when w.status == :unknown -> + {:error, :internal_error} + end + end + end + + # Converts an L2ToL1Tx event log into a withdrawal structure using the provided message information. + # + # This function extracts withdrawal details from the L2ToL1Tx event log and combines + # them with the message status from the database. For messages with status + # :initiated or :sent, it verifies the actual message status since the database + # status might be outdated if Arbitrum-specific fetchers were stopped. Also + # extracts token transfer information if the message represents a token withdrawal. 
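+  #
+  # For reference, a hedged sketch of the parsed-event fields consumed below
+  # (field names follow their usages in this module; value shapes are assumptions):
+  #
+  #   %{
+  #     message_id: 123,
+  #     position: 123,
+  #     caller: "0x...",
+  #     destination: "0x...",
+  #     arb_block_number: 456,
+  #     eth_block_number: 789,
+  #     timestamp: 1_700_000_000,
+  #     callvalue: 0,
+  #     data: <<...>>
+  #   }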
+  #
+  # ## Parameters
+  # - `log`: The L2ToL1Tx event log containing withdrawal information
+  # - `message`: The message record from the database containing status information,
+  #   or `nil` to fall back to `log_to_withdrawal/1`
+  #
+  # ## Returns
+  # - An Explorer.Arbitrum.Withdraw struct representing the withdrawal, or
+  # - `nil` if the message ID from the log doesn't match the provided message
+  @spec log_to_withdrawal(
+          Explorer.Chain.Log.t(),
+          Explorer.Chain.Arbitrum.Message.t() | nil
+        ) :: Explorer.Arbitrum.Withdraw.t() | nil
+
+  defp log_to_withdrawal(log, nil) do
+    log_to_withdrawal(log)
+  end
+
+  defp log_to_withdrawal(log, message) do
+    # getting needed fields from the L2ToL1Tx event
+    fields =
+      log
+      |> convert_explorer_log_to_map()
+      |> ArbitrumRpc.l2_to_l1_event_parse()
+
+    if fields.message_id == message.message_id do
+      # extract token withdrawal info from the associated event's data
+      token = decode_token_withdrawal_data(fields.data)
+
+      data_hex =
+        fields.data
+        |> Base.encode16(case: :lower)
+
+      {:ok, caller_address} = Hash.Address.cast(fields.caller)
+      {:ok, destination_address} = Hash.Address.cast(fields.destination)
+
+      # For :initiated and :sent statuses, we need to verify the actual message status
+      # since the database status could be outdated if Arbitrum fetchers were stopped.
+      message_status =
+        case message.status do
+          status when status == :initiated or status == :sent ->
+            get_actual_message_status(message.message_id)
+
+          status ->
+            status
+        end
+
+      %Explorer.Arbitrum.Withdraw{
+        message_id: Hash.to_integer(log.fourth_topic),
+        status: message_status,
+        caller: caller_address,
+        destination: destination_address,
+        arb_block_number: fields.arb_block_number,
+        eth_block_number: fields.eth_block_number,
+        l2_timestamp: fields.timestamp,
+        callvalue: fields.callvalue,
+        data: "0x" <> data_hex,
+        token: token
+      }
+    else
+      Logger.error(
+        "log_to_withdrawal: log doesn't correspond to the message (#{fields.position} != #{message.message_id})"
+      )
+
+      nil
+    end
+  end
+
+  # Converts an L2ToL1Tx event log into a withdrawal structure when the message
+  # information is not available in the database.
+  #
+  # This function parses the event log data, extracts both the basic withdrawal
+  # information and any associated token transfer data if the message represents a
+  # token withdrawal (by examining the finalizeInboundTransfer calldata). Since the
+  # message is not found in the database, the function attempts to determine its
+  # current status by comparing the message ID with the total count of messages sent
+  # from L2.
+  #
+  # ## Parameters
+  # - `log`: The L2ToL1Tx event log containing withdrawal information
+  #
+  # ## Returns
+  # - An Explorer.Arbitrum.Withdraw struct representing the withdrawal
+  @spec log_to_withdrawal(Explorer.Chain.Log.t()) :: Explorer.Arbitrum.Withdraw.t()
+  defp log_to_withdrawal(log) do
+    # getting needed fields from the L2ToL1Tx event
+    fields =
+      log
+      |> convert_explorer_log_to_map()
+      |> ArbitrumRpc.l2_to_l1_event_parse()
+
+    status = get_actual_message_status(fields.message_id)
+
+    token = decode_token_withdrawal_data(fields.data)
+
+    data_hex =
+      fields.data
+      |> Base.encode16(case: :lower)
+
+    {:ok, caller_address} = Hash.Address.cast(fields.caller)
+    {:ok, destination_address} = Hash.Address.cast(fields.destination)
+
+    %Explorer.Arbitrum.Withdraw{
+      message_id: Hash.to_integer(log.fourth_topic),
+      status: status,
+      caller: caller_address,
+      destination: destination_address,
+      arb_block_number: fields.arb_block_number,
+      eth_block_number: fields.eth_block_number,
+      l2_timestamp: fields.timestamp,
+      callvalue: fields.callvalue,
+      data: "0x" <> data_hex,
+      token: token
+    }
+  end
+
+  # Guesses the actual status of an L2->L1 message by analyzing data from the RPC node and the database.
+  #
+  # The function first checks if the message has been spent (claimed) on L1 by
+  # querying the Outbox contract. If the message is spent, its status is `:relayed`.
+  # Otherwise, the function determines the message status by comparing its ID with
+  # the total count of messages sent from the rollup up to the most recent confirmed
+  # rollup block. For L2->L1 message claiming purposes there is no need to distinguish
+  # between the `:sent` and `:initiated` statuses, since either of them means that
+  # the message cannot be claimed yet.
+  #
+  # ## Parameters
+  # - `message_id`: The unique identifier of the L2->L1 message
+  #
+  # ## Returns
+  # - `:unknown` if unable to determine the message status
+  # - `:sent` if the message is not yet confirmed
+  # - `:confirmed` if the message is confirmed but not yet claimed
+  # - `:relayed` if the message has been successfully claimed on L1
+  @spec get_actual_message_status(non_neg_integer()) :: :unknown | :sent | :confirmed | :relayed
+  defp get_actual_message_status(message_id) do
+    # getting needed L1/L2 properties: RPC URL and main Rollup contract address
+    config_common = Application.get_all_env(:indexer)[Indexer.Fetcher.Arbitrum]
+    json_l1_rpc_named_arguments = IndexerHelper.json_rpc_named_arguments(config_common[:l1_rpc])
+
+    outbox_contract =
+      ArbitrumRpc.get_contracts_for_rollup(
+        config_common[:l1_rollup_address],
+        :inbox_outbox,
+        json_l1_rpc_named_arguments
+      )[:outbox]
+
+    {:ok, is_withdrawal_spent} =
+      ArbitrumRpc.withdrawal_spent?(outbox_contract, message_id, json_l1_rpc_named_arguments)
+
+    case is_withdrawal_spent do
+      true ->
+        :relayed
+
+      false ->
+        case get_size_for_proof() do
+          nil -> :unknown
+          size when size > message_id -> :confirmed
+          _ -> :sent
+        end
+    end
+  end
+
+  # Converts an Explorer.Chain.Log struct into a map suitable for L2->L1 event parsing.
+  #
+  # This function transforms the log data into a format required by the
+  # `EthereumJSONRPC.Arbitrum.l2_to_l1_event_parse/1` function.
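+  #
+  # A sketch of the produced shape (hex strings are placeholders):
+  #
+  #   %{
+  #     data: "0x...",
+  #     second_topic: "0x...",
+  #     fourth_topic: "0x..."
+  #   }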
@spec convert_explorer_log_to_map(Explorer.Chain.Log.t()) :: %{
+          :data => binary(),
+          :second_topic => binary(),
+          :fourth_topic => binary()
+        }
+  defp convert_explorer_log_to_map(log) do
+    %{
+      :data => Data.to_string(log.data),
+      :second_topic => Hash.to_string(log.second_topic),
+      :fourth_topic => Hash.to_string(log.fourth_topic)
+    }
+  end
+
+  # Extracts token withdrawal information from the finalizeInboundTransfer calldata.
+  #
+  # The calldata is encapsulated in the L2ToL1Tx event and is meant to be executed on
+  # the TokenBridge contract during withdrawal claiming.
+  #
+  # ## Parameters
+  # - `data`: Binary data containing the finalizeInboundTransfer calldata
+  #
+  # ## Returns
+  # - Map containing token `address`, `destination` address and token `amount` if the
+  #   data corresponds to finalizeInboundTransfer
+  # - `nil` if the data is empty or doesn't match the finalizeInboundTransfer method
+  #   (which happens when the L2->L1 message is for an arbitrary data transfer, such
+  #   as a remote call of a smart contract on L1)
+  @spec decode_token_withdrawal_data(binary()) ::
+          %{
+            address: Explorer.Chain.Hash.Address.t(),
+            destination: Explorer.Chain.Hash.Address.t(),
+            amount: non_neg_integer()
+          }
+          | nil
+  defp decode_token_withdrawal_data(<<0x2E567B36::32, rest_data::binary>>) do
+    [token, _, to, amount, _] = ABI.decode(ArbitrumContracts.finalize_inbound_transfer_selector_with_abi(), rest_data)
+
+    token_bin =
+      case Address.cast(token) do
+        {:ok, address} -> address
+        _ -> nil
+      end
+
+    to_bin =
+      case Address.cast(to) do
+        {:ok, address} -> address
+        _ -> nil
+      end
+
+    %{
+      address: token_bin,
+      destination: to_bin,
+      amount: amount
+    }
+  end
+
+  defp decode_token_withdrawal_data(_binary) do
+    nil
+  end
+
+  # Builds a claim transaction calldata for executing an L2->L1 message on L1.
+  #
+  # Constructs calldata containing the proof needed to execute a withdrawal message
+  # through the Outbox contract on L1. The function performs the following steps:
+  # 1. Gets the total count of L2->L1 messages (size)
+  # 2. Constructs the outbox proof using NodeInterface contract on the rollup
+  # 3. Encodes the executeTransaction function call with the proof and message data
+  #
+  # ## Parameters
+  # - `withdrawal`: A withdrawal message containing all necessary data for claim
+  #   construction.
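+  #
+  # For orientation, a hedged sketch of a successful result (hex values are
+  # placeholders, not real addresses or calldata):
+  #
+  #   {:ok,
+  #    [
+  #      # the L1 Outbox contract address
+  #      contract_address: "0x...",
+  #      # ABI-encoded executeTransaction(...) call
+  #      calldata: "0x..."
+  #    ]}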
+ # + # ## Returns + # - `{:ok, [contract_address: binary(), calldata: binary()]}` where: + # * `contract_address` is the L1 Outbox contract address + # * `calldata` is the ABI-encoded executeTransaction function call + # - `{:error, :internal_error}` if proof construction fails + @spec construct_claim(Explorer.Arbitrum.Withdraw.t()) :: + {:ok, [contract_address: binary(), calldata: binary()]} | {:error, :internal_error} + defp construct_claim(withdrawal) do + # getting needed L1 properties: RPC URL and Main Rollup contract address + config_common = Application.get_all_env(:indexer)[Indexer.Fetcher.Arbitrum] + l1_rpc = config_common[:l1_rpc] + json_l1_rpc_named_arguments = IndexerHelper.json_rpc_named_arguments(l1_rpc) + json_l2_rpc_named_arguments = Application.get_env(:explorer, :json_rpc_named_arguments) + l1_rollup_address = config_common[:l1_rollup_address] + + outbox_contract = + ArbitrumRpc.get_contracts_for_rollup(l1_rollup_address, :inbox_outbox, json_l1_rpc_named_arguments)[:outbox] + + case get_size_for_proof() do + nil -> + Logger.error("Cannot get size for proof") + {:error, :internal_error} + + size -> + # now we are ready to construct outbox proof + case ArbitrumRpc.construct_outbox_proof( + ArbitrumContracts.node_interface_contract_address(), + size, + withdrawal.message_id, + json_l2_rpc_named_arguments + ) do + {:ok, [_send, _root, proof]} -> + proof_values = raw_proof_to_hex(proof) + + # finally encode function call + args = [ + proof_values, + withdrawal.message_id, + Hash.to_string(withdrawal.caller), + Hash.to_string(withdrawal.destination), + withdrawal.arb_block_number, + withdrawal.eth_block_number, + withdrawal.l2_timestamp, + withdrawal.callvalue, + withdrawal.data + ] + + calldata = Encoder.encode_function_call(ArbitrumContracts.execute_transaction_selector_with_abi(), args) + + {:ok, [contract_address: outbox_contract, calldata: calldata]} + + {:error, _} -> + Logger.error("Unable to construct proof with size = #{size}, leaf = #{withdrawal.message_id}") + + {:error, :internal_error} + end + end + end + + # Converts list of binaries into the hex-encoded 0x-prefixed strings + defp raw_proof_to_hex(proof) do + proof + |> Enum.map(fn p -> "0x" <> Base.encode16(p, case: :lower) end) + end + + # Retrieves the size parameter (total count of L2->L1 messages) needed for outbox + # proof construction. First attempts to fetch from the local database, falling back + # to RPC requests if necessary. + @spec get_size_for_proof() :: non_neg_integer() | nil + defp get_size_for_proof do + case get_size_for_proof_from_database() do + nil -> + Logger.warning("The database doesn't contain required data to construct proof. Fallback to direct RPC request") + + config_common = Application.get_all_env(:indexer)[Indexer.Fetcher.Arbitrum] + l1_rollup_address = config_common[:l1_rollup_address] + json_l1_rpc_named_arguments = IndexerHelper.json_rpc_named_arguments(config_common[:l1_rpc]) + json_l2_rpc_named_arguments = Application.get_env(:explorer, :json_rpc_named_arguments) + + get_size_for_proof_from_rpc(l1_rollup_address, json_l1_rpc_named_arguments, json_l2_rpc_named_arguments) + + size -> + size + end + end + + # Retrieves the size parameter (total count of L2->L1 messages) needed for outbox + # proof construction using data from the database. + # + # The function gets the highest confirmed block number and retrieves its + # associated send_count value, which represents the cumulative count of L2->L1 + # messages. 
+  #
+  # ## Returns
+  # - Total count of L2->L1 messages up to the latest confirmed rollup block
+  # - `nil` if the required data is not found in the database
+  @spec get_size_for_proof_from_database() :: non_neg_integer() | nil
+  defp get_size_for_proof_from_database do
+    case SettlementReader.highest_confirmed_block() do
+      nil ->
+        nil
+
+      highest_confirmed_block ->
+        case Chain.number_to_block(highest_confirmed_block) do
+          {:ok, block} -> Map.get(block, :send_count)
+          _ -> nil
+        end
+    end
+  end
+
+  # Retrieves the size parameter (total count of L2->L1 messages) needed for outbox
+  # proof construction via RPC calls.
+  #
+  # Note: The "size" parameter represents the cumulative count of L2->L1 messages
+  # that have been sent up to the latest confirmed node.
+  #
+  # This function performs the following steps:
+  # 1. Gets the latest confirmed node index from the L1 rollup contract
+  # 2. Retrieves the L1 block number where that node was created
+  # 3. Uses the block number to determine the total count of L2->L1 messages
+  #
+  # ## Parameters
+  # - `l1_rollup_address`: Address of the Arbitrum rollup contract on L1
+  # - `json_l1_rpc_named_arguments`: Configuration for L1 JSON-RPC connection
+  # - `json_l2_rpc_named_arguments`: Configuration for rollup JSON-RPC connection
+  #
+  # ## Returns
+  # - Total count of L2->L1 messages up to the latest confirmed node
+  # - `nil` if any step in the process fails
+  @spec get_size_for_proof_from_rpc(
+          String.t(),
+          EthereumJSONRPC.json_rpc_named_arguments(),
+          EthereumJSONRPC.json_rpc_named_arguments()
+        ) :: non_neg_integer() | nil
+  defp get_size_for_proof_from_rpc(l1_rollup_address, json_l1_rpc_named_arguments, json_l2_rpc_named_arguments) do
+    # getting the latest confirmed node index from the L1 rollup contract (via RPC, not the database)
+    {:ok, latest_confirmed_node_index} =
+      ArbitrumRpc.get_latest_confirmed_node_index(
+        l1_rollup_address,
+        json_l1_rpc_named_arguments
+      )
+
+    # getting the L1 block number where that node was created
+    case ArbitrumRpc.get_node_creation_block_number(
+           l1_rollup_address,
+           latest_confirmed_node_index,
+           json_l1_rpc_named_arguments
+         ) do
+      {:ok, node_creation_l1_block_number} ->
+        # getting the associated L2 block and extracting the `send_count` value from it
+        l1_block_number_to_withdrawals_count(
+          node_creation_l1_block_number,
+          l1_rollup_address,
+          json_l1_rpc_named_arguments,
+          json_l2_rpc_named_arguments
+        )
+
+      {:error, error} ->
+        Logger.error("Cannot fetch node creation block number: #{inspect(error)}")
+        nil
+    end
+  end
+
+  # Retrieves the total count of L2->L1 messages sent up to the rollup block associated
+  # with a NodeCreated event in the specified L1 block.
+  #
+  # The function first fetches the NodeCreated event from the L1 block, extracts the
+  # corresponding rollup block hash, and then retrieves the send_count value from that
+  # rollup block. If the rollup block is not found in the database, falls back to
+  # querying the rollup JSON-RPC endpoint directly.
+  #
+  # ## Parameters
+  # - `node_creation_l1_block_number`: L1 block number containing a NodeCreated event
+  # - `l1_rollup_address`: Address of the Rollup contract on L1
+  # - `json_l1_rpc_named_arguments`: Configuration for L1 JSON-RPC connection
+  # - `json_l2_rpc_named_arguments`: Configuration for rollup JSON-RPC connection
+  #
+  # ## Returns
+  # - Number of L2->L1 messages sent up to the associated rollup block
+  # - `nil` if the event cannot be found or block data cannot be retrieved
+  @spec l1_block_number_to_withdrawals_count(
+          non_neg_integer(),
+          String.t(),
+          EthereumJSONRPC.json_rpc_named_arguments(),
+          EthereumJSONRPC.json_rpc_named_arguments()
+        ) :: non_neg_integer() | nil
+  defp l1_block_number_to_withdrawals_count(
+         node_creation_l1_block_number,
+         l1_rollup_address,
+         json_l1_rpc_named_arguments,
+         json_l2_rpc_named_arguments
+       ) do
+    # request the NodeCreated event emitted by the Rollup contract in the given L1 block
+    case IndexerHelper.get_logs(
+           node_creation_l1_block_number,
+           node_creation_l1_block_number,
+           l1_rollup_address,
+           [ArbitrumEvents.node_created()],
+           json_l1_rpc_named_arguments
+         ) do
+      {:ok, events} when is_list(events) and length(events) > 0 ->
+        node_created_event = List.last(events)
+        # extract the L2 block hash from the NodeCreated event
+        l2_block_hash = l2_block_hash_from_node_created_event(node_created_event)
+
+        {:ok, l2_block_hash} =
+          l2_block_hash
+          |> Hash.Full.cast()
+
+        # get the `send_count` value from the L2 block, which represents the number of L2->L1 messages sent up to this block
+        messages_count_up_to_block_with_hash(l2_block_hash, json_l2_rpc_named_arguments)
+
+      _ ->
+        Logger.error("Cannot fetch NodeCreated event in L1 block #{node_creation_l1_block_number}")
+        nil
+    end
+  end
+
+  # Retrieves the total count of L2->L1 messages sent up to a specific rollup block.
+  #
+  # First attempts to fetch the block from the database. If not found, falls back
+  # to querying the rollup JSON-RPC endpoint directly.
+  #
+  # ## Parameters
+  # - `l2_block_hash`: The full hash of the rollup block to query
+  # - `json_l2_rpc_named_arguments`: Configuration options for the rollup JSON-RPC
+  #   connection
+  #
+  # ## Returns
+  # - The `send_count` value from the block representing total L2->L1 messages sent
+  # - `nil` if the block cannot be retrieved or an error occurs
+  @spec messages_count_up_to_block_with_hash(Hash.Full.t(), EthereumJSONRPC.json_rpc_named_arguments()) ::
+          non_neg_integer() | nil
+  defp messages_count_up_to_block_with_hash(l2_block_hash, json_l2_rpc_named_arguments) do
+    case Chain.hash_to_block(l2_block_hash, api?: true) do
+      {:ok, block} ->
+        Map.get(block, :send_count)
+
+      {:error, _} ->
+        case EthereumJSONRPC.fetch_blocks_by_hash(
+               [Hash.to_string(l2_block_hash)],
+               json_l2_rpc_named_arguments,
+               false
+             ) do
+          {:ok, blocks} ->
+            blocks.blocks_params
+            |> hd()
+            |> Map.get(:send_count)
+
+          {:error, error} ->
+            Logger.error("Failed to fetch L2 block by hash #{l2_block_hash}: #{inspect(error)}")
+            nil
+        end
+    end
+  end
+
+  # Extracts the rollup block hash associated with the NodeCreated event emitted on L1
+  @spec l2_block_hash_from_node_created_event(%{data: binary()}) :: binary()
+  defp l2_block_hash_from_node_created_event(event) do
+    [
+      _execution_hash,
+      {_, {{[l2_block_hash, _], _}, _}, _},
+      _after_inbox_batch_acc,
+      _wasm_module_root,
+      _inbox_max_count
+    ] =
+      event
+      |> Map.get("data")
+      |> String.trim_leading("0x")
+      |> Base.decode16!(case: :mixed)
+      |> TypeDecoder.decode_raw(ArbitrumEvents.node_created_unindexed_params())
+
+    l2_block_hash
+  end
+end
diff --git a/apps/explorer/lib/explorer/arbitrum/withdraw.ex b/apps/explorer/lib/explorer/arbitrum/withdraw.ex
new file mode 100644
index 000000000000..2e97bc8cb6da
--- /dev/null
+++ b/apps/explorer/lib/explorer/arbitrum/withdraw.ex
@@ -0,0 +1,73 @@
+defmodule Explorer.Arbitrum.Withdraw do
+  @moduledoc """
+    Models an L2->L1 withdrawal on Arbitrum.
+
+  """
+
+  alias Explorer.Chain.Hash
+
+  @typedoc """
+  Descriptor of the L2ToL1Tx event message on Arbitrum rollups:
+    * `message_id` - The ID of the message used for referencing.
+    * `status` - The status of the withdrawal: `:unknown`, `:initiated`, `:sent`, `:confirmed`, `:relayed`
+    * `caller` - The sender of the withdrawal transaction.
+    * `destination` - The receiver of the funds on the L1 chain.
+    * `arb_block_number` - The number of the block where the originating transaction is included.
+    * `eth_block_number` - The associated block number on the destination chain.
+    * `l2_timestamp` - The timestamp of the originating transaction.
+    * `callvalue` - The amount of native coins to withdraw.
+    * `data` - Raw transaction data that will be sent to the destination address on the L1 chain
+      when claiming the withdrawal. In that case the destination should be a contract address,
+      otherwise the transaction will fail. Typically this field contains calldata for the
+      `finalizeInboundTransfer(address,address,address,uint256,bytes)` method of the
+      Bridge contract, and it is intended to withdraw supported tokens instead of native coins.
+    * `token_address` - The extracted address of the token to withdraw, in case the `data` field represents a Bridge transaction.
+    * `token_destination` - The extracted receiver address, in case the `data` field represents a Bridge transaction.
+    * `token_amount` - The extracted token amount, in case the `data` field represents a Bridge transaction.
+  """
+
+  @type t :: %__MODULE__{
+          message_id: message_id,
+          status: status,
+          caller: caller,
+          destination: destination,
+          arb_block_number: arb_block_number,
+          eth_block_number: eth_block_number,
+          l2_timestamp: l2_timestamp,
+          callvalue: callvalue,
+          data: data,
+          token:
+            %{
+              address: token_address,
+              destination: token_destination,
+              amount: token_amount
+            }
+            | nil
+        }
+
+  @typep message_id :: non_neg_integer()
+  @typep status :: :unknown | :initiated | :sent | :confirmed | :relayed
+  @typep caller :: Hash.Address.t()
+  @typep destination :: Hash.Address.t()
+  @typep arb_block_number :: non_neg_integer()
+  @typep eth_block_number :: non_neg_integer()
+  @typep l2_timestamp :: non_neg_integer()
+  @typep callvalue :: non_neg_integer()
+  @typep data :: binary()
+  @typep token_address :: Hash.Address.t()
+  @typep token_destination :: Hash.Address.t()
+  @typep token_amount :: non_neg_integer()
+
+  defstruct [
+    :message_id,
+    :status,
+    :caller,
+    :destination,
+    :arb_block_number,
+    :eth_block_number,
+    :l2_timestamp,
+    :callvalue,
+    :data,
+    token: nil
+  ]
+end
diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex
index b75ca90b9fa0..4b34351086d9 100644
--- a/apps/explorer/lib/explorer/chain.ex
+++ b/apps/explorer/lib/explorer/chain.ex
@@ -88,6 +88,7 @@ defmodule Explorer.Chain do
   alias Explorer.Chain.SmartContract.Proxy.Models.Implementation
   alias Explorer.Market.MarketHistoryCache
+  alias Explorer.MicroserviceInterfaces.MultichainSearch
   alias Explorer.{PagingOptions, Repo}
   alias Dataloader.Ecto, as: DataloaderEcto
@@ -96,7 +97,7 @@ defmodule Explorer.Chain do
   @default_paging_options %PagingOptions{page_size: @default_page_size}
   @token_transfers_per_transaction_preview 10
-  @token_transfers_necessity_by_association %{
+  @token_transfer_necessity_by_association %{
     [from_address: :smart_contract] => :optional,
     [to_address: :smart_contract] => :optional,
     [from_address: :names] => :optional,
@@ -258,8 +259,7 @@ defmodule Explorer.Chain do
   defp common_where_limit_order(query, paging_options) do
     query
     |> InternalTransaction.where_is_different_from_parent_transaction()
-    # todo: replace `index_int_tx_desc_order` with `index_internal_transaction_desc_order` in the next line when new frontend is bound to `index_internal_transaction_desc_order` property
-    |> page_internal_transaction(paging_options, %{index_int_tx_desc_order: true})
+    |> page_internal_transaction(paging_options, %{index_internal_transaction_desc_order: true})
     |> limit(^paging_options.page_size)
     |> order_by(
       [it],
@@ -585,7 +585,7 @@ defmodule Explorer.Chain do
          do: &1,
          else:
            Enum.map(&1, fn transaction ->
-             preload_token_transfers(transaction, @token_transfers_necessity_by_association, options)
+             preload_token_transfers(transaction, @token_transfer_necessity_by_association, options)
            end)
        )).()
  end
@@ -604,7 +604,7 @@ defmodule Explorer.Chain do
    |> (& &1).()
    |> select_repo(options).all()
    |> (&Enum.map(&1, fn transaction ->
-          preload_token_transfers(transaction, @token_transfers_necessity_by_association, options)
+          preload_token_transfers(transaction, @token_transfer_necessity_by_association, options)
        end)).()
  end
@@ -1427,7 +1427,31 @@ defmodule Explorer.Chain do
  """
  @spec
import(Import.all_options()) :: Import.all_result()
  def import(options) do
-    Import.all(options)
+    case Import.all(options) do
+      {:ok, imported} = result ->
+        assets_to_import = %{
+          addresses: imported[:addresses] || [],
+          blocks: imported[:blocks] || [],
+          transactions: imported[:transactions] || []
+        }
+
+        if assets_to_import == %{
+             addresses: [],
+             blocks: [],
+             transactions: []
+           } do
+          result
+        else
+          # credo:disable-for-next-line Credo.Check.Refactor.Nesting
+          case MultichainSearch.batch_import(assets_to_import) do
+            {:ok, _} -> result
+            _ -> {:error, :insert_to_multichain_search_db_failed}
+          end
+        end
+
+      other_result ->
+        other_result
+    end
  end

  @doc """
@@ -2202,7 +2226,7 @@ defmodule Explorer.Chain do
      last_block_period = DateTime.diff(now, timestamp, :millisecond)

      if last_block_period > Application.get_env(:explorer, :healthy_blocks_period) do
-        {:error, number, timestamp}
+        {:stale, number, timestamp}
      else
        {:ok, number, timestamp}
      end
@@ -2670,7 +2694,7 @@ defmodule Explorer.Chain do
         do: &1,
         else:
           Enum.map(&1, fn transaction ->
-             preload_token_transfers(transaction, @token_transfers_necessity_by_association, options)
+             preload_token_transfers(transaction, @token_transfer_necessity_by_association, options)
           end)
      )).()
  end
@@ -2781,6 +2805,14 @@ defmodule Explorer.Chain do

  def string_to_address_hash(_), do: :error

+  @spec string_to_address_hash_or_nil(String.t()) :: Hash.Address.t() | nil
+  def string_to_address_hash_or_nil(string) do
+    case string_to_address_hash(string) do
+      {:ok, hash} -> hash
+      :error -> nil
+    end
+  end
+
  @doc """
  The `string` must start with `0x`, then is converted to an integer and then to `t:Explorer.Chain.Hash.t/0`.
@@ -2835,6 +2867,24 @@ defmodule Explorer.Chain do

  def string_to_transaction_hash(_), do: :error

+  @doc """
+  Constructs the base `t:Ecto.Query.t/0` used to request transaction logs
+
+  ## Returns
+
+  * The query over the Log table with the associated transactions joined.
+
+  """
+  @spec log_with_transactions_query() :: Ecto.Query.t()
+  def log_with_transactions_query do
+    from(log in Log,
+      inner_join: transaction in Transaction,
+      on:
+        transaction.block_hash == log.block_hash and transaction.block_number == log.block_number and
+          transaction.hash == log.transaction_hash
+    )
+  end
+
  @doc """
  Finds all `t:Explorer.Chain.Log.t/0`s for `t:Explorer.Chain.Transaction.t/0`.
@@ -2853,23 +2903,12 @@ defmodule Explorer.Chain do
    necessity_by_association = Keyword.get(options, :necessity_by_association, %{})
    paging_options = Keyword.get(options, :paging_options, @default_paging_options)

-    log_with_transactions =
-      from(log in Log,
-        inner_join: transaction in Transaction,
-        on:
-          transaction.block_hash == log.block_hash and transaction.block_number == log.block_number and
-            transaction.hash == log.transaction_hash
-      )
-
-    query =
-      log_with_transactions
-      |> where([_, transaction], transaction.hash == ^transaction_hash)
-      |> page_transaction_logs(paging_options)
-      |> limit(^paging_options.page_size)
-      |> order_by([log], asc: log.index)
-      |> join_associations(necessity_by_association)
-
-    query
+    log_with_transactions_query()
+    |> where([_, transaction], transaction.hash == ^transaction_hash)
+    |> page_transaction_logs(paging_options)
+    |> limit(^paging_options.page_size)
+    |> order_by([log], asc: log.index)
+    |> join_associations(necessity_by_association)
    |> select_repo(options).all()
  end
@@ -3107,7 +3146,20 @@ defmodule Explorer.Chain do
    Wei.to(value, unit)
  end

-  def smart_contract_bytecode(address_hash) do
+  @doc """
+  Retrieves the bytecode of a smart contract.
+
+  ## Parameters
+
+    - `address_hash` (binary() | Hash.Address.t()): The address hash of the smart contract.
+    - `options` (api?()): keyword to determine the target DB (read replica or primary).
+
+  ## Returns
+
+  - `binary()`: The bytecode of the smart contract.
+  """
+  @spec smart_contract_bytecode(binary() | Hash.Address.t(), keyword()) :: binary()
+  def smart_contract_bytecode(address_hash, options \\ []) do
    query =
      from(
        address in Address,
@@ -3116,7 +3168,7 @@
      )

    query
-    |> Repo.one()
+    |> select_repo(options).one()
    |> Data.to_string()
  end
@@ -3429,33 +3481,16 @@ defmodule Explorer.Chain do
    where(query, [coin_balance], coin_balance.block_number < ^block_number)
  end

-  # todo: replace `index_int_tx_desc_order` with `index_internal_transaction_desc_order` in the next clause when new frontend is bound to `index_internal_transaction_desc_order` property
-  def page_internal_transaction(_, _, _ \\ %{index_int_tx_desc_order: false})
+  def page_internal_transaction(_, _, _ \\ %{index_internal_transaction_desc_order: false})

  def page_internal_transaction(query, %PagingOptions{key: nil}, _), do: query

-  # todo: keep next clause for compatibility with frontend and remove when new frontend is bound to `index_internal_transaction_desc_order` property
-  def page_internal_transaction(query, %PagingOptions{key: {block_number, transaction_index, index}}, %{
-        index_int_tx_desc_order: desc_order
-      }) do
-    hardcoded_where_for_page_internal_transaction(query, block_number, transaction_index, index, desc_order)
-  end
-
  def page_internal_transaction(query, %PagingOptions{key: {block_number, transaction_index, index}}, %{
        index_internal_transaction_desc_order: desc_order
      }) do
    hardcoded_where_for_page_internal_transaction(query, block_number, transaction_index, index, desc_order)
  end

-  # todo: keep next clause for compatibility with frontend and remove when new frontend is bound to `index_internal_transaction_desc_order` property
-  def page_internal_transaction(query, %PagingOptions{key: {0}}, %{index_int_tx_desc_order: desc_order}) do
-    if desc_order do
-      query
-    else
-      where(query, [internal_transaction], internal_transaction.index > 0)
-    end
-  end
-
  def page_internal_transaction(query, %PagingOptions{key: {0}}, %{index_internal_transaction_desc_order: desc_order}) do
    if desc_order do
      query
@@ -3464,15
+3499,6 @@ defmodule Explorer.Chain do end end - # todo: keep next clause for compatibility with frontend and remove when new frontend is bound to `index_internal_transaction_desc_order` property - def page_internal_transaction(query, %PagingOptions{key: {index}}, %{index_int_tx_desc_order: desc_order}) do - if desc_order do - where(query, [internal_transaction], internal_transaction.index < ^index) - else - where(query, [internal_transaction], internal_transaction.index > ^index) - end - end - def page_internal_transaction(query, %PagingOptions{key: {index}}, %{ index_internal_transaction_desc_order: desc_order }) do diff --git a/apps/explorer/lib/explorer/chain/arbitrum/batch_to_da_blob.ex b/apps/explorer/lib/explorer/chain/arbitrum/batch_to_da_blob.ex new file mode 100644 index 000000000000..06dde817ff32 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/batch_to_da_blob.ex @@ -0,0 +1,64 @@ +defmodule Explorer.Chain.Arbitrum.BatchToDaBlob do + @moduledoc """ + Models a link between an Arbitrum L1 batch and its corresponding data blob. + + Changes in the schema should be reflected in the bulk import module: + - Explorer.Chain.Import.Runner.Arbitrum.BatchesToDaBlobs + + Migrations: + - Explorer.Repo.Arbitrum.Migrations.AddDataBlobsToBatchesTable + """ + + use Explorer.Schema + + alias Explorer.Chain.Arbitrum.{DaMultiPurposeRecord, L1Batch} + alias Explorer.Chain.Hash + + @required_attrs ~w(batch_number data_blob_id)a + + @typedoc """ + Descriptor of the link between an Arbitrum L1 batch and its data blob: + * `batch_number` - The number of the Arbitrum batch. + * `data_blob_id` - The hash of the data blob. + """ + @type to_import :: %{ + batch_number: non_neg_integer(), + data_blob_id: binary() + } + + @typedoc """ + * `batch_number` - The number of the Arbitrum batch. + * `data_blob_id` - The hash of the data blob. + * `batch` - An instance of `Explorer.Chain.Arbitrum.L1Batch` referenced by `batch_number`. + * `da_record` - An instance of `Explorer.Chain.Arbitrum.DaMultiPurposeRecord` referenced by `data_blob_id`. + """ + @primary_key {:batch_number, :integer, autogenerate: false} + typed_schema "arbitrum_batches_to_da_blobs" do + belongs_to(:batch, L1Batch, + foreign_key: :batch_number, + references: :number, + define_field: false + ) + + belongs_to(:da_record, DaMultiPurposeRecord, + foreign_key: :data_blob_id, + references: :data_key, + type: Hash.Full + ) + + timestamps() + end + + @doc """ + Validates that the `attrs` are valid. + """ + @spec changeset(Ecto.Schema.t(), map()) :: Ecto.Schema.t() + def changeset(%__MODULE__{} = batch_to_da_blob, attrs \\ %{}) do + batch_to_da_blob + |> cast(attrs, @required_attrs) + |> validate_required(@required_attrs) + |> foreign_key_constraint(:batch_number) + |> foreign_key_constraint(:data_blob_id) + |> unique_constraint(:batch_number) + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/reader.ex b/apps/explorer/lib/explorer/chain/arbitrum/reader.ex deleted file mode 100644 index cdb7093611c5..000000000000 --- a/apps/explorer/lib/explorer/chain/arbitrum/reader.ex +++ /dev/null @@ -1,1355 +0,0 @@ -defmodule Explorer.Chain.Arbitrum.Reader do - @moduledoc """ - Contains read functions for Arbitrum modules. 
- """ - - import Ecto.Query, only: [dynamic: 2, from: 2, limit: 2, order_by: 2, select: 3, subquery: 1, where: 2, where: 3] - import Explorer.Chain, only: [select_repo: 1] - - alias Explorer.Chain.Arbitrum.{ - BatchBlock, - BatchTransaction, - DaMultiPurposeRecord, - L1Batch, - L1Execution, - LifecycleTransaction, - Message - } - - alias Explorer.{Chain, PagingOptions, Repo} - - alias Explorer.Chain.Block, as: FullBlock - alias Explorer.Chain.{Hash, Log, Transaction} - - # https://github.com/OffchainLabs/go-ethereum/blob/dff302de66598c36b964b971f72d35a95148e650/core/types/transaction.go#L44C2-L50 - @message_to_l2_eth_deposit 100 - @message_to_l2_submit_retryable_transaction 105 - @to_l2_messages_transaction_types [ - @message_to_l2_eth_deposit, - @message_to_l2_submit_retryable_transaction - ] - - @doc """ - Retrieves the number of the latest L1 block where an L1-to-L2 message was discovered. - - ## Returns - - The number of L1 block, or `nil` if no L1-to-L2 messages are found. - """ - @spec l1_block_of_latest_discovered_message_to_l2() :: FullBlock.block_number() | nil - def l1_block_of_latest_discovered_message_to_l2 do - query = - from(msg in Message, - select: msg.originating_transaction_block_number, - where: msg.direction == :to_l2 and not is_nil(msg.originating_transaction_block_number), - order_by: [desc: msg.message_id], - limit: 1 - ) - - query - |> Repo.one(timeout: :infinity) - end - - @doc """ - Retrieves the number of the earliest L1 block where an L1-to-L2 message was discovered. - - ## Returns - - The number of L1 block, or `nil` if no L1-to-L2 messages are found. - """ - @spec l1_block_of_earliest_discovered_message_to_l2() :: FullBlock.block_number() | nil - def l1_block_of_earliest_discovered_message_to_l2 do - query = - from(msg in Message, - select: msg.originating_transaction_block_number, - where: msg.direction == :to_l2 and not is_nil(msg.originating_transaction_block_number), - order_by: [asc: msg.message_id], - limit: 1 - ) - - query - |> Repo.one(timeout: :infinity) - end - - @doc """ - Retrieves the rollup block number of the first missed L2-to-L1 message. - - The function identifies missing messages by checking logs for the specified - L2-to-L1 event and verifying if there are corresponding entries in the messages - table. A message is considered missed if there is a log entry without a - matching message record. - - ## Parameters - - `arbsys_contract`: The address of the Arbitrum system contract. - - `l2_to_l1_event`: The event identifier for L2-to-L1 messages. - - ## Returns - - The block number of the first missed L2-to-L1 message, or `nil` if no missed - messages are found. - """ - @spec rollup_block_of_first_missed_message_from_l2(binary(), binary()) :: FullBlock.block_number() | nil - def rollup_block_of_first_missed_message_from_l2(arbsys_contract, l2_to_l1_event) do - # credo:disable-for-lines:5 Credo.Check.Refactor.PipeChainStart - missed_messages_from_l2_query(arbsys_contract, l2_to_l1_event) - |> order_by(desc: :block_number) - |> limit(1) - |> select([log], log.block_number) - |> Repo.one(timeout: :infinity) - end - - @doc """ - Retrieves the rollup block number of the first missed L1-to-L2 message. - - The function identifies missing messages by checking transactions of specific - types that are supposed to contain L1-to-L2 messages and verifying if there are - corresponding entries in the messages table. A message is considered missed if - there is a transaction without a matching message record. 
- - ## Returns - - The block number of the first missed L1-to-L2 message, or `nil` if no missed - messages are found. - """ - @spec rollup_block_of_first_missed_message_to_l2() :: FullBlock.block_number() | nil - def rollup_block_of_first_missed_message_to_l2 do - missed_messages_to_l2_query() - |> order_by(desc: :block_number) - |> limit(1) - |> select([rollup_transaction], rollup_transaction.block_number) - |> Repo.one(timeout: :infinity) - end - - @doc """ - Retrieves the number of the latest L1 block where the commitment transaction with a batch was included. - - As per the Arbitrum rollup nature, from the indexer's point of view, a batch does not exist until - the commitment transaction is submitted to L1. Therefore, the situation where a batch exists but - there is no commitment transaction is not possible. - - ## Returns - - The number of the L1 block, or `nil` if no rollup batches are found, or if the association between the batch - and the commitment transaction has been broken due to database inconsistency. - """ - @spec l1_block_of_latest_committed_batch() :: FullBlock.block_number() | nil - def l1_block_of_latest_committed_batch do - query = - from(batch in L1Batch, - order_by: [desc: batch.number], - limit: 1 - ) - - case query - # :required is used since the situation when commit transaction is not found is not possible - |> Chain.join_associations(%{:commitment_transaction => :required}) - |> Repo.one(timeout: :infinity) do - nil -> nil - batch -> batch.commitment_transaction.block_number - end - end - - @doc """ - Retrieves the number of the earliest L1 block where the commitment transaction with a batch was included. - - As per the Arbitrum rollup nature, from the indexer's point of view, a batch does not exist until - the commitment transaction is submitted to L1. Therefore, the situation where a batch exists but - there is no commitment transaction is not possible. - - ## Returns - - The number of the L1 block, or `nil` if no rollup batches are found, or if the association between the batch - and the commitment transaction has been broken due to database inconsistency. - """ - @spec l1_block_of_earliest_committed_batch() :: FullBlock.block_number() | nil - def l1_block_of_earliest_committed_batch do - query = - from(batch in L1Batch, - order_by: [asc: batch.number], - limit: 1 - ) - - case query - # :required is used since the situation when commit transaction is not found is not possible - |> Chain.join_associations(%{:commitment_transaction => :required}) - |> Repo.one(timeout: :infinity) do - nil -> nil - batch -> batch.commitment_transaction.block_number - end - end - - @doc """ - Retrieves the block number of the highest rollup block that has been included in a batch. - - ## Returns - - The number of the highest rollup block included in a batch, or `nil` if no rollup batches are found. - """ - @spec highest_committed_block() :: FullBlock.block_number() | nil - def highest_committed_block do - query = - from(batch in L1Batch, - select: batch.end_block, - order_by: [desc: batch.number], - limit: 1 - ) - - query - |> Repo.one() - end - - @doc """ - Reads a list of L1 transactions by their hashes from the `arbitrum_lifecycle_l1_transactions` table and returns their IDs. - - ## Parameters - - `l1_transaction_hashes`: A list of hashes to retrieve L1 transactions for. - - ## Returns - - A list of tuples containing transaction hashes and IDs for the transaction - hashes from the input list. The output list may be smaller than the input - list. 
- """ - @spec lifecycle_transaction_ids([binary()]) :: [{Hash.t(), non_neg_integer}] - def lifecycle_transaction_ids(l1_transaction_hashes) when is_list(l1_transaction_hashes) do - query = - from( - lt in LifecycleTransaction, - select: {lt.hash, lt.id}, - where: lt.hash in ^l1_transaction_hashes - ) - - Repo.all(query) - end - - @doc """ - Reads a list of L1 transactions by their hashes from the `arbitrum_lifecycle_l1_transactions` table. - - ## Parameters - - `l1_transaction_hashes`: A list of hashes to retrieve L1 transactions for. - - ## Returns - - A list of `Explorer.Chain.Arbitrum.LifecycleTransaction` corresponding to the - hashes from the input list. The output list may be smaller than the input - list. - """ - @spec lifecycle_transactions([binary()]) :: [LifecycleTransaction.t()] - def lifecycle_transactions(l1_transaction_hashes) when is_list(l1_transaction_hashes) do - query = - from( - lt in LifecycleTransaction, - where: lt.hash in ^l1_transaction_hashes - ) - - Repo.all(query) - end - - @doc """ - Reads a list of transactions executing L2-to-L1 messages by their IDs. - - ## Parameters - - `message_ids`: A list of IDs to retrieve executing transactions for. - - ## Returns - - A list of `Explorer.Chain.Arbitrum.L1Execution` corresponding to the message IDs from - the input list. The output list may be smaller than the input list if some IDs do not - correspond to any existing transactions. - """ - @spec l1_executions(maybe_improper_list(non_neg_integer(), [])) :: [L1Execution.t()] - def l1_executions(message_ids) when is_list(message_ids) do - query = - from( - ex in L1Execution, - where: ex.message_id in ^message_ids - ) - - query - # :required is used since execution records in the table are created only when - # the corresponding execution transaction is indexed - |> Chain.join_associations(%{:execution_transaction => :required}) - |> Repo.all() - end - - @doc """ - Determines the next index for the L1 transaction available in the `arbitrum_lifecycle_l1_transactions` table. - - ## Returns - - The next available index. If there are no L1 transactions imported yet, it will return `1`. - """ - @spec next_lifecycle_transaction_id() :: non_neg_integer - def next_lifecycle_transaction_id do - query = - from(lt in LifecycleTransaction, - select: lt.id, - order_by: [desc: lt.id], - limit: 1 - ) - - last_id = - query - |> Repo.one() - |> Kernel.||(0) - - last_id + 1 - end - - @doc """ - Retrieves unfinalized L1 transactions from the `LifecycleTransaction` table that are - involved in changing the statuses of rollup blocks or transactions. - - An L1 transaction is considered unfinalized if it has not yet reached a state where - it is permanently included in the blockchain, meaning it is still susceptible to - potential reorganization or change. Transactions are evaluated against the `finalized_block` - parameter to determine their finalized status. - - ## Parameters - - `finalized_block`: The L1 block number above which transactions are considered finalized. - Transactions in blocks higher than this number are not included in the results. - - ## Returns - - A list of `Explorer.Chain.Arbitrum.LifecycleTransaction` representing unfinalized transactions, - or `[]` if no unfinalized transactions are found. 
- """ - @spec lifecycle_unfinalized_transactions(FullBlock.block_number()) :: [LifecycleTransaction.t()] - def lifecycle_unfinalized_transactions(finalized_block) - when is_integer(finalized_block) and finalized_block >= 0 do - query = - from( - lt in LifecycleTransaction, - where: lt.block_number <= ^finalized_block and lt.status == :unfinalized - ) - - Repo.all(query) - end - - @doc """ - Gets the rollup block number by the hash of the block. Lookup is performed only - for blocks explicitly included in a batch, i.e., the batch has been identified by - the corresponding fetcher. The function may return `nil` as a successful response - if the batch containing the rollup block has not been indexed yet. - - ## Parameters - - `block_hash`: The hash of a block included in the batch. - - ## Returns - - the number of the rollup block corresponding to the given hash or `nil` if the - block or batch were not indexed yet. - """ - @spec rollup_block_hash_to_num(binary()) :: FullBlock.block_number() | nil - def rollup_block_hash_to_num(block_hash) when is_binary(block_hash) do - query = - from( - fb in FullBlock, - inner_join: rb in BatchBlock, - on: rb.block_number == fb.number, - select: fb.number, - where: fb.hash == ^block_hash - ) - - query - |> Repo.one() - end - - @doc """ - Checks if the numbers from the provided list correspond to the numbers of indexed batches. - - ## Parameters - - `batches_numbers`: The list of batch numbers. - - ## Returns - - A list of batch numbers that are indexed and match the provided list, or `[]` - if none of the batch numbers in the provided list exist in the database. The output list - may be smaller than the input list. - """ - @spec batches_exist(maybe_improper_list(non_neg_integer(), [])) :: [non_neg_integer] - def batches_exist(batches_numbers) when is_list(batches_numbers) do - query = - from( - batch in L1Batch, - select: batch.number, - where: batch.number in ^batches_numbers - ) - - query - |> Repo.all() - end - - @doc """ - Retrieves the batch in which the rollup block, identified by the given block number, was included. - - ## Parameters - - `number`: The number of a rollup block. - - ## Returns - - An instance of `Explorer.Chain.Arbitrum.L1Batch` representing the batch containing - the specified rollup block number, or `nil` if no corresponding batch is found. - """ - @spec get_batch_by_rollup_block_number(FullBlock.block_number()) :: L1Batch.t() | nil - def get_batch_by_rollup_block_number(number) - when is_integer(number) and number >= 0 do - query = - from(batch in L1Batch, - # end_block has higher number than start_block - where: batch.end_block >= ^number and batch.start_block <= ^number - ) - - query - # :required is used since the situation when commit transaction is not found is not possible - |> Chain.join_associations(%{:commitment_transaction => :required}) - |> Repo.one() - end - - @doc """ - Retrieves the batch by its number. - - ## Parameters - - `number`: The number of a rollup batch. - - ## Returns - - An instance of `Explorer.Chain.Arbitrum.L1Batch`, or `nil` if no batch with - such a number is found. - """ - @spec get_batch_by_number(non_neg_integer()) :: L1Batch.t() | nil - def get_batch_by_number(number) do - query = - from(batch in L1Batch, - where: batch.number == ^number - ) - - query - |> Repo.one() - end - - @doc """ - Retrieves the L1 block number where the confirmation transaction of the highest confirmed rollup block was included. 
- - ## Returns - - The L1 block number if a confirmed rollup block is found and the confirmation transaction is indexed; - `nil` if no confirmed rollup blocks are found or if there is a database inconsistency. - """ - @spec l1_block_of_latest_confirmed_block() :: FullBlock.block_number() | nil - def l1_block_of_latest_confirmed_block do - query = - from( - rb in BatchBlock, - where: not is_nil(rb.confirmation_id), - order_by: [desc: rb.block_number], - limit: 1 - ) - - case query - # :required is used since existence of the confirmation id is checked above - |> Chain.join_associations(%{:confirmation_transaction => :required}) - |> Repo.one(timeout: :infinity) do - nil -> - nil - - block -> - case block.confirmation_transaction do - # `nil` and `%Ecto.Association.NotLoaded{}` indicate DB inconsistency - nil -> nil - %Ecto.Association.NotLoaded{} -> nil - confirmation_transaction -> confirmation_transaction.block_number - end - end - end - - @doc """ - Retrieves the number of the highest confirmed rollup block. - - ## Returns - - The number of the highest confirmed rollup block, or `nil` if no confirmed rollup blocks are found. - """ - @spec highest_confirmed_block() :: FullBlock.block_number() | nil - def highest_confirmed_block do - query = - from( - rb in BatchBlock, - where: not is_nil(rb.confirmation_id), - select: rb.block_number, - order_by: [desc: rb.block_number], - limit: 1 - ) - - query - |> Repo.one() - end - - @doc """ - Retrieves the number of the latest L1 block where a transaction executing an L2-to-L1 message was discovered. - - ## Returns - - The number of the latest L1 block with an executing transaction for an L2-to-L1 message, or `nil` if no such transactions are found. - """ - @spec l1_block_of_latest_execution() :: FullBlock.block_number() | nil - def l1_block_of_latest_execution do - query = - from( - transaction in LifecycleTransaction, - inner_join: ex in L1Execution, - on: transaction.id == ex.execution_id, - select: transaction.block_number, - order_by: [desc: transaction.block_number], - limit: 1 - ) - - query - |> Repo.one(timeout: :infinity) - end - - @doc """ - Retrieves the number of the earliest L1 block where a transaction executing an L2-to-L1 message was discovered. - - ## Returns - - The number of the earliest L1 block with an executing transaction for an L2-to-L1 message, or `nil` if no such transactions are found. - """ - @spec l1_block_of_earliest_execution() :: FullBlock.block_number() | nil - def l1_block_of_earliest_execution do - query = - from( - transaction in LifecycleTransaction, - inner_join: ex in L1Execution, - on: transaction.id == ex.execution_id, - select: transaction.block_number, - order_by: [asc: transaction.block_number], - limit: 1 - ) - - query - |> Repo.one(timeout: :infinity) - end - - @doc """ - Retrieves all unconfirmed rollup blocks within the specified range from `first_block` to `last_block`, - inclusive, where `first_block` is less than or equal to `last_block`. - - Since the function relies on the block data generated by the block fetcher, the returned list - may contain fewer blocks than actually exist if some of the blocks have not been indexed by the fetcher yet. - - ## Parameters - - `first_block`: The rollup block number starting the lookup range. - - `last_block`:The rollup block number ending the lookup range. - - ## Returns - - A list of maps containing the batch number, rollup block number and hash for each - unconfirmed block within the range. 
Returns `[]` if no unconfirmed blocks are found - within the range, or if the block fetcher has not indexed them. - """ - @spec unconfirmed_rollup_blocks(FullBlock.block_number(), FullBlock.block_number()) :: [BatchBlock.t()] - def unconfirmed_rollup_blocks(first_block, last_block) - when is_integer(first_block) and first_block >= 0 and - is_integer(last_block) and first_block <= last_block do - query = - from( - rb in BatchBlock, - where: rb.block_number >= ^first_block and rb.block_number <= ^last_block and is_nil(rb.confirmation_id), - order_by: [asc: rb.block_number] - ) - - Repo.all(query) - end - - @doc """ - Calculates the number of confirmed rollup blocks in the specified batch. - - ## Parameters - - `batch_number`: The number of the batch for which the count of confirmed blocks is to be calculated. - - ## Returns - - The number of confirmed blocks in the batch with the given number. - """ - @spec count_confirmed_rollup_blocks_in_batch(non_neg_integer()) :: non_neg_integer - def count_confirmed_rollup_blocks_in_batch(batch_number) - when is_integer(batch_number) and batch_number >= 0 do - query = - from( - rb in BatchBlock, - where: rb.batch_number == ^batch_number and not is_nil(rb.confirmation_id) - ) - - Repo.aggregate(query, :count) - end - - @doc """ - Retrieves all L2-to-L1 messages with the specified status. - - If `block_number` is not `nil`, only messages originating in rollup blocks with - numbers not higher than the specified block are considered. Otherwise, all - messages are considered. - - ## Parameters - - `status`: The status of the messages to retrieve, such as `:initiated`, - `:sent`, `:confirmed`, or `:relayed`. - - `block_number`: The number of a rollup block that limits the messages lookup, - or `nil`. - - ## Returns - - Instances of `Explorer.Chain.Arbitrum.Message` corresponding to the criteria, - or `[]` if no messages with the given status are found. - """ - @spec l2_to_l1_messages(:confirmed | :initiated | :relayed | :sent, FullBlock.block_number() | nil) :: [ - Message.t() - ] - def l2_to_l1_messages(status, block_number) - when status in [:initiated, :sent, :confirmed, :relayed] and - is_integer(block_number) and - block_number >= 0 do - query = - from(msg in Message, - where: - msg.direction == :from_l2 and msg.originating_transaction_block_number <= ^block_number and - msg.status == ^status, - order_by: [desc: msg.message_id] - ) - - Repo.all(query) - end - - def l2_to_l1_messages(status, nil) when status in [:initiated, :sent, :confirmed, :relayed] do - query = - from(msg in Message, - where: msg.direction == :from_l2 and msg.status == ^status, - order_by: [desc: msg.message_id] - ) - - Repo.all(query) - end - - @doc """ - Retrieves the numbers of the L1 blocks containing the confirmation transactions - bounding the first interval where missed confirmation transactions could be found. - - The absence of a confirmation transaction is assumed based on the analysis of a - series of confirmed rollup blocks. For example, if blocks 0-3 are confirmed by transaction X, - blocks 7-9 by transaction Y, and blocks 12-15 by transaction Z, there are two gaps: - blocks 4-6 and 10-11. According to Arbitrum's nature, this indicates that the confirmation - transactions for blocks 6 and 11 have not yet been indexed. - - In the example above, the function will return the tuple with the numbers of the L1 blocks - where transactions Y and Z were included. 
- - ## Returns - - A tuple of the L1 block numbers between which missing confirmation transactions are suspected, - or `nil` if no gaps in confirmed blocks are found or if there are no missed confirmation transactions. - """ - @spec l1_blocks_of_confirmations_bounding_first_unconfirmed_rollup_blocks_gap() :: - {FullBlock.block_number() | nil, FullBlock.block_number()} | nil - def l1_blocks_of_confirmations_bounding_first_unconfirmed_rollup_blocks_gap do - # The first subquery retrieves the numbers of confirmed rollup blocks. - rollup_blocks_query = - from( - rb in BatchBlock, - select: %{ - block_number: rb.block_number, - confirmation_id: rb.confirmation_id - }, - where: not is_nil(rb.confirmation_id) - ) - - # The second subquery builds on the first one, grouping block numbers by their - # confirmation transactions. As a result, it identifies the starting and ending - # rollup blocks for every transaction. - confirmed_ranges_query = - from( - subquery in subquery(rollup_blocks_query), - select: %{ - confirmation_id: subquery.confirmation_id, - min_block_num: min(subquery.block_number), - max_block_num: max(subquery.block_number) - }, - group_by: subquery.confirmation_id - ) - - # The third subquery utilizes the window function LAG to associate each confirmation - # transaction with the starting rollup block of the preceding transaction. - confirmed_combined_ranges_query = - from( - subquery in subquery(confirmed_ranges_query), - select: %{ - confirmation_id: subquery.confirmation_id, - min_block_num: subquery.min_block_num, - max_block_num: subquery.max_block_num, - prev_max_number: fragment("LAG(?, 1) OVER (ORDER BY ?)", subquery.max_block_num, subquery.min_block_num), - prev_confirmation_id: - fragment("LAG(?, 1) OVER (ORDER BY ?)", subquery.confirmation_id, subquery.min_block_num) - } - ) - - # The final query identifies confirmation transactions for which the ending block does - # not precede the starting block of the subsequent confirmation transaction. - main_query = - from( - subquery in subquery(confirmed_combined_ranges_query), - inner_join: current_transaction in LifecycleTransaction, - on: subquery.confirmation_id == current_transaction.id, - left_join: previous_transaction in LifecycleTransaction, - on: subquery.prev_confirmation_id == previous_transaction.id, - select: {previous_transaction.block_number, current_transaction.block_number}, - where: subquery.min_block_num - 1 != subquery.prev_max_number or is_nil(subquery.prev_max_number), - order_by: [desc: subquery.min_block_num], - limit: 1 - ) - - main_query - |> Repo.one() - end - - @doc """ - Retrieves the count of cross-chain messages either sent to or from the rollup. - - ## Parameters - - `direction`: A string that specifies the message direction; can be "from-rollup" or "to-rollup". - - `options`: A keyword list of options that may include whether to use a replica database. - - ## Returns - - The total count of cross-chain messages. - """ - @spec messages_count(binary(), api?: boolean()) :: non_neg_integer() - def messages_count(direction, options) when direction == "from-rollup" and is_list(options) do - do_messages_count(:from_l2, options) - end - - def messages_count(direction, options) when direction == "to-rollup" and is_list(options) do - do_messages_count(:to_l2, options) - end - - # Counts the number of cross-chain messages based on the specified direction. 
- @spec do_messages_count(:from_l2 | :to_l2, api?: boolean()) :: non_neg_integer() - defp do_messages_count(direction, options) do - Message - |> where([msg], msg.direction == ^direction) - |> select_repo(options).aggregate(:count) - end - - @doc """ - Retrieves cross-chain messages based on the specified direction. - - This function constructs and executes a query to retrieve messages either sent - to or from the rollup layer, applying pagination options. These options dictate - not only the number of items to retrieve but also how many items to skip from - the top. - - ## Parameters - - `direction`: A string that can be "from-rollup" or "to-rollup", translated internally to `:from_l2` or `:to_l2`. - - `options`: A keyword list specifying pagination details and database preferences. - - ## Returns - - A list of `Explorer.Chain.Arbitrum.Message` entries. - """ - @spec messages(binary(), - paging_options: PagingOptions.t(), - api?: boolean() - ) :: [Message.t()] - def messages(direction, options) when direction == "from-rollup" do - do_messages(:from_l2, options) - end - - def messages(direction, options) when direction == "to-rollup" do - do_messages(:to_l2, options) - end - - # Executes the query to fetch cross-chain messages based on the specified direction. - # - # This function constructs and executes a query to retrieve messages either sent - # to or from the rollup layer, applying pagination options. These options dictate - # not only the number of items to retrieve but also how many items to skip from - # the top. - # - # ## Parameters - # - `direction`: Can be either `:from_l2` or `:to_l2`, indicating the direction of the messages. - # - `options`: A keyword list of options specifying pagination details and whether to use a replica database. - # - # ## Returns - # - A list of `Explorer.Chain.Arbitrum.Message` entries matching the specified direction. - @spec do_messages(:from_l2 | :to_l2, - paging_options: PagingOptions.t(), - api?: boolean() - ) :: [Message.t()] - defp do_messages(direction, options) do - base_query = - from(msg in Message, - where: msg.direction == ^direction, - order_by: [desc: msg.message_id] - ) - - paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) - - query = - base_query - |> page_messages(paging_options) - |> limit(^paging_options.page_size) - - select_repo(options).all(query) - end - - defp page_messages(query, %PagingOptions{key: nil}), do: query - - defp page_messages(query, %PagingOptions{key: {id}}) do - from(msg in query, where: msg.message_id < ^id) - end - - @doc """ - Retrieves a list of relayed L1 to L2 messages that have been completed. - - ## Parameters - - `options`: A keyword list of options specifying whether to use a replica database and how pagination should be handled. - - ## Returns - - A list of `Explorer.Chain.Arbitrum.Message` representing relayed messages from L1 to L2 that have been completed. - """ - @spec relayed_l1_to_l2_messages( - paging_options: PagingOptions.t(), - api?: boolean() - ) :: [Message.t()] - def relayed_l1_to_l2_messages(options) do - paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) - - query = - from(msg in Message, - where: msg.direction == :to_l2 and not is_nil(msg.completion_transaction_hash), - order_by: [desc: msg.message_id], - limit: ^paging_options.page_size - ) - - select_repo(options).all(query) - end - - @doc """ - Retrieves the transaction hashes for missed L1-to-L2 messages within a specified - block range. 
- - The function identifies missed messages by checking transactions of specific - types that are supposed to contain L1-to-L2 messages and verifying if there are - corresponding entries in the messages table. A message is considered missed if - there is a transaction without a matching message record within the specified - block range. - - ## Parameters - - `start_block`: The starting block number of the range. - - `end_block`: The ending block number of the range. - - ## Returns - - A list of transaction hashes for missed L1-to-L2 messages. - """ - @spec transactions_for_missed_messages_to_l2(non_neg_integer(), non_neg_integer()) :: [Hash.t()] - def transactions_for_missed_messages_to_l2(start_block, end_block) do - missed_messages_to_l2_query() - |> where( - [rollup_transaction], - rollup_transaction.block_number >= ^start_block and rollup_transaction.block_number <= ^end_block - ) - |> order_by(desc: :block_timestamp) - |> select([rollup_transaction], rollup_transaction.hash) - |> Repo.all() - end - - # Constructs a query to retrieve missed L1-to-L2 messages. - # - # The function constructs a query to identify missing messages by checking - # transactions of specific types that are supposed to contain L1-to-L2 - # messages and verifying if there are corresponding entries in the messages - # table. A message is considered missed if there is a transaction without a - # matching message record. - # - # ## Returns - # - A query to retrieve missed L1-to-L2 messages. - @spec missed_messages_to_l2_query() :: Ecto.Query.t() - defp missed_messages_to_l2_query do - from(rollup_transaction in Transaction, - left_join: msg in Message, - on: rollup_transaction.hash == msg.completion_transaction_hash and msg.direction == :to_l2, - where: rollup_transaction.type in @to_l2_messages_transaction_types and is_nil(msg.completion_transaction_hash) - ) - end - - @doc """ - Retrieves the logs for missed L2-to-L1 messages within a specified block range. - - The function identifies missed messages by checking logs for the specified - L2-to-L1 event and verifying if there are corresponding entries in the messages - table. A message is considered missed if there is a log entry without a - matching message record within the specified block range. - - ## Parameters - - `start_block`: The starting block number of the range. - - `end_block`: The ending block number of the range. - - `arbsys_contract`: The address of the Arbitrum system contract. - - `l2_to_l1_event`: The event identifier for L2-to-L1 messages. - - ## Returns - - A list of logs for missed L2-to-L1 messages. - """ - @spec logs_for_missed_messages_from_l2(non_neg_integer(), non_neg_integer(), binary(), binary()) :: [Log.t()] - def logs_for_missed_messages_from_l2(start_block, end_block, arbsys_contract, l2_to_l1_event) do - # credo:disable-for-lines:5 Credo.Check.Refactor.PipeChainStart - missed_messages_from_l2_query(arbsys_contract, l2_to_l1_event, start_block, end_block) - |> where([log, msg], log.block_number >= ^start_block and log.block_number <= ^end_block) - |> order_by(desc: :block_number, desc: :index) - |> select([log], log) - |> Repo.all() - end - - # Constructs a query to retrieve missed L2-to-L1 messages. - # - # The function constructs a query to identify missing messages by checking logs - # for the specified L2-to-L1 and verifying if there are corresponding entries - # in the messages table within a given block range, or among all messages if no - # block range is provided. 
A message is considered missed if there is a log - # entry without a matching message record. - # - # ## Parameters - # - `arbsys_contract`: The address hash of the Arbitrum system contract. - # - `l2_to_l1_event`: The event identifier for L2 to L1 messages. - # - `start_block`: The starting block number for the search range (optional). - # - `end_block`: The ending block number for the search range (optional). - # - # ## Returns - # - A query to retrieve missed L2-to-L1 messages. - @spec missed_messages_from_l2_query(binary(), binary(), non_neg_integer() | nil, non_neg_integer() | nil) :: - Ecto.Query.t() - defp missed_messages_from_l2_query(arbsys_contract, l2_to_l1_event, start_block \\ nil, end_block \\ nil) do - # It is assumed that all the messages from the same transaction are handled - # atomically so there is no need to check the message_id for each log entry. - # Otherwise, the join condition must be extended with - # fragment("encode(l0.fourth_topic, 'hex') = LPAD(TO_HEX(a1.message_id::BIGINT), 64, '0')") - base_condition = - dynamic([log, msg], log.transaction_hash == msg.originating_transaction_hash and msg.direction == :from_l2) - - join_condition = - if is_nil(start_block) or is_nil(end_block) do - base_condition - else - dynamic( - [_, msg], - ^base_condition and - msg.originating_transaction_block_number >= ^start_block and - msg.originating_transaction_block_number <= ^end_block - ) - end - - from(log in Log, - left_join: msg in Message, - on: ^join_condition, - where: - log.address_hash == ^arbsys_contract and log.first_topic == ^l2_to_l1_event and - is_nil(msg.originating_transaction_hash) - ) - end - - @doc """ - Retrieves the total count of rollup batches indexed up to the current moment. - - This function uses an estimated count from system catalogs if available. - If the estimate is unavailable, it performs an exact count using an aggregate query. - - ## Parameters - - `options`: A keyword list specifying options, including whether to use a replica database. - - ## Returns - - The count of indexed batches. - """ - @spec batches_count(api?: boolean()) :: non_neg_integer() - def batches_count(options) do - Chain.get_table_rows_total_count(L1Batch, options) - end - - @doc """ - Retrieves a specific batch by its number or fetches the latest batch if `:latest` is specified. - - ## Parameters - - `number`: Can be either the specific batch number or `:latest` to retrieve - the most recent batch in the database. - - `options`: A keyword list specifying the necessity for joining associations - and whether to use a replica database. - - ## Returns - - `{:ok, Explorer.Chain.Arbitrum.L1Batch}` if the batch is found. - - `{:error, :not_found}` if no batch with the specified number exists. 
- """ - def batch(number, options) - - @spec batch(:latest, api?: boolean()) :: {:error, :not_found} | {:ok, L1Batch.t()} - def batch(:latest, options) do - L1Batch - |> order_by(desc: :number) - |> limit(1) - |> select_repo(options).one() - |> case do - nil -> {:error, :not_found} - batch -> {:ok, batch} - end - end - - @spec batch(binary() | non_neg_integer(), - necessity_by_association: %{atom() => :optional | :required}, - api?: boolean() - ) :: {:error, :not_found} | {:ok, L1Batch.t()} - def batch(number, options) do - necessity_by_association = Keyword.get(options, :necessity_by_association, %{}) - - L1Batch - |> where(number: ^number) - |> Chain.join_associations(necessity_by_association) - |> select_repo(options).one() - |> case do - nil -> {:error, :not_found} - batch -> {:ok, batch} - end - end - - @doc """ - Retrieves a list of batches from the database. - - This function constructs and executes a query to retrieve batches based on provided - pagination options. These options dictate not only the number of items to retrieve - but also how many items to skip from the top. If the `committed?` option is set to true, - it returns the ten most recent committed batches; otherwise, it fetches batches as - dictated by other pagination parameters. - - ## Parameters - - `options`: A keyword list of options specifying pagination, necessity for joining associations, - and whether to use a replica database. - - ## Returns - - A list of `Explorer.Chain.Arbitrum.L1Batch` entries, filtered and ordered according to the provided options. - """ - @spec batches( - necessity_by_association: %{atom() => :optional | :required}, - committed?: boolean(), - paging_options: PagingOptions.t(), - api?: boolean() - ) :: [L1Batch.t()] - def batches(options) do - necessity_by_association = Keyword.get(options, :necessity_by_association, %{}) - - base_query = - from(batch in L1Batch, - order_by: [desc: batch.number] - ) - - query = - if Keyword.get(options, :committed?, false) do - base_query - |> Chain.join_associations(necessity_by_association) - |> where([batch], not is_nil(batch.commitment_id) and batch.commitment_id > 0) - |> limit(10) - else - paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) - - base_query - |> Chain.join_associations(necessity_by_association) - |> page_batches(paging_options) - |> limit(^paging_options.page_size) - end - - select_repo(options).all(query) - end - - defp page_batches(query, %PagingOptions{key: nil}), do: query - - defp page_batches(query, %PagingOptions{key: {number}}) do - from(batch in query, where: batch.number < ^number) - end - - @doc """ - Retrieves a list of rollup transactions included in a specific batch. - - ## Parameters - - `batch_number`: The batch number whose transactions were included in L1. - - `options`: A keyword list specifying options, including whether to use a replica database. - - ## Returns - - A list of `Explorer.Chain.Arbitrum.BatchTransaction` entries belonging to the specified batch. - """ - @spec batch_transactions(non_neg_integer() | binary(), api?: boolean()) :: [BatchTransaction.t()] - def batch_transactions(batch_number, options) do - query = from(transaction in BatchTransaction, where: transaction.batch_number == ^batch_number) - - select_repo(options).all(query) - end - - @doc """ - Retrieves a list of rollup blocks included in a specific batch. 
- - This function constructs and executes a database query to retrieve a list of rollup blocks, - considering pagination options specified in the `options` parameter. These options dictate - the number of items to retrieve and how many items to skip from the top. - - ## Parameters - - `batch_number`: The batch number whose transactions are included on L1. - - `options`: A keyword list of options specifying pagination, association necessity, and - whether to use a replica database. - - ## Returns - - A list of `Explorer.Chain.Block` entries belonging to the specified batch. - """ - @spec batch_blocks(non_neg_integer() | binary(), - necessity_by_association: %{atom() => :optional | :required}, - api?: boolean(), - paging_options: PagingOptions.t() - ) :: [FullBlock.t()] - def batch_blocks(batch_number, options) do - necessity_by_association = Keyword.get(options, :necessity_by_association, %{}) - paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) - - query = - from( - fb in FullBlock, - inner_join: rb in BatchBlock, - on: fb.number == rb.block_number, - select: fb, - where: fb.consensus == true and rb.batch_number == ^batch_number - ) - - query - |> FullBlock.block_type_filter("Block") - |> page_blocks(paging_options) - |> limit(^paging_options.page_size) - |> order_by(desc: :number) - |> Chain.join_associations(necessity_by_association) - |> select_repo(options).all() - end - - defp page_blocks(query, %PagingOptions{key: nil}), do: query - - defp page_blocks(query, %PagingOptions{key: {block_number}}) do - where(query, [block], block.number < ^block_number) - end - - @doc """ - Retrieves an AnyTrust keyset from the database using the provided keyset hash. - - ## Parameters - - `keyset_hash`: A binary representing the hash of the keyset to be retrieved. - - ## Returns - - A map containing information about the AnyTrust keyset, otherwise an empty map. - """ - @spec get_anytrust_keyset(binary()) :: map() | nil - def get_anytrust_keyset("0x" <> <<_::binary-size(64)>> = keyset_hash) do - get_anytrust_keyset(keyset_hash |> Chain.string_to_block_hash() |> Kernel.elem(1) |> Map.get(:bytes)) - end - - def get_anytrust_keyset(keyset_hash) do - query = - from( - da_records in DaMultiPurposeRecord, - where: da_records.data_key == ^keyset_hash and da_records.data_type == 1 - ) - - case Repo.one(query) do - nil -> %{} - keyset -> keyset.data - end - end - - @doc """ - Retrieves Data Availability (DA) information from the database using the provided - batch number. - - ## Parameters - - `batch_number`: The batch number to be used for retrieval. - - ## Returns - - A map containing the DA information if found, otherwise an empty map. - """ - @spec get_da_info_by_batch_number(non_neg_integer()) :: map() - def get_da_info_by_batch_number(batch_number) do - query = - from( - da_records in DaMultiPurposeRecord, - where: da_records.batch_number == ^batch_number and da_records.data_type == 0 - ) - - case Repo.one(query) do - nil -> %{} - keyset -> keyset.data - end - end - - @doc """ - Retrieves a Data Availability (DA) record from the database using the provided - data key. - - ## Parameters - - `data_key`: The key of the data to be retrieved. - - ## Returns - - `{:ok, {batch_number, da_info}}`, where - - `batch_number` is the number of the batch associated with the DA record - - `da_info` is a map containing the DA record. - - `{:error, :not_found}` if no record with the specified `data_key` exists. 
- """ - @spec get_da_record_by_data_key(binary(), api?: boolean()) :: {:ok, {non_neg_integer(), map()}} | {:error, :not_found} - def get_da_record_by_data_key("0x" <> _ = data_key, options) do - data_key_bytes = data_key |> Chain.string_to_block_hash() |> Kernel.elem(1) |> Map.get(:bytes) - get_da_record_by_data_key(data_key_bytes, options) - end - - def get_da_record_by_data_key(data_key, options) do - query = - from( - da_records in DaMultiPurposeRecord, - where: da_records.data_key == ^data_key and da_records.data_type == 0 - ) - - case select_repo(options).one(query) do - nil -> {:error, :not_found} - keyset -> {:ok, {keyset.batch_number, keyset.data}} - end - end - - @doc """ - Retrieves the batch numbers of missing L1 batches within a specified range. - - This function constructs a query to find the batch numbers of L1 batches that - are missing within the given range of batch numbers. It uses a right join with - a generated series to identify batch numbers that do not exist in the - `arbitrum_l1_batches` table. - - ## Parameters - - `start_batch_number`: The starting batch number of the search range. - - `end_batch_number`: The ending batch number of the search range. - - ## Returns - - A list of batch numbers in ascending order that are missing within the specified range. - """ - @spec find_missing_batches(non_neg_integer(), non_neg_integer()) :: [non_neg_integer()] - def find_missing_batches(start_batch_number, end_batch_number) - when is_integer(start_batch_number) and is_integer(end_batch_number) and end_batch_number >= start_batch_number do - query = - from(batch in L1Batch, - right_join: - missing_range in fragment( - """ - ( - SELECT distinct b1.number - FROM generate_series((?)::integer, (?)::integer) AS b1(number) - WHERE NOT EXISTS - (SELECT 1 FROM arbitrum_l1_batches b2 WHERE b2.number=b1.number) - ORDER BY b1.number DESC - ) - """, - ^start_batch_number, - ^end_batch_number - ), - on: batch.number == missing_range.number, - select: missing_range.number, - order_by: missing_range.number, - distinct: missing_range.number - ) - - query - |> Repo.all() - end - - @doc """ - Retrieves L1 block numbers for the given list of batch numbers. - - This function finds the numbers of L1 blocks that include L1 transactions - associated with batches within the specified list of batch numbers. - - ## Parameters - - `batch_numbers`: A list of batch numbers for which to retrieve the L1 block numbers. - - ## Returns - - A map where the keys are batch numbers and the values are corresponding L1 block numbers. - """ - @spec get_l1_blocks_of_batches_by_numbers([non_neg_integer()]) :: %{non_neg_integer() => FullBlock.block_number()} - def get_l1_blocks_of_batches_by_numbers(batch_numbers) when is_list(batch_numbers) do - query = - from(batch in L1Batch, - join: l1tx in assoc(batch, :commitment_transaction), - where: batch.number in ^batch_numbers, - select: {batch.number, l1tx.block_number} - ) - - query - |> Repo.all() - |> Enum.reduce(%{}, fn {batch_number, l1_block_number}, acc -> - Map.put(acc, batch_number, l1_block_number) - end) - end - - @doc """ - Retrieves the minimum and maximum batch numbers of L1 batches. - - ## Returns - - A tuple containing the minimum and maximum batch numbers or `{nil, nil}` if no batches are found. 
- """ - @spec get_min_max_batch_numbers() :: {non_neg_integer(), non_neg_integer()} | {nil | nil} - def get_min_max_batch_numbers do - query = - from(batch in L1Batch, - select: {min(batch.number), max(batch.number)} - ) - - Repo.one(query, timeout: :infinity) - end - - ##################################################################################### - ### Below are the functions that implement functionality not specific to Arbitrum ### - ##################################################################################### - - @doc """ - Checks if a block with the given block number exists. - - This function queries the database to determine if a block with the specified - block number exists and has been marked as having reached consensus. - - ## Parameters - - `block_number`: The number of the block to check. - - ## Returns - - `true` if the block exists and has reached consensus. - - `false` otherwise. - """ - @spec rollup_block_exists?(FullBlock.block_number()) :: boolean() - def rollup_block_exists?(block_number) do - query = - from( - block in FullBlock, - where: block.number == ^block_number and block.consensus == true - ) - - Repo.exists?(query, timeout: :infinity) - end - - @doc """ - Retrieves full details of rollup blocks, including associated transactions, for each - block number specified in the input list. - - ## Parameters - - `list_of_block_numbers`: A list of block numbers for which full block details are to be retrieved. - - ## Returns - - A list of `Explorer.Chain.Block` instances containing detailed information for each - block number in the input list. Returns an empty list if no blocks are found for the given numbers. - """ - @spec rollup_blocks([FullBlock.block_number()]) :: [FullBlock.t()] - def rollup_blocks(list_of_block_numbers) - - def rollup_blocks([]), do: [] - - def rollup_blocks(list_of_block_numbers) do - query = - from( - block in FullBlock, - where: block.number in ^list_of_block_numbers - ) - - query - # :optional is used since a block may not have any transactions - |> Chain.join_associations(%{:transactions => :optional}) - |> Repo.all() - end - - @doc """ - Retrieves the message IDs of uncompleted L1-to-L2 messages. - - ## Returns - - A list of the message IDs of uncompleted L1-to-L2 messages. - """ - @spec get_uncompleted_l1_to_l2_messages_ids() :: [non_neg_integer()] - def get_uncompleted_l1_to_l2_messages_ids do - query = - from(msg in Message, - where: msg.direction == :to_l2 and is_nil(msg.completion_transaction_hash), - select: msg.message_id - ) - - Repo.all(query) - end -end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/reader/README.md b/apps/explorer/lib/explorer/chain/arbitrum/reader/README.md new file mode 100644 index 000000000000..9f2ee86a409d --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/reader/README.md @@ -0,0 +1,40 @@ +# Arbitrum Reader Modules + +This directory contains modules that provide structured access to Arbitrum-specific data stored in the Blockscout database. 
+ +## Module Overview + +- `api/` - API endpoint-specific functions: + - `messages.ex` - Cross-chain message queries + - `settlement.ex` - Batch management, DA blob data, and rollup blocks + - `general.ex` - General utility functions like transaction log queries +- `common.ex` - Core query functionality shared between different components (API, Indexer) with configurable database selection +- `indexer/messages.ex` - Cross-chain message handling +- `indexer/parent_chain_transactions.ex` - L1 transaction lifecycle +- `indexer/settlement.ex` - Batch and state confirmation data +- `indexer/general.ex` - Chain-agnostic functions + +## Important Usage Note + +Functions in the `indexer/` modules should not be called directly. Instead, use the corresponding wrapper functions provided in the `Indexer.Fetcher.Arbitrum.Utils.Db` module. The wrapper functions provide: + +- Additional data transformation specific to indexer needs +- Enhanced error handling + +This separation ensures that database operations are properly handled and maintains a clear boundary between raw database access and indexer-specific business logic. + +## Module Organization + +The reader functionality is split across multiple modules rather than maintained in a single monolithic file for two primary reasons: + +### 1. Collaborative Development + +Splitting functionality across multiple files significantly reduces the likelihood of merge conflicts when multiple developers are working on different features simultaneously. Each module can be modified independently without affecting other parts of the codebase. + +### 2. LLM-Based Development Optimization + +The modular structure is specifically designed to work better with Large Language Model (LLM) based coding assistants: + +- **Output Token Efficiency**: While modern LLMs can handle large files in their input context, they still have limitations on output tokens. Smaller files make it easier for AI assistants to propose and explain changes within these limits. + +- **Focus Window Management**: Smaller, focused modules help maintain a clear context window when working with AI assistants, making it easier to discuss and modify specific functionality without the noise of unrelated code. \ No newline at end of file diff --git a/apps/explorer/lib/explorer/chain/arbitrum/reader/api/general.ex b/apps/explorer/lib/explorer/chain/arbitrum/reader/api/general.ex new file mode 100644 index 000000000000..3a555e720ab2 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/reader/api/general.ex @@ -0,0 +1,44 @@ +defmodule Explorer.Chain.Arbitrum.Reader.API.General do + @moduledoc """ + Provides API-specific functions for querying general Arbitrum data from the database. + + These functions implement functionality that is not specific to Arbitrum. They are + candidates for moving to a chain-agnostic module as soon as such a need arises. All + functions in this module enforce the use of replica databases for read + operations by automatically passing the `api?: true` option to database queries. + + Note: If any function from this module needs to be used outside of API handlers, + it should be moved to `Explorer.Chain.Arbitrum.Reader.Common` with configurable + database selection, and a wrapper function should be created in this module + (see `Explorer.Chain.Arbitrum.Reader.API.Settlement.highest_confirmed_block/0` as an example).
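A usage sketch (the hash and topic values below are placeholders, not real chain data):

```elixir
alias Explorer.Chain.Arbitrum.Reader.API.General

{:ok, transaction_hash} =
  Explorer.Chain.Hash.Full.cast(
    "0x0000000000000000000000000000000000000000000000000000000000000001"
  )

topic0 = "0x" <> String.duplicate("0", 64)

logs = General.transaction_to_logs_by_topic0(transaction_hash, topic0)
```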
+ """ + + import Ecto.Query, only: [order_by: 2, where: 3] + import Explorer.Chain, only: [select_repo: 1] + + alias Explorer.Chain + alias Explorer.Chain.{Hash, Log} + + @api_true [api?: true] + + @doc """ + Retrieves logs from a transaction that match a specific topic. + + Fetches all logs emitted by the specified transaction that have the given topic + as their first topic, ordered by log index. + + ## Parameters + - `transaction_hash`: The hash of the transaction to fetch logs from + - `topic0`: The first topic to filter logs by + + ## Returns + - A list of matching logs ordered by index, or empty list if none found + """ + @spec transaction_to_logs_by_topic0(Hash.Full.t(), binary()) :: [Log.t()] + def transaction_to_logs_by_topic0(transaction_hash, topic0) do + Chain.log_with_transactions_query() + |> where([log, transaction], transaction.hash == ^transaction_hash and log.first_topic == ^topic0) + |> order_by(asc: :index) + |> select_repo(@api_true).all() + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/reader/api/messages.ex b/apps/explorer/lib/explorer/chain/arbitrum/reader/api/messages.ex new file mode 100644 index 000000000000..cd51e8e0e3d3 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/reader/api/messages.ex @@ -0,0 +1,183 @@ +defmodule Explorer.Chain.Arbitrum.Reader.API.Messages do + @moduledoc """ + Provides API-specific functions for querying Arbitrum cross-chain message data from the database. + + This module contains functions specifically designed for Blockscout's API endpoints + that handle Arbitrum cross-chain message functionality. All functions in this module + enforce the use of replica databases for read operations by automatically passing + the `api?: true` option to database queries. + + The module includes functions for retrieving: + - L2->L1 messages by transaction hash or message ID + - L1->L2 messages that have been relayed + - Message counts and paginated message lists + + Note: If any function from this module needs to be used outside of API handlers, + it should be moved to `Explorer.Chain.Arbitrum.Reader.Common` with configurable + database selection, and a wrapper function should be created in this module + (see `Explorer.Chain.Arbitrum.Reader.API.Settlement.highest_confirmed_block/0` as an example). + """ + + import Ecto.Query, only: [from: 2, limit: 2, where: 3] + import Explorer.Chain, only: [select_repo: 1] + + alias Explorer.Chain.Arbitrum.Message + alias Explorer.{Chain, PagingOptions} + + @api_true [api?: true] + + @doc """ + Retrieves L2-to-L1 messages initiated by specified transaction. + + The messages are filtered by the originating transaction hash (with any status). + In the common case a transaction can initiate several messages. + + ## Parameters + - `transaction_hash`: The transaction hash which initiated the messages. + + ## Returns + - Instances of `Explorer.Chain.Arbitrum.Message` initiated by the transaction + with the given hash, or `[]` if no messages with the given status are found. + """ + @spec l2_to_l1_messages_by_transaction_hash(Chain.Hash.Full.t()) :: [Message.t()] + def l2_to_l1_messages_by_transaction_hash(transaction_hash) do + query = + from(msg in Message, + where: msg.direction == :from_l2 and msg.originating_transaction_hash == ^transaction_hash, + order_by: [desc: msg.message_id] + ) + + query + |> select_repo(@api_true).all() + end + + @doc """ + Retrieves L2-to-L1 message by message id. 
+ + ## Parameters + - `message_id`: The ID of the message to retrieve. + + ## Returns + - An instance of `Explorer.Chain.Arbitrum.Message` with the provided message ID, + or `nil` if no message with the given ID exists. + """ + @spec l2_to_l1_message_by_id(non_neg_integer()) :: Message.t() | nil + def l2_to_l1_message_by_id(message_id) do + query = + from(message in Message, + where: message.direction == :from_l2 and message.message_id == ^message_id + ) + + select_repo(@api_true).one(query) + end + + @doc """ + Retrieves the count of cross-chain messages either sent to or from the rollup. + + ## Parameters + - `direction`: A string that specifies the message direction; can be "from-rollup" or "to-rollup". + + ## Returns + - The total count of cross-chain messages. + """ + @spec messages_count(binary()) :: non_neg_integer() + def messages_count(direction) when direction == "from-rollup" do + do_messages_count(:from_l2) + end + + def messages_count(direction) when direction == "to-rollup" do + do_messages_count(:to_l2) + end + + # Counts the number of cross-chain messages based on the specified direction. + @spec do_messages_count(:from_l2 | :to_l2) :: non_neg_integer() + defp do_messages_count(direction) do + Message + |> where([msg], msg.direction == ^direction) + |> select_repo(@api_true).aggregate(:count) + end + + @doc """ + Retrieves cross-chain messages based on the specified direction. + + This function constructs and executes a query to retrieve messages either sent + to or from the rollup layer, applying pagination options. These options dictate + not only the number of items to retrieve but also how many items to skip from + the top. + + ## Parameters + - `direction`: A string that can be "from-rollup" or "to-rollup", translated internally to `:from_l2` or `:to_l2`. + - `options`: A keyword list which may contain `paging_options` specifying pagination details. + + ## Returns + - A list of `Explorer.Chain.Arbitrum.Message` entries. + """ + @spec messages(binary(), paging_options: PagingOptions.t()) :: [Message.t()] + def messages(direction, options) when direction == "from-rollup" do + do_messages(:from_l2, options) + end + + def messages(direction, options) when direction == "to-rollup" do + do_messages(:to_l2, options) + end + + # Executes the query to fetch cross-chain messages based on the specified direction. + # + # This function constructs and executes a query to retrieve messages either sent + # to or from the rollup layer, applying pagination options. These options dictate + # not only the number of items to retrieve but also how many items to skip from + # the top. + # + # ## Parameters + # - `direction`: Can be either `:from_l2` or `:to_l2`, indicating the direction of the messages. + # - `options`: A keyword list which may contain `paging_options` specifying pagination details. + # + # ## Returns + # - A list of `Explorer.Chain.Arbitrum.Message` entries matching the specified direction.
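#
# A hypothetical paging flow: the first page is fetched with
# `%PagingOptions{key: nil, page_size: 50}`; if the last message on that page
# has `message_id` 1200, the next page is requested with
# `%PagingOptions{key: {1200}, page_size: 50}`, which `page_messages/2` below
# translates into a `msg.message_id < 1200` condition.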
+ @spec do_messages(:from_l2 | :to_l2, paging_options: PagingOptions.t()) :: [Message.t()] + defp do_messages(direction, options) do + base_query = + from(msg in Message, + where: msg.direction == ^direction, + order_by: [desc: msg.message_id] + ) + + paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) + + query = + base_query + |> page_messages(paging_options) + |> limit(^paging_options.page_size) + + select_repo(@api_true).all(query) + end + + defp page_messages(query, %PagingOptions{key: nil}), do: query + + defp page_messages(query, %PagingOptions{key: {id}}) do + from(msg in query, where: msg.message_id < ^id) + end + + @doc """ + Retrieves a list of relayed L1 to L2 messages that have been completed. + + ## Parameters + - `options`: A keyword list which may contain `paging_options` specifying pagination details + + ## Returns + - A list of `Explorer.Chain.Arbitrum.Message` representing relayed messages from L1 to L2 that have been completed. + """ + @spec relayed_l1_to_l2_messages(paging_options: PagingOptions.t()) :: [Message.t()] + def relayed_l1_to_l2_messages(options) do + paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) + + query = + from(msg in Message, + where: msg.direction == :to_l2 and not is_nil(msg.completion_transaction_hash), + order_by: [desc: msg.message_id], + limit: ^paging_options.page_size + ) + + select_repo(@api_true).all(query) + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/reader/api/settlement.ex b/apps/explorer/lib/explorer/chain/arbitrum/reader/api/settlement.ex new file mode 100644 index 000000000000..3b376ec4ec76 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/reader/api/settlement.ex @@ -0,0 +1,536 @@ +defmodule Explorer.Chain.Arbitrum.Reader.API.Settlement do + @moduledoc """ + Provides API-specific functions for querying Arbitrum settlement data from the database. + + This module contains functions specifically designed for Blockscout's API endpoints + that handle Arbitrum settlement functionality. All functions in this module enforce + the use of replica databases for read operations by automatically passing the + `api?: true` option to database queries. + + The module includes functions for retrieving: + - L1 batches and their associated transactions + - Data Availability (DA) records and blobs + - Batch-related block information + - Block confirmations on the parent chain + - AnyTrust keysets + + Note: If any function from this module needs to be used outside of API handlers, + it should be moved to `Explorer.Chain.Arbitrum.Reader.Common` with configurable + database selection, and a wrapper function should be created in this module + (see `highest_confirmed_block/0` as an example). + """ + + import Ecto.Query, only: [from: 2, limit: 2, order_by: 2, where: 2, where: 3] + import Explorer.Chain, only: [select_repo: 1] + + alias Explorer.Chain.Arbitrum.{ + BatchBlock, + BatchToDaBlob, + BatchTransaction, + DaMultiPurposeRecord, + L1Batch + } + + alias Explorer.Chain.Arbitrum.Reader.Common + alias Explorer.Chain.Block, as: FullBlock + alias Explorer.Chain.Cache.BackgroundMigrations, as: MigrationStatuses + alias Explorer.{Chain, PagingOptions} + + @api_true [api?: true] + + @doc """ + Retrieves the total count of rollup batches indexed up to the current moment. + + This function uses an estimated count from system catalogs if available. + If the estimate is unavailable, it performs an exact count using an aggregate query. 
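For instance (illustrative):

```elixir
# Uses the PostgreSQL catalog estimate when available; otherwise falls back
# to an exact aggregate count.
total_batches = Explorer.Chain.Arbitrum.Reader.API.Settlement.batches_count()
```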
+ + ## Returns + - The count of indexed batches. + """ + @spec batches_count() :: non_neg_integer() + def batches_count do + Chain.get_table_rows_total_count(L1Batch, @api_true) + end + + @doc """ + Fetches the most recent batch in the database. + + ## Parameters + - `number`: must always be `:latest` + + ## Returns + - `{:ok, Explorer.Chain.Arbitrum.L1Batch}` if the batch is found. + - `{:error, :not_found}` if no batch exists. + """ + @spec batch(:latest) :: {:error, :not_found} | {:ok, L1Batch.t()} + def batch(:latest) do + L1Batch + |> order_by(desc: :number) + |> limit(1) + |> select_repo(@api_true).one() + |> case do + nil -> {:error, :not_found} + batch -> {:ok, batch} + end + end + + @doc """ + Retrieves a specific batch by its number. + + ## Parameters + - `number`: The specific batch number. + - `options`: A keyword list which may contain `necessity_by_association` specifying + the necessity for joining associations. + + ## Returns + - `{:ok, Explorer.Chain.Arbitrum.L1Batch}` if the batch is found. + - `{:error, :not_found}` if no batch with the specified number exists. + """ + @spec batch(binary() | non_neg_integer(), necessity_by_association: %{atom() => :optional | :required}) :: + {:error, :not_found} | {:ok, L1Batch.t()} + def batch(number, options) do + necessity_by_association = Keyword.get(options, :necessity_by_association, %{}) + + L1Batch + |> where(number: ^number) + |> Chain.join_associations(necessity_by_association) + |> select_repo(@api_true).one() + |> case do + nil -> {:error, :not_found} + batch -> {:ok, batch} + end + end + + @doc """ + Retrieves a list of batches from the database. + + This function constructs and executes a query to retrieve batches based on provided + options. These options dictate not only the number of items to retrieve but also + how many items to skip from the top. If the `committed?` option is set to true, + it returns the ten most recent committed batches; otherwise, it fetches batches as + dictated by other pagination parameters. + + If the `batch_numbers` option is provided and not empty, the function returns only + batches with the specified numbers, while still applying pagination. + + ## Parameters + - `options`: A keyword list of options: + * `necessity_by_association` - Specifies the necessity for joining associations + * `committed?` - When true, returns only committed batches + * `paging_options` - Specifies pagination details + * `batch_numbers` - Optional list of specific batch numbers to retrieve + + ## Returns + - A list of `Explorer.Chain.Arbitrum.L1Batch` entries, filtered and ordered according to the provided options.
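A usage sketch (alias and values are illustrative):

```elixir
alias Explorer.Chain.Arbitrum.Reader.API.Settlement

# The ten most recent committed batches
committed = Settlement.batches(committed?: true)

# A page of 25 batches with numbers below 5000
page =
  Settlement.batches(
    paging_options: %Explorer.PagingOptions{key: {5000}, page_size: 25}
  )
```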
+ """ + @spec batches( + necessity_by_association: %{atom() => :optional | :required}, + committed?: boolean(), + paging_options: PagingOptions.t(), + batch_numbers: [non_neg_integer()] | nil + ) :: [L1Batch.t()] + def batches(options) do + necessity_by_association = Keyword.get(options, :necessity_by_association, %{}) + batch_numbers = Keyword.get(options, :batch_numbers) + + base_query = + if is_list(batch_numbers) and batch_numbers != [] do + from(batch in L1Batch, + where: batch.number in ^batch_numbers, + order_by: [desc: batch.number] + ) + else + from(batch in L1Batch, + order_by: [desc: batch.number] + ) + end + + query = + if Keyword.get(options, :committed?, false) do + base_query + |> Chain.join_associations(necessity_by_association) + |> where([batch], not is_nil(batch.commitment_id) and batch.commitment_id > 0) + |> limit(10) + else + paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) + + base_query + |> Chain.join_associations(necessity_by_association) + |> page_batches(paging_options) + |> limit(^paging_options.page_size) + end + + select_repo(@api_true).all(query) + end + + defp page_batches(query, %PagingOptions{key: nil}), do: query + + defp page_batches(query, %PagingOptions{key: {number}}) do + from(batch in query, where: batch.number < ^number) + end + + @doc """ + Retrieves a list of rollup transactions included in a specific batch. + + ## Parameters + - `batch_number`: The batch number whose transactions are included in L1. + - `options`: A keyword list that is not used in this function. + + ## Returns + - A list of `Explorer.Chain.Arbitrum.BatchTransaction` entries belonging to the specified batch. + """ + @spec batch_transactions(non_neg_integer() | binary(), any()) :: [BatchTransaction.t()] + def batch_transactions(batch_number, _options) do + query = from(transaction in BatchTransaction, where: transaction.batch_number == ^batch_number) + + select_repo(@api_true).all(query) + end + + @doc """ + Retrieves a list of rollup blocks included in a specific batch. + + This function constructs and executes a database query to retrieve a list of rollup blocks, + considering pagination options specified in the `options` parameter. These options dictate + the number of items to retrieve and how many items to skip from the top. + + ## Parameters + - `batch_number`: The batch number whose transactions are included on L1. + - `options`: A keyword list of options specifying pagination and association necessity. + + ## Returns + - A list of `Explorer.Chain.Block` entries belonging to the specified batch. 
+ """ + @spec batch_blocks(non_neg_integer() | binary(), + necessity_by_association: %{atom() => :optional | :required}, + paging_options: PagingOptions.t() + ) :: [FullBlock.t()] + def batch_blocks(batch_number, options) do + necessity_by_association = Keyword.get(options, :necessity_by_association, %{}) + paging_options = Keyword.get(options, :paging_options, Chain.default_paging_options()) + + query = + from( + fb in FullBlock, + inner_join: rb in BatchBlock, + on: fb.number == rb.block_number, + select: fb, + where: fb.consensus == true and rb.batch_number == ^batch_number + ) + + query + |> FullBlock.block_type_filter("Block") + |> page_blocks(paging_options) + |> limit(^paging_options.page_size) + |> order_by(desc: :number) + |> Chain.join_associations(necessity_by_association) + |> select_repo(@api_true).all() + end + + defp page_blocks(query, %PagingOptions{key: nil}), do: query + + defp page_blocks(query, %PagingOptions{key: {block_number}}) do + where(query, [block], block.number < ^block_number) + end + + @doc """ + Retrieves a Data Availability (DA) record from the database using the provided + data key. + + Although one data blob could correspond to multiple batches, the current + implementation returns only the first batch number that the data blob is associated + with. + + The function supports both old and new database schemas: + - In the old schema, batch numbers were stored directly in the arbitrum_da_multi_purpose table + - In the new schema, batch-to-blob associations are stored in the arbitrum_batches_to_da_blobs table + + ## Parameters + - `data_key`: The key of the data to be retrieved. + + ## Returns + - `{:ok, {batch_number, da_info}}`, where + - `batch_number` is the number of the batch associated with the DA record + - `da_info` is a map containing the DA record. + - `{:error, :not_found}` if no record with the specified `data_key` exists. + """ + @spec get_da_record_by_data_key(binary()) :: {:ok, {non_neg_integer(), map()}} | {:error, :not_found} + def get_da_record_by_data_key("0x" <> _ = data_key) do + data_key_bytes = data_key |> Chain.string_to_block_hash() |> Kernel.elem(1) |> Map.get(:bytes) + get_da_record_by_data_key(data_key_bytes) + end + + def get_da_record_by_data_key(data_key) do + case MigrationStatuses.get_arbitrum_da_records_normalization_finished() do + true -> + # Migration is complete, use new schema + get_da_record_by_data_key_new_schema(data_key) + + _ -> + # Migration in progress, try old schema first, then fallback to new + case get_da_record_by_data_key_old_schema(data_key) do + {:error, :not_found} -> get_da_record_by_data_key_new_schema(data_key) + result -> result + end + end + end + + # Builds a query to fetch DA records by data key. + # + # This function constructs an Ecto query to retrieve Data Availability blob + # description (type 0) that match a specific data key. + # + # ## Parameters + # - `data_key`: The key of the data to be retrieved. + # + # ## Returns + # - An Ecto query that can be executed to fetch matching DA records. + @spec build_da_records_by_data_key_query(binary()) :: Ecto.Query.t() + defp build_da_records_by_data_key_query(data_key) do + from( + da_records in DaMultiPurposeRecord, + where: da_records.data_key == ^data_key and da_records.data_type == 0 + ) + end + + # Gets DA record using the pre-migration database schema where batch numbers + # were stored directly in the arbitrum_da_multi_purpose table. + # + # ## Parameters + # - `data_key`: The key of the data to be retrieved. 
+ # + # ## Returns + # - `{:ok, {batch_number, da_info}}` if the record is found + # - `{:error, :not_found}` if no record is found + @spec get_da_record_by_data_key_old_schema(binary()) :: {:ok, {non_neg_integer(), map()}} | {:error, :not_found} + defp get_da_record_by_data_key_old_schema(data_key) do + query = build_da_records_by_data_key_query(data_key) + + case select_repo(@api_true).one(query) do + nil -> {:error, :not_found} + keyset -> {:ok, {keyset.batch_number, keyset.data}} + end + end + + # Gets DA blob description using the post-migration database schema where DA blob + # descriptions and their associations with batches are stored in separate tables: + # + # - `arbitrum_da_multi_purpose` (`DaMultiPurposeRecord`): Stores the actual DA + # blob descriptions + # - `arbitrum_batches_to_da_blobs` (`BatchToDaBlob`): Maps batch numbers to DA + # blob IDs. + # + # ## Parameters + # - `data_key`: The key of the data to be retrieved. + # + # ## Returns + # - `{:ok, {batch_number, da_info}}` if the pair of batch number and DA blob description is found, where: + # * `batch_number` is the highest batch number associated with the DA blob description + # * `da_info` is the data from the DA blob description + # - `{:error, :not_found}` if no record is found + @spec get_da_record_by_data_key_new_schema(binary()) :: {:ok, {non_neg_integer(), map()}} | {:error, :not_found} + defp get_da_record_by_data_key_new_schema(data_key) do + repo = select_repo(@api_true) + + with da_record when not is_nil(da_record) <- repo.one(build_da_records_by_data_key_query(data_key)), + batch_number when not is_nil(batch_number) <- + data_key + |> build_batch_numbers_by_data_key_query() + |> limit(1) + |> repo.one() do + {:ok, {batch_number, da_record.data}} + else + nil -> {:error, :not_found} + end + end + + @doc """ + Retrieves Data Availability (DA) information from the database using the provided + batch number. + + The function handles both pre- and post-migration database schemas: + - In the pre-migration schema, DA records were stored directly in the + arbitrum_da_multi_purpose table with a batch_number field. + - In the post-migration schema, a separate arbitrum_batches_to_da_blobs table + enables many-to-many relationships between batches and DA blobs. + + ## Parameters + - `batch_number`: The batch number to be used for retrieval. + + ## Returns + - A map containing the DA information if found, otherwise an empty map. + """ + @spec get_da_info_by_batch_number(non_neg_integer()) :: map() + def get_da_info_by_batch_number(batch_number) do + # The migration normalizes how Data Availability (DA) records are stored in the database. + # Before the migration, the association between batches and DA blobs was stored directly + # in the arbitrum_da_multi_purpose table using a batch_number field. This approach had + # limitations when the same DA blob was used for different batches in AnyTrust chains. + # + # After the migration, the associations are stored in a separate arbitrum_batches_to_da_blobs + # table, allowing many-to-many relationships between batches and DA blobs. This change + # ensures proper handling of cases where multiple batches share the same DA blob. 
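#
# Schematically (columns abridged, per the description above):
#
#   arbitrum_da_multi_purpose:    data_key | data_type | data | batch_number (legacy)
#   arbitrum_batches_to_da_blobs: batch_number | data_blob_id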
case MigrationStatuses.get_arbitrum_da_records_normalization_finished() do + true -> + # Migration is complete, use new schema + get_da_info_by_batch_number_new_schema(batch_number) + + _ -> + # Migration in progress, try old schema first, then fallback to new + case get_da_info_by_batch_number_old_schema(batch_number) do + %{} = empty when map_size(empty) == 0 -> + get_da_info_by_batch_number_new_schema(batch_number) + + result -> + result + end + end + end + + # Retrieves DA info using the pre-migration database schema where DA records were stored + # directly in the arbitrum_da_multi_purpose table with a batch_number field. + # + # ## Parameters + # - `batch_number`: The batch number to lookup in the arbitrum_da_multi_purpose table + # + # ## Returns + # - A map containing the DA info if found, otherwise an empty map + @spec get_da_info_by_batch_number_old_schema(non_neg_integer()) :: map() + defp get_da_info_by_batch_number_old_schema(batch_number) do + query = + from( + da_records in DaMultiPurposeRecord, + where: da_records.batch_number == ^batch_number and da_records.data_type == 0 + ) + + case select_repo(@api_true).one(query) do + nil -> %{} + record -> record.data + end + end + + # Gets DA info using the post-migration database schema where DA records and their + # associations with batches are stored in separate tables: + # + # - `arbitrum_da_multi_purpose` (`DaMultiPurposeRecord`): Stores the actual DA + # records with their data and type + # - `arbitrum_batches_to_da_blobs` (`BatchToDaBlob`): Maps batch numbers to DA + # blob IDs. + # + # ## Parameters + # - `batch_number`: The batch number to lookup in the arbitrum_batches_to_da_blobs table + # + # ## Returns + # - A map containing the DA info if found, otherwise an empty map + @spec get_da_info_by_batch_number_new_schema(non_neg_integer()) :: map() + defp get_da_info_by_batch_number_new_schema(batch_number) do + query = + from( + link in BatchToDaBlob, + join: da_record in DaMultiPurposeRecord, + on: link.data_blob_id == da_record.data_key, + where: link.batch_number == ^batch_number and da_record.data_type == 0, + select: da_record.data + ) + + case select_repo(@api_true).one(query) do + nil -> %{} + data -> data + end + end + + @doc """ + Retrieves the number of the highest confirmed rollup block. + + It calls `Common.highest_confirmed_block/1` with the `@api_true` option to use + the replica database. + + ## Returns + - The number of the highest confirmed rollup block, or `nil` if no confirmed rollup blocks are found. + """ + @spec highest_confirmed_block() :: FullBlock.block_number() | nil + def highest_confirmed_block do + Common.highest_confirmed_block(@api_true) + end + + @doc """ + Retrieves an AnyTrust keyset from the database using the provided keyset hash. + + It calls `Common.get_anytrust_keyset/2` with the `api?: true` option to use + the replica database. + + ## Parameters + - `keyset_hash`: A binary representing the hash of the keyset to be retrieved. + + ## Returns + - A map containing information about the AnyTrust keyset, otherwise an empty map. + """ + @spec get_anytrust_keyset(binary()) :: map() + def get_anytrust_keyset(keyset_hash) do + Common.get_anytrust_keyset(keyset_hash, api?: true) + end + + @doc """ + Retrieves all batch numbers associated with a Data Availability (DA) blob hash + and the corresponding DA blob description.
+ + The function handles both pre- and post-migration database schemas: + - In the pre-migration schema, only one batch can be associated with a DA blob, + so the function returns a single-element list with that batch number. + - In the post-migration schema, multiple batches can share the same DA blob, + so the function returns all associated batch numbers. + + ## Parameters + - `data_key`: The hash of the DA blob to find associated batch numbers for. + + ## Returns + - `{:ok, {batch_numbers, da_info}}` if the record is found, where: + * `batch_numbers` is a list of batch numbers associated with the DA blob, + sorted from highest to lowest + * `da_info` is the data from the DA blob description + - `{:error, :not_found}` if no record is found + """ + @spec get_all_da_records_by_data_key(binary()) :: {:ok, {[non_neg_integer()], map()}} | {:error, :not_found} + def get_all_da_records_by_data_key(data_key) do + case MigrationStatuses.get_arbitrum_da_records_normalization_finished() do + true -> + repo = select_repo(@api_true) + + with da_record when not is_nil(da_record) <- repo.one(build_da_records_by_data_key_query(data_key)), + batch_numbers when batch_numbers != [] <- repo.all(build_batch_numbers_by_data_key_query(data_key)) do + {:ok, {batch_numbers, da_record.data}} + else + _ -> {:error, :not_found} + end + + _ -> + # During migration, fall back to getting a single batch + case get_da_record_by_data_key(data_key) do + {:ok, {batch_number, da_info}} -> {:ok, {[batch_number], da_info}} + {:error, :not_found} = error -> error + end + end + end + + # Builds a query to fetch batch numbers associated with a DA blob hash. + # + # This function constructs an Ecto query to retrieve batch numbers from the + # arbitrum_batches_to_da_blobs table that match a specific data blob ID (hash). + # The batch numbers are sorted in descending order. + # + # ## Parameters + # - `data_key`: The hash of the data blob to find associated batch numbers for. + # + # ## Returns + # - An Ecto query that can be executed to fetch matching batch numbers. + @spec build_batch_numbers_by_data_key_query(binary()) :: Ecto.Query.t() + defp build_batch_numbers_by_data_key_query(data_key) do + from( + link in BatchToDaBlob, + where: link.data_blob_id == ^data_key, + select: link.batch_number, + order_by: [desc: link.batch_number] + ) + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/reader/common.ex b/apps/explorer/lib/explorer/chain/arbitrum/reader/common.ex new file mode 100644 index 000000000000..2d14cd46e693 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/reader/common.ex @@ -0,0 +1,91 @@ +defmodule Explorer.Chain.Arbitrum.Reader.Common do + @moduledoc """ + Provides common database query functions for Arbitrum-specific data that are shared + between different Blockscout components. + + This module serves as a central location for core query functionality that needs to + be accessed from different logical parts of the application, such as: + + * Web API handlers (e.g. `Explorer.Chain.Arbitrum.Reader.API.Settlement`) + * Chain indexer components (e.g. `Explorer.Chain.Arbitrum.Reader.Indexer.Settlement`) + * Other potential consumers + + The functions in this module are designed to be configurable in terms of database + selection (primary vs replica) through options parameters. This allows the calling + modules to maintain their specific database access patterns while sharing the core + query logic. 
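A call-site sketch of the two patterns (illustrative):

```elixir
# API context: read from a replica database
Explorer.Chain.Arbitrum.Reader.Common.highest_confirmed_block(api?: true)

# Indexer context: read from the primary database
Explorer.Chain.Arbitrum.Reader.Common.highest_confirmed_block(api?: false)
```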
+ + For example, API handlers typically use replica databases to reduce load on the + primary database, while indexer components require immediate consistency and thus + use the primary database. This module accommodates both use cases through options + parameters. + + When adding new functions to this module, ensure they: + * Are needed by multiple components of the application + * Accept options for configuring database selection + * Implement core query logic that can be reused across different contexts + """ + + import Ecto.Query, only: [from: 2] + import Explorer.Chain, only: [select_repo: 1, string_to_block_hash: 1] + + alias Explorer.Chain.Arbitrum.{ + BatchBlock, + DaMultiPurposeRecord + } + + alias Explorer.Chain.Block, as: FullBlock + + @doc """ + Retrieves the number of the highest confirmed rollup block. + + ## Parameters + - `options`: A keyword list of options: + - `:api?` - Whether the function is being called from an API context. + + ## Returns + - The number of the highest confirmed rollup block, or `nil` if no confirmed rollup blocks are found. + """ + @spec highest_confirmed_block(api?: boolean()) :: FullBlock.block_number() | nil + def highest_confirmed_block(options) do + query = + from( + rb in BatchBlock, + where: not is_nil(rb.confirmation_id), + select: rb.block_number, + order_by: [desc: rb.block_number], + limit: 1 + ) + + select_repo(options).one(query) + end + + @doc """ + Retrieves an AnyTrust keyset from the database using the provided keyset hash. + + ## Parameters + - `keyset_hash`: A binary representing the hash of the keyset to be retrieved. + - `options`: A keyword list of options: + - `:api?` - Whether the function is being called from an API context. + + ## Returns + - A map containing information about the AnyTrust keyset, otherwise an empty map. + """ + @spec get_anytrust_keyset(binary(), api?: boolean()) :: map() + def get_anytrust_keyset("0x" <> <<_::binary-size(64)>> = keyset_hash, options) do + get_anytrust_keyset(keyset_hash |> string_to_block_hash() |> Kernel.elem(1) |> Map.get(:bytes), options) + end + + def get_anytrust_keyset(keyset_hash, options) do + query = + from( + da_records in DaMultiPurposeRecord, + where: da_records.data_key == ^keyset_hash and da_records.data_type == 1 + ) + + case select_repo(options).one(query) do + nil -> %{} + keyset -> keyset.data + end + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/reader/indexer/general.ex b/apps/explorer/lib/explorer/chain/arbitrum/reader/indexer/general.ex new file mode 100644 index 000000000000..a6762501f405 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/reader/indexer/general.ex @@ -0,0 +1,46 @@ +defmodule Explorer.Chain.Arbitrum.Reader.Indexer.General do + @moduledoc """ + Provides general-purpose blockchain data reading functionality that is currently + not available in other chain-agnostic modules under `Explorer.Chain.*`. + + While these functions are located in the Arbitrum namespace, they are + implementation-agnostic and contain no Arbitrum-specific logic. They are + candidates for relocation to a general blockchain reader module when similar + functionality is needed for other chains. + """ + + import Ecto.Query, only: [from: 2] + + alias Explorer.{Chain, Repo} + + alias Explorer.Chain.Block, as: FullBlock + + @doc """ + Retrieves full details of rollup blocks, including associated transactions, for each + block number specified in the input list. 
+ + ## Parameters + - `list_of_block_numbers`: A list of block numbers for which full block details are to be retrieved. + + ## Returns + - A list of `Explorer.Chain.Block` instances containing detailed information for each + block number in the input list. Returns an empty list if no blocks are found for the given numbers. + """ + @spec rollup_blocks([FullBlock.block_number()]) :: [FullBlock.t()] + def rollup_blocks(list_of_block_numbers) + + def rollup_blocks([]), do: [] + + def rollup_blocks(list_of_block_numbers) do + query = + from( + block in FullBlock, + where: block.number in ^list_of_block_numbers + ) + + query + # :optional is used since a block may not have any transactions + |> Chain.join_associations(%{:transactions => :optional}) + |> Repo.all() + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/reader/indexer/messages.ex b/apps/explorer/lib/explorer/chain/arbitrum/reader/indexer/messages.ex new file mode 100644 index 000000000000..be2edad3ccf0 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/reader/indexer/messages.ex @@ -0,0 +1,373 @@ +defmodule Explorer.Chain.Arbitrum.Reader.Indexer.Messages do + @moduledoc """ + Provides functions for querying and managing Arbitrum cross-chain messages in the Blockscout indexer. + + This module handles the retrieval and tracking of messages between a parent + chain and Orbit (built with Arbitrum technology) chains, including: + - L1-to-L2 message discovery and tracking + - L2-to-L1 message monitoring and status updates + - Detection of missed messages in both directions + - Tracking of L2-to-L1 message executions on L1 + """ + + import Ecto.Query, only: [dynamic: 2, from: 2, limit: 2, order_by: 2, select: 3, where: 3] + + alias Explorer.Chain.Arbitrum.{ + L1Execution, + LifecycleTransaction, + Message + } + + alias Explorer.Chain.Block, as: FullBlock + alias Explorer.Chain.{Hash, Log, Transaction} + alias Explorer.{Chain, Repo} + + # https://github.com/OffchainLabs/go-ethereum/blob/dff302de66598c36b964b971f72d35a95148e650/core/types/transaction.go#L44C2-L50 + @message_to_l2_eth_deposit 100 + @message_to_l2_submit_retryable_transaction 105 + @to_l2_messages_transaction_types [ + @message_to_l2_eth_deposit, + @message_to_l2_submit_retryable_transaction + ] + + @doc """ + Retrieves the number of the latest L1 block where an L1-to-L2 message was discovered. + + ## Returns + - The number of L1 block, or `nil` if no L1-to-L2 messages are found. + """ + @spec l1_block_of_latest_discovered_message_to_l2() :: FullBlock.block_number() | nil + def l1_block_of_latest_discovered_message_to_l2 do + query = + from(msg in Message, + select: msg.originating_transaction_block_number, + where: msg.direction == :to_l2 and not is_nil(msg.originating_transaction_block_number), + order_by: [desc: msg.message_id], + limit: 1 + ) + + query + |> Repo.one(timeout: :infinity) + end + + @doc """ + Retrieves the number of the earliest L1 block where an L1-to-L2 message was discovered. + + ## Returns + - The number of L1 block, or `nil` if no L1-to-L2 messages are found. 
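A note on how these two bounds are typically consumed: an indexer worker uses the latest discovered block to resume forward discovery and the earliest one to drive historical backfill. A minimal sketch of the forward case, assuming a caller-supplied safe L1 block; the wrapper module is hypothetical, not part of this patch:

```elixir
defmodule DiscoverySketch do
  alias Explorer.Chain.Arbitrum.Reader.Indexer.Messages

  # Decide which L1 range still needs scanning for L1-to-L2 messages.
  def next_range(safe_l1_block) do
    case Messages.l1_block_of_latest_discovered_message_to_l2() do
      # nothing discovered yet: the caller falls back to a full historical scan
      nil -> :full_scan
      # otherwise resume right after the last L1 block already covered
      latest -> {latest + 1, safe_l1_block}
    end
  end
end
```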
+ """ + @spec l1_block_of_earliest_discovered_message_to_l2() :: FullBlock.block_number() | nil + def l1_block_of_earliest_discovered_message_to_l2 do + query = + from(msg in Message, + select: msg.originating_transaction_block_number, + where: msg.direction == :to_l2 and not is_nil(msg.originating_transaction_block_number), + order_by: [asc: msg.message_id], + limit: 1 + ) + + query + |> Repo.one(timeout: :infinity) + end + + @doc """ + Retrieves the rollup block number of the first missed L2-to-L1 message. + + The function identifies missing messages by checking logs for the specified + L2-to-L1 event and verifying if there are corresponding entries in the messages + table. A message is considered missed if there is a log entry without a + matching message record. + + ## Parameters + - `arbsys_contract`: The address of the Arbitrum system contract. + - `l2_to_l1_event`: The event identifier for L2-to-L1 messages. + + ## Returns + - The block number of the first missed L2-to-L1 message, or `nil` if no missed + messages are found. + """ + @spec rollup_block_of_first_missed_message_from_l2(binary(), binary()) :: FullBlock.block_number() | nil + def rollup_block_of_first_missed_message_from_l2(arbsys_contract, l2_to_l1_event) do + # credo:disable-for-lines:5 Credo.Check.Refactor.PipeChainStart + missed_messages_from_l2_query(arbsys_contract, l2_to_l1_event) + |> order_by(desc: :block_number) + |> limit(1) + |> select([log], log.block_number) + |> Repo.one(timeout: :infinity) + end + + @doc """ + Retrieves the rollup block number of the first missed L1-to-L2 message. + + The function identifies missing messages by checking transactions of specific + types that are supposed to contain L1-to-L2 messages and verifying if there are + corresponding entries in the messages table. A message is considered missed if + there is a transaction without a matching message record. + + ## Returns + - The block number of the first missed L1-to-L2 message, or `nil` if no missed + messages are found. + """ + @spec rollup_block_of_first_missed_message_to_l2() :: FullBlock.block_number() | nil + def rollup_block_of_first_missed_message_to_l2 do + missed_messages_to_l2_query() + |> order_by(desc: :block_number) + |> limit(1) + |> select([rollup_transaction], rollup_transaction.block_number) + |> Repo.one(timeout: :infinity) + end + + @doc """ + Reads a list of transactions executing L2-to-L1 messages by their IDs. + + ## Parameters + - `message_ids`: A list of IDs to retrieve executing transactions for. + + ## Returns + - A list of `Explorer.Chain.Arbitrum.L1Execution` corresponding to the message IDs from + the input list. The output list may be smaller than the input list if some IDs do not + correspond to any existing transactions. + """ + @spec l1_executions(maybe_improper_list(non_neg_integer(), [])) :: [L1Execution.t()] + def l1_executions(message_ids) when is_list(message_ids) do + query = + from( + ex in L1Execution, + where: ex.message_id in ^message_ids + ) + + query + # :required is used since execution records in the table are created only when + # the corresponding execution transaction is indexed + |> Chain.join_associations(%{:execution_transaction => :required}) + |> Repo.all() + end + + @doc """ + Retrieves the number of the latest L1 block where a transaction executing an L2-to-L1 message was discovered. + + ## Returns + - The number of the latest L1 block with an executing transaction for an L2-to-L1 message, or `nil` if no such transactions are found. 
+ """ + @spec l1_block_of_latest_execution() :: FullBlock.block_number() | nil + def l1_block_of_latest_execution do + query = + from( + transaction in LifecycleTransaction, + inner_join: ex in L1Execution, + on: transaction.id == ex.execution_id, + select: transaction.block_number, + order_by: [desc: transaction.block_number], + limit: 1 + ) + + query + |> Repo.one(timeout: :infinity) + end + + @doc """ + Retrieves the number of the earliest L1 block where a transaction executing an L2-to-L1 message was discovered. + + ## Returns + - The number of the earliest L1 block with an executing transaction for an L2-to-L1 message, or `nil` if no such transactions are found. + """ + @spec l1_block_of_earliest_execution() :: FullBlock.block_number() | nil + def l1_block_of_earliest_execution do + query = + from( + transaction in LifecycleTransaction, + inner_join: ex in L1Execution, + on: transaction.id == ex.execution_id, + select: transaction.block_number, + order_by: [asc: transaction.block_number], + limit: 1 + ) + + query + |> Repo.one(timeout: :infinity) + end + + @doc """ + Retrieves all L2-to-L1 messages with the specified status. + + If `block_number` is not `nil`, only messages originating in rollup blocks with + numbers not higher than the specified block are considered. Otherwise, all + messages are considered. + + ## Parameters + - `status`: The status of the messages to retrieve, such as `:initiated`, + `:sent`, `:confirmed`, or `:relayed`. + - `block_number`: The number of a rollup block that limits the messages lookup, + or `nil`. + + ## Returns + - Instances of `Explorer.Chain.Arbitrum.Message` corresponding to the criteria, + or `[]` if no messages with the given status are found. + """ + @spec l2_to_l1_messages(:confirmed | :initiated | :relayed | :sent, FullBlock.block_number() | nil) :: [ + Message.t() + ] + def l2_to_l1_messages(status, block_number) + when status in [:initiated, :sent, :confirmed, :relayed] and + is_integer(block_number) and + block_number >= 0 do + query = + from(msg in Message, + where: + msg.direction == :from_l2 and msg.originating_transaction_block_number <= ^block_number and + msg.status == ^status, + order_by: [desc: msg.message_id] + ) + + Repo.all(query) + end + + def l2_to_l1_messages(status, nil) when status in [:initiated, :sent, :confirmed, :relayed] do + query = + from(msg in Message, + where: msg.direction == :from_l2 and msg.status == ^status, + order_by: [desc: msg.message_id] + ) + + Repo.all(query) + end + + @doc """ + Retrieves the transaction hashes for missed L1-to-L2 messages within a specified + block range. + + The function identifies missed messages by checking transactions of specific + types that are supposed to contain L1-to-L2 messages and verifying if there are + corresponding entries in the messages table. A message is considered missed if + there is a transaction without a matching message record within the specified + block range. + + ## Parameters + - `start_block`: The starting block number of the range. + - `end_block`: The ending block number of the range. + + ## Returns + - A list of transaction hashes for missed L1-to-L2 messages. 
+ """ + @spec transactions_for_missed_messages_to_l2(non_neg_integer(), non_neg_integer()) :: [Hash.t()] + def transactions_for_missed_messages_to_l2(start_block, end_block) do + missed_messages_to_l2_query() + |> where( + [rollup_transaction], + rollup_transaction.block_number >= ^start_block and rollup_transaction.block_number <= ^end_block + ) + |> order_by(desc: :block_timestamp) + |> select([rollup_transaction], rollup_transaction.hash) + |> Repo.all() + end + + # Constructs a query to retrieve missed L1-to-L2 messages. + # + # The function constructs a query to identify missing messages by checking + # transactions of specific types that are supposed to contain L1-to-L2 + # messages and verifying if there are corresponding entries in the messages + # table. A message is considered missed if there is a transaction without a + # matching message record. + # + # ## Returns + # - A query to retrieve missed L1-to-L2 messages. + @spec missed_messages_to_l2_query() :: Ecto.Query.t() + defp missed_messages_to_l2_query do + from(rollup_transaction in Transaction, + left_join: msg in Message, + on: rollup_transaction.hash == msg.completion_transaction_hash and msg.direction == :to_l2, + where: rollup_transaction.type in @to_l2_messages_transaction_types and is_nil(msg.completion_transaction_hash) + ) + end + + @doc """ + Retrieves the logs for missed L2-to-L1 messages within a specified block range. + + The function identifies missed messages by checking logs for the specified + L2-to-L1 event and verifying if there are corresponding entries in the messages + table. A message is considered missed if there is a log entry without a + matching message record within the specified block range. + + ## Parameters + - `start_block`: The starting block number of the range. + - `end_block`: The ending block number of the range. + - `arbsys_contract`: The address of the Arbitrum system contract. + - `l2_to_l1_event`: The event identifier for L2-to-L1 messages. + + ## Returns + - A list of logs for missed L2-to-L1 messages. + """ + @spec logs_for_missed_messages_from_l2(non_neg_integer(), non_neg_integer(), binary(), binary()) :: [Log.t()] + def logs_for_missed_messages_from_l2(start_block, end_block, arbsys_contract, l2_to_l1_event) do + # credo:disable-for-lines:5 Credo.Check.Refactor.PipeChainStart + missed_messages_from_l2_query(arbsys_contract, l2_to_l1_event, start_block, end_block) + |> where([log, msg], log.block_number >= ^start_block and log.block_number <= ^end_block) + |> order_by(desc: :block_number, desc: :index) + |> select([log], log) + |> Repo.all() + end + + # Constructs a query to retrieve missed L2-to-L1 messages. + # + # The function constructs a query to identify missing messages by checking logs + # for the specified L2-to-L1 and verifying if there are corresponding entries + # in the messages table within a given block range, or among all messages if no + # block range is provided. A message is considered missed if there is a log + # entry without a matching message record. + # + # ## Parameters + # - `arbsys_contract`: The address hash of the Arbitrum system contract. + # - `l2_to_l1_event`: The event identifier for L2 to L1 messages. + # - `start_block`: The starting block number for the search range (optional). + # - `end_block`: The ending block number for the search range (optional). + # + # ## Returns + # - A query to retrieve missed L2-to-L1 messages. 
+ @spec missed_messages_from_l2_query(binary(), binary(), non_neg_integer() | nil, non_neg_integer() | nil) :: + Ecto.Query.t() + defp missed_messages_from_l2_query(arbsys_contract, l2_to_l1_event, start_block \\ nil, end_block \\ nil) do + # It is assumed that all the messages from the same transaction are handled + # atomically so there is no need to check the message_id for each log entry. + # Otherwise, the join condition must be extended with + # fragment("encode(l0.fourth_topic, 'hex') = LPAD(TO_HEX(a1.message_id::BIGINT), 64, '0')") + base_condition = + dynamic([log, msg], log.transaction_hash == msg.originating_transaction_hash and msg.direction == :from_l2) + + join_condition = + if is_nil(start_block) or is_nil(end_block) do + base_condition + else + dynamic( + [_, msg], + ^base_condition and + msg.originating_transaction_block_number >= ^start_block and + msg.originating_transaction_block_number <= ^end_block + ) + end + + from(log in Log, + left_join: msg in Message, + on: ^join_condition, + where: + log.address_hash == ^arbsys_contract and log.first_topic == ^l2_to_l1_event and + is_nil(msg.originating_transaction_hash) + ) + end + + @doc """ + Retrieves the message IDs of uncompleted L1-to-L2 messages. + + ## Returns + - A list of the message IDs of uncompleted L1-to-L2 messages. + """ + @spec get_uncompleted_l1_to_l2_messages_ids() :: [non_neg_integer()] + def get_uncompleted_l1_to_l2_messages_ids do + query = + from(msg in Message, + where: msg.direction == :to_l2 and is_nil(msg.completion_transaction_hash), + select: msg.message_id + ) + + Repo.all(query) + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/reader/indexer/parent_chain_transactions.ex b/apps/explorer/lib/explorer/chain/arbitrum/reader/indexer/parent_chain_transactions.ex new file mode 100644 index 000000000000..42543965e8d2 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/reader/indexer/parent_chain_transactions.ex @@ -0,0 +1,115 @@ +defmodule Explorer.Chain.Arbitrum.Reader.Indexer.ParentChainTransactions do + @moduledoc """ + Provides functions for querying Arbitrum L1 (parent chain) lifecycle transactions. + + Lifecycle transactions are parent chain transactions that affect the state of the Arbitrum + rollup. These transactions can be: + * Batch commitment transactions created by the sequencer + * State root confirmation transactions after fraud proof window expiration + * User-initiated transactions executing messages from the rollup + """ + + import Ecto.Query, only: [from: 2] + + alias Explorer.Chain.Arbitrum.LifecycleTransaction + alias Explorer.Chain.Block, as: FullBlock + alias Explorer.Chain.Hash + alias Explorer.Repo + + @doc """ + Reads a list of L1 transactions by their hashes from the `arbitrum_lifecycle_l1_transactions` table and returns their IDs. + + ## Parameters + - `l1_transaction_hashes`: A list of hashes to retrieve L1 transactions for. + + ## Returns + - A list of tuples containing transaction hashes and IDs for the transaction + hashes from the input list. The output list may be smaller than the input + list. + """ + @spec lifecycle_transaction_ids([binary()]) :: [{Hash.t(), non_neg_integer}] + def lifecycle_transaction_ids(l1_transaction_hashes) when is_list(l1_transaction_hashes) do + query = + from( + lt in LifecycleTransaction, + select: {lt.hash, lt.id}, + where: lt.hash in ^l1_transaction_hashes + ) + + Repo.all(query) + end + + @doc """ + Reads a list of L1 transactions by their hashes from the `arbitrum_lifecycle_l1_transactions` table. 
+ + ## Parameters + - `l1_transaction_hashes`: A list of hashes to retrieve L1 transactions for. + + ## Returns + - A list of `Explorer.Chain.Arbitrum.LifecycleTransaction` corresponding to the + hashes from the input list. The output list may be smaller than the input + list. + """ + @spec lifecycle_transactions([binary()]) :: [LifecycleTransaction.t()] + def lifecycle_transactions(l1_transaction_hashes) when is_list(l1_transaction_hashes) do + query = + from( + lt in LifecycleTransaction, + where: lt.hash in ^l1_transaction_hashes + ) + + Repo.all(query) + end + + @doc """ + Determines the next index for the L1 transaction available in the `arbitrum_lifecycle_l1_transactions` table. + + ## Returns + - The next available index. If there are no L1 transactions imported yet, it will return `1`. + """ + @spec next_lifecycle_transaction_id() :: non_neg_integer + def next_lifecycle_transaction_id do + query = + from(lt in LifecycleTransaction, + select: lt.id, + order_by: [desc: lt.id], + limit: 1 + ) + + last_id = + query + |> Repo.one() + |> Kernel.||(0) + + last_id + 1 + end + + @doc """ + Retrieves unfinalized L1 transactions from the `LifecycleTransaction` table that are + involved in changing the statuses of rollup blocks or transactions. + + An L1 transaction is considered unfinalized if it has not yet reached a state where + it is permanently included in the blockchain, meaning it is still susceptible to + potential reorganization or change. Transactions are evaluated against the `finalized_block` + parameter to determine their finalized status. + + ## Parameters + - `finalized_block`: The L1 block number above which transactions are considered finalized. + Transactions in blocks higher than this number are not included in the results. + + ## Returns + - A list of `Explorer.Chain.Arbitrum.LifecycleTransaction` representing unfinalized transactions, + or `[]` if no unfinalized transactions are found. + """ + @spec lifecycle_unfinalized_transactions(FullBlock.block_number()) :: [LifecycleTransaction.t()] + def lifecycle_unfinalized_transactions(finalized_block) + when is_integer(finalized_block) and finalized_block >= 0 do + query = + from( + lt in LifecycleTransaction, + where: lt.block_number <= ^finalized_block and lt.status == :unfinalized + ) + + Repo.all(query) + end +end diff --git a/apps/explorer/lib/explorer/chain/arbitrum/reader/indexer/settlement.ex b/apps/explorer/lib/explorer/chain/arbitrum/reader/indexer/settlement.ex new file mode 100644 index 000000000000..18db15cf3dea --- /dev/null +++ b/apps/explorer/lib/explorer/chain/arbitrum/reader/indexer/settlement.ex @@ -0,0 +1,483 @@ +defmodule Explorer.Chain.Arbitrum.Reader.Indexer.Settlement do + @moduledoc """ + Provides database query functions for retrieving information about Arbitrum rollup batches + and state confirmations on the L1 chain. 
+ + This module focuses on reading settlement-related data for the Arbitrum indexer, including: + + * L1 batches - Sequential groups of L2 blocks committed to L1 via commitment transactions + * Batch blocks - Individual L2 blocks included in L1 batches + * Block confirmations - L1 transactions that confirm the state of L2 blocks + * Data availability records - Additional data associated with batches (e.g., AnyTrust keysets) + """ + + import Ecto.Query, only: [from: 2, subquery: 1] + + alias Explorer.Chain.Arbitrum.{ + BatchBlock, + L1Batch, + LifecycleTransaction + } + + alias Explorer.Chain.Arbitrum.Reader.Common + + alias Explorer.{Chain, Repo} + + alias Explorer.Chain.Block, as: FullBlock + + @doc """ + Retrieves the number of the latest L1 block where the commitment transaction with a batch was included. + + As per the Arbitrum rollup nature, from the indexer's point of view, a batch does not exist until + the commitment transaction is submitted to L1. Therefore, the situation where a batch exists but + there is no commitment transaction is not possible. + + ## Returns + - The number of the L1 block, or `nil` if no rollup batches are found, or if the association between the batch + and the commitment transaction has been broken due to database inconsistency. + """ + @spec l1_block_of_latest_committed_batch() :: FullBlock.block_number() | nil + def l1_block_of_latest_committed_batch do + query = + from(batch in L1Batch, + order_by: [desc: batch.number], + limit: 1 + ) + + case query + # :required is used since the situation when commit transaction is not found is not possible + |> Chain.join_associations(%{:commitment_transaction => :required}) + |> Repo.one(timeout: :infinity) do + nil -> nil + batch -> batch.commitment_transaction.block_number + end + end + + @doc """ + Retrieves the number of the earliest L1 block where the commitment transaction with a batch was included. + + As per the Arbitrum rollup nature, from the indexer's point of view, a batch does not exist until + the commitment transaction is submitted to L1. Therefore, the situation where a batch exists but + there is no commitment transaction is not possible. + + ## Returns + - The number of the L1 block, or `nil` if no rollup batches are found, or if the association between the batch + and the commitment transaction has been broken due to database inconsistency. + """ + @spec l1_block_of_earliest_committed_batch() :: FullBlock.block_number() | nil + def l1_block_of_earliest_committed_batch do + query = + from(batch in L1Batch, + order_by: [asc: batch.number], + limit: 1 + ) + + case query + # :required is used since the situation when commit transaction is not found is not possible + |> Chain.join_associations(%{:commitment_transaction => :required}) + |> Repo.one(timeout: :infinity) do + nil -> nil + batch -> batch.commitment_transaction.block_number + end + end + + @doc """ + Retrieves the block number of the highest rollup block that has been included in a batch. + + ## Returns + - The number of the highest rollup block included in a batch, or `nil` if no rollup batches are found. + """ + @spec highest_committed_block() :: FullBlock.block_number() | nil + def highest_committed_block do + query = + from(batch in L1Batch, + select: batch.end_block, + order_by: [desc: batch.number], + limit: 1 + ) + + query + |> Repo.one() + end + + @doc """ + Gets the rollup block number by the hash of the block. 
Lookup is performed only + for blocks explicitly included in a batch, i.e., the batch has been identified by + the corresponding fetcher. The function may return `nil` as a successful response + if the batch containing the rollup block has not been indexed yet. + + ## Parameters + - `block_hash`: The hash of a block included in the batch. + + ## Returns + - the number of the rollup block corresponding to the given hash or `nil` if the + block or batch were not indexed yet. + """ + @spec rollup_block_hash_to_num(binary()) :: FullBlock.block_number() | nil + def rollup_block_hash_to_num(block_hash) when is_binary(block_hash) do + query = + from( + fb in FullBlock, + inner_join: rb in BatchBlock, + on: rb.block_number == fb.number, + select: fb.number, + where: fb.hash == ^block_hash + ) + + query + |> Repo.one() + end + + @doc """ + Checks if the numbers from the provided list correspond to the numbers of indexed batches. + + ## Parameters + - `batches_numbers`: The list of batch numbers. + + ## Returns + - A list of batch numbers that are indexed and match the provided list, or `[]` + if none of the batch numbers in the provided list exist in the database. The output list + may be smaller than the input list. + """ + @spec batches_exist(maybe_improper_list(non_neg_integer(), [])) :: [non_neg_integer] + def batches_exist(batches_numbers) when is_list(batches_numbers) do + query = + from( + batch in L1Batch, + select: batch.number, + where: batch.number in ^batches_numbers + ) + + query + |> Repo.all() + end + + @doc """ + Retrieves the batch in which the rollup block, identified by the given block number, was included. + + ## Parameters + - `number`: The number of a rollup block. + + ## Returns + - An instance of `Explorer.Chain.Arbitrum.L1Batch` representing the batch containing + the specified rollup block number, or `nil` if no corresponding batch is found. + """ + @spec get_batch_by_rollup_block_number(FullBlock.block_number()) :: L1Batch.t() | nil + def get_batch_by_rollup_block_number(number) + when is_integer(number) and number >= 0 do + query = + from(batch in L1Batch, + # end_block has higher number than start_block + where: batch.end_block >= ^number and batch.start_block <= ^number + ) + + query + # :required is used since the situation when commit transaction is not found is not possible + |> Chain.join_associations(%{:commitment_transaction => :required}) + |> Repo.one() + end + + @doc """ + Retrieves the batch by its number. + + ## Parameters + - `number`: The number of a rollup batch. + + ## Returns + - An instance of `Explorer.Chain.Arbitrum.L1Batch`, or `nil` if no batch with + such a number is found. + """ + @spec get_batch_by_number(non_neg_integer()) :: L1Batch.t() | nil + def get_batch_by_number(number) do + query = + from(batch in L1Batch, + where: batch.number == ^number + ) + + query + |> Repo.one() + end + + @doc """ + Retrieves the L1 block number where the confirmation transaction of the highest confirmed rollup block was included. + + ## Returns + - The L1 block number if a confirmed rollup block is found and the confirmation transaction is indexed; + `nil` if no confirmed rollup blocks are found or if there is a database inconsistency. 
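Two of the lookups above compose naturally when validating externally supplied batch data; an illustrative snippet (the batch and block numbers are made up):

```elixir
alias Explorer.Chain.Arbitrum.Reader.Indexer.Settlement

# Which of these candidate batch numbers are already indexed?
known_batches = Settlement.batches_exist([640_000, 640_001, 640_002])

# And which indexed batch, if any, covers a given rollup block?
batch = Settlement.get_batch_by_rollup_block_number(250_000_000)
```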
+ """ + @spec l1_block_of_latest_confirmed_block() :: FullBlock.block_number() | nil + def l1_block_of_latest_confirmed_block do + query = + from( + rb in BatchBlock, + where: not is_nil(rb.confirmation_id), + order_by: [desc: rb.block_number], + limit: 1 + ) + + case query + # :required is used since existence of the confirmation id is checked above + |> Chain.join_associations(%{:confirmation_transaction => :required}) + |> Repo.one(timeout: :infinity) do + nil -> + nil + + block -> + case block.confirmation_transaction do + # `nil` and `%Ecto.Association.NotLoaded{}` indicate DB inconsistency + nil -> nil + %Ecto.Association.NotLoaded{} -> nil + confirmation_transaction -> confirmation_transaction.block_number + end + end + end + + @doc """ + Retrieves the number of the highest confirmed rollup block. + + It calls `Common.highest_confirmed_block/1` with `api?: false` option to use + primary database. + + ## Returns + - The number of the highest confirmed rollup block, or `nil` if no confirmed rollup blocks are found. + """ + @spec highest_confirmed_block() :: FullBlock.block_number() | nil + def highest_confirmed_block do + Common.highest_confirmed_block(api?: false) + end + + @doc """ + Retrieves all unconfirmed rollup blocks within the specified range from `first_block` to `last_block`, + inclusive, where `first_block` is less than or equal to `last_block`. + + Since the function relies on the block data generated by the block fetcher, the returned list + may contain fewer blocks than actually exist if some of the blocks have not been indexed by the fetcher yet. + + ## Parameters + - `first_block`: The rollup block number starting the lookup range. + - `last_block`:The rollup block number ending the lookup range. + + ## Returns + - A list of maps containing the batch number, rollup block number and hash for each + unconfirmed block within the range. Returns `[]` if no unconfirmed blocks are found + within the range, or if the block fetcher has not indexed them. + """ + @spec unconfirmed_rollup_blocks(FullBlock.block_number(), FullBlock.block_number()) :: [BatchBlock.t()] + def unconfirmed_rollup_blocks(first_block, last_block) + when is_integer(first_block) and first_block >= 0 and + is_integer(last_block) and first_block <= last_block do + query = + from( + rb in BatchBlock, + where: rb.block_number >= ^first_block and rb.block_number <= ^last_block and is_nil(rb.confirmation_id), + order_by: [asc: rb.block_number] + ) + + Repo.all(query) + end + + @doc """ + Calculates the number of confirmed rollup blocks in the specified batch. + + ## Parameters + - `batch_number`: The number of the batch for which the count of confirmed blocks is to be calculated. + + ## Returns + - The number of confirmed blocks in the batch with the given number. + """ + @spec count_confirmed_rollup_blocks_in_batch(non_neg_integer()) :: non_neg_integer + def count_confirmed_rollup_blocks_in_batch(batch_number) + when is_integer(batch_number) and batch_number >= 0 do + query = + from( + rb in BatchBlock, + where: rb.batch_number == ^batch_number and not is_nil(rb.confirmation_id) + ) + + Repo.aggregate(query, :count) + end + + @doc """ + Retrieves the numbers of the L1 blocks containing the confirmation transactions + bounding the first interval where missed confirmation transactions could be found. + + The absence of a confirmation transaction is assumed based on the analysis of a + series of confirmed rollup blocks. 
For example, if blocks 0-3 are confirmed by transaction X, + blocks 7-9 by transaction Y, and blocks 12-15 by transaction Z, there are two gaps: + blocks 4-6 and 10-11. According to Arbitrum's nature, this indicates that the confirmation + transactions for blocks 6 and 11 have not yet been indexed. + + In the example above, the function will return the tuple with the numbers of the L1 blocks + where transactions Y and Z were included. + + ## Returns + - A tuple of the L1 block numbers between which missing confirmation transactions are suspected, + or `nil` if no gaps in confirmed blocks are found or if there are no missed confirmation transactions. + """ + @spec l1_blocks_of_confirmations_bounding_first_unconfirmed_rollup_blocks_gap() :: + {FullBlock.block_number() | nil, FullBlock.block_number()} | nil + def l1_blocks_of_confirmations_bounding_first_unconfirmed_rollup_blocks_gap do + # The first subquery retrieves the numbers of confirmed rollup blocks. + rollup_blocks_query = + from( + rb in BatchBlock, + select: %{ + block_number: rb.block_number, + confirmation_id: rb.confirmation_id + }, + where: not is_nil(rb.confirmation_id) + ) + + # The second subquery builds on the first one, grouping block numbers by their + # confirmation transactions. As a result, it identifies the starting and ending + # rollup blocks for every transaction. + confirmed_ranges_query = + from( + subquery in subquery(rollup_blocks_query), + select: %{ + confirmation_id: subquery.confirmation_id, + min_block_num: min(subquery.block_number), + max_block_num: max(subquery.block_number) + }, + group_by: subquery.confirmation_id + ) + + # The third subquery utilizes the window function LAG to associate each confirmation + # transaction with the starting rollup block of the preceding transaction. + confirmed_combined_ranges_query = + from( + subquery in subquery(confirmed_ranges_query), + select: %{ + confirmation_id: subquery.confirmation_id, + min_block_num: subquery.min_block_num, + max_block_num: subquery.max_block_num, + prev_max_number: fragment("LAG(?, 1) OVER (ORDER BY ?)", subquery.max_block_num, subquery.min_block_num), + prev_confirmation_id: + fragment("LAG(?, 1) OVER (ORDER BY ?)", subquery.confirmation_id, subquery.min_block_num) + } + ) + + # The final query identifies confirmation transactions for which the ending block does + # not precede the starting block of the subsequent confirmation transaction. + main_query = + from( + subquery in subquery(confirmed_combined_ranges_query), + inner_join: current_transaction in LifecycleTransaction, + on: subquery.confirmation_id == current_transaction.id, + left_join: previous_transaction in LifecycleTransaction, + on: subquery.prev_confirmation_id == previous_transaction.id, + select: {previous_transaction.block_number, current_transaction.block_number}, + where: subquery.min_block_num - 1 != subquery.prev_max_number or is_nil(subquery.prev_max_number), + order_by: [desc: subquery.min_block_num], + limit: 1 + ) + + main_query + |> Repo.one() + end + + @doc """ + Retrieves an AnyTrust keyset from the database using the provided keyset hash. + + It calls `Common.get_anytrust_keyset/1` with `api?: false` option to use + primary database. + + ## Parameters + - `keyset_hash`: A binary representing the hash of the keyset to be retrieved. + + ## Returns + - A map containing information about the AnyTrust keyset, otherwise an empty map. 
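Note that the underlying `Common.get_anytrust_keyset/2` (shown earlier in this diff) accepts the hash either as a `0x`-prefixed hex string or as raw bytes, so callers do not need to normalize it first. An illustrative call with a placeholder hash:

```elixir
alias Explorer.Chain.Arbitrum.Reader.Indexer.Settlement

# A made-up 32-byte hash in hex form; returns %{} when no keyset is stored.
keyset = Settlement.get_anytrust_keyset("0x" <> String.duplicate("ab", 32))
```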
+ """ + @spec get_anytrust_keyset(binary()) :: map() + def get_anytrust_keyset(keyset_hash) do + Common.get_anytrust_keyset(keyset_hash, api?: false) + end + + @doc """ + Retrieves the batch numbers of missing L1 batches within a specified range. + + This function constructs a query to find the batch numbers of L1 batches that + are missing within the given range of batch numbers. It uses a right join with + a generated series to identify batch numbers that do not exist in the + `arbitrum_l1_batches` table. + + ## Parameters + - `start_batch_number`: The starting batch number of the search range. + - `end_batch_number`: The ending batch number of the search range. + + ## Returns + - A list of batch numbers in ascending order that are missing within the specified range. + """ + @spec find_missing_batches(non_neg_integer(), non_neg_integer()) :: [non_neg_integer()] + def find_missing_batches(start_batch_number, end_batch_number) + when is_integer(start_batch_number) and is_integer(end_batch_number) and end_batch_number >= start_batch_number do + query = + from(batch in L1Batch, + right_join: + missing_range in fragment( + """ + ( + SELECT distinct b1.number + FROM generate_series((?)::integer, (?)::integer) AS b1(number) + WHERE NOT EXISTS + (SELECT 1 FROM arbitrum_l1_batches b2 WHERE b2.number=b1.number) + ORDER BY b1.number DESC + ) + """, + ^start_batch_number, + ^end_batch_number + ), + on: batch.number == missing_range.number, + select: missing_range.number, + order_by: missing_range.number, + distinct: missing_range.number + ) + + query + |> Repo.all() + end + + @doc """ + Retrieves L1 block numbers for the given list of batch numbers. + + This function finds the numbers of L1 blocks that include L1 transactions + associated with batches within the specified list of batch numbers. + + ## Parameters + - `batch_numbers`: A list of batch numbers for which to retrieve the L1 block numbers. + + ## Returns + - A map where the keys are batch numbers and the values are corresponding L1 block numbers. + """ + @spec get_l1_blocks_of_batches_by_numbers([non_neg_integer()]) :: %{non_neg_integer() => FullBlock.block_number()} + def get_l1_blocks_of_batches_by_numbers(batch_numbers) when is_list(batch_numbers) do + query = + from(batch in L1Batch, + join: l1tx in assoc(batch, :commitment_transaction), + where: batch.number in ^batch_numbers, + select: {batch.number, l1tx.block_number} + ) + + query + |> Repo.all() + |> Enum.reduce(%{}, fn {batch_number, l1_block_number}, acc -> + Map.put(acc, batch_number, l1_block_number) + end) + end + + @doc """ + Retrieves the minimum and maximum batch numbers of L1 batches. + + ## Returns + - A tuple containing the minimum and maximum batch numbers or `{nil, nil}` if no batches are found. + """ + @spec get_min_max_batch_numbers() :: {non_neg_integer(), non_neg_integer()} | {nil | nil} + def get_min_max_batch_numbers do + query = + from(batch in L1Batch, + select: {min(batch.number), max(batch.number)} + ) + + Repo.one(query, timeout: :infinity) + end +end diff --git a/apps/explorer/lib/explorer/chain/block.ex b/apps/explorer/lib/explorer/chain/block.ex index 97b64444ad65..45312112ec3b 100644 --- a/apps/explorer/lib/explorer/chain/block.ex +++ b/apps/explorer/lib/explorer/chain/block.ex @@ -439,16 +439,48 @@ defmodule Explorer.Chain.Block do def uncle_reward_coef, do: @uncle_reward_coef + # Gets EIP-1559 config actual for the given block number. + # If not found, returns EIP_1559_BASE_FEE_MAX_CHANGE_DENOMINATOR and EIP_1559_ELASTICITY_MULTIPLIER env values. 
+  #
+  # ## Parameters
+  # - `block_number`: The given block number.
+  #
+  # ## Returns
+  # - `{denominator, multiplier}` tuple.
+  @spec get_eip1559_config(non_neg_integer()) :: {non_neg_integer(), non_neg_integer()}
+  defp get_eip1559_config(block_number) do
+    with true <- Application.get_env(:explorer, :chain_type) == :optimism,
+         # credo:disable-for-next-line Credo.Check.Design.AliasUsage
+         config = Explorer.Chain.Optimism.EIP1559ConfigUpdate.actual_config_for_block(block_number),
+         false <- is_nil(config) do
+      config
+    else
+      _ ->
+        {Application.get_env(:explorer, :base_fee_max_change_denominator),
+         Application.get_env(:explorer, :elasticity_multiplier)}
+    end
+  end
+
   @doc """
   Calculates the gas target for a given block.
 
   The gas target represents the percentage by which the actual gas used is above or below the gas target for the block,
   adjusted by the elasticity multiplier. If the `gas_limit` is greater than 0, it calculates the ratio of `gas_used`
   to `gas_limit` adjusted by this multiplier.
+
+  The multiplier is read from the `EIP_1559_ELASTICITY_MULTIPLIER` env variable or from the `op_eip1559_config_updates` table
+  as a dynamic parameter (if the OP Holocene upgrade is activated).
+
+  ## Parameters
+  - `block`: A map representing the block for which the gas target should be calculated.
+
+  ## Returns
+  - A float value representing the gas target percentage.
   """
   @spec gas_target(t()) :: float()
   def gas_target(block) do
     if Decimal.compare(block.gas_limit, 0) == :gt do
-      elasticity_multiplier = Application.get_env(:explorer, :elasticity_multiplier)
+      {_, elasticity_multiplier} = get_eip1559_config(block.number)
+
       ratio = Decimal.div(block.gas_used, Decimal.div(block.gas_limit, elasticity_multiplier))
       ratio |> Decimal.sub(1) |> Decimal.mult(100) |> Decimal.to_float()
     else
@@ -478,11 +510,11 @@ defmodule Explorer.Chain.Block do
       gas_target = gas_limit / elasticity_multiplier
       base_fee_for_next_block = base_fee_per_gas + (base_fee_per_gas * gas_used_delta / gas_target / base_fee_max_change_denominator)
 
-  where elasticity_multiplier is an env variable `EIP_1559_ELASTICITY_MULTIPLIER`,
-  `gas_used_delta` is the difference between the actual gas used and the target gas
-  and `base_fee_max_change_denominator` is an env variable `EIP_1559_BASE_FEE_MAX_CHANGE_DENOMINATOR` that limits the maximum change of the base fee from one block to the next.
-
-
+  where `elasticity_multiplier` is an env variable `EIP_1559_ELASTICITY_MULTIPLIER` or the dynamic value
+  obtained from the `op_eip1559_config_updates` database table. The `gas_used_delta` is the difference between
+  the actual gas used and the target gas. The `base_fee_max_change_denominator` is an env variable
+  `EIP_1559_BASE_FEE_MAX_CHANGE_DENOMINATOR` (or the dynamic value obtained from the `op_eip1559_config_updates`
+  table) that limits the maximum change of the base fee from one block to the next.
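To make the formula above concrete, here is a worked example using the common protocol defaults (denominator 8, elasticity multiplier 2); all numbers are illustrative:

```elixir
gas_limit = 30_000_000
gas_used = 22_500_000
base_fee_per_gas = 16                  # gwei, illustrative
elasticity_multiplier = 2
base_fee_max_change_denominator = 8

gas_target = gas_limit / elasticity_multiplier    # 15_000_000.0
gas_used_delta = gas_used - gas_target            # 7_500_000.0

base_fee_per_gas + base_fee_per_gas * gas_used_delta / gas_target / base_fee_max_change_denominator
# => 17.0 — the block was fuller than the target, so the base fee rises by 1/16
```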
""" @spec next_block_base_fee_per_gas :: Decimal.t() | nil def next_block_base_fee_per_gas do @@ -501,8 +533,7 @@ defmodule Explorer.Chain.Block do @spec next_block_base_fee_per_gas(t()) :: Decimal.t() | nil def next_block_base_fee_per_gas(block) do - elasticity_multiplier = Application.get_env(:explorer, :elasticity_multiplier) - base_fee_max_change_denominator = Application.get_env(:explorer, :base_fee_max_change_denominator) + {base_fee_max_change_denominator, elasticity_multiplier} = get_eip1559_config(block.number) gas_target = Decimal.div(block.gas_limit, elasticity_multiplier) @@ -531,7 +562,7 @@ defmodule Explorer.Chain.Block do {_count, updated_numbers} = Repo.update_all( from(b in Block, join: s in subquery(query), on: b.hash == s.hash, select: b.number), - set: [refetch_needed: true] + set: [refetch_needed: true, updated_at: Timex.now()] ) MissingRangesManipulator.add_ranges_by_block_numbers(updated_numbers) diff --git a/apps/explorer/lib/explorer/chain/cache/background_migrations.ex b/apps/explorer/lib/explorer/chain/cache/background_migrations.ex index ebe0f199b2da..fb6d3cd9d5cb 100644 --- a/apps/explorer/lib/explorer/chain/cache/background_migrations.ex +++ b/apps/explorer/lib/explorer/chain/cache/background_migrations.ex @@ -1,6 +1,22 @@ defmodule Explorer.Chain.Cache.BackgroundMigrations do @moduledoc """ - Caches background migrations' status. + Caches the completion status of various background database migrations in the Blockscout system. + + This module leverages the MapCache behavior to maintain an in-memory cache of whether specific + database migrations have completed. It tracks the status of several critical migrations: + + * Transactions denormalization + * Address token balance token type migrations (both current and historical) + * Token transfer token type migrations + * Sanitization of duplicated log index logs + * Arbitrum DA records normalization + + Each migration status is cached to avoid frequent database checks, with a fallback mechanism + that asynchronously updates the cache when a status is not found. The default status for + any uncached migration is `false`, indicating the migration is not complete. + + The cache is particularly useful during the application startup and for performance-critical + operations that need to quickly check if certain data migrations have been completed. 
""" require Logger @@ -11,13 +27,17 @@ defmodule Explorer.Chain.Cache.BackgroundMigrations do key: :tb_token_type_finished, key: :ctb_token_type_finished, key: :tt_denormalization_finished, - key: :sanitize_duplicated_log_index_logs_finished + key: :sanitize_duplicated_log_index_logs_finished, + key: :backfill_multichain_search_db_finished, + key: :arbitrum_da_records_normalization_finished @dialyzer :no_match alias Explorer.Migrator.{ AddressCurrentTokenBalanceTokenType, AddressTokenBalanceTokenType, + ArbitrumDaRecordsNormalization, + BackfillMultichainSearchDB, SanitizeDuplicatedLogIndexLogs, TokenTransferTokenType, TransactionsDenormalization @@ -62,4 +82,20 @@ defmodule Explorer.Chain.Cache.BackgroundMigrations do {:return, false} end + + defp handle_fallback(:backfill_multichain_search_db_finished) do + Task.start_link(fn -> + set_backfill_multichain_search_db_finished(BackfillMultichainSearchDB.migration_finished?()) + end) + + {:return, false} + end + + defp handle_fallback(:arbitrum_da_records_normalization_finished) do + Task.start_link(fn -> + set_arbitrum_da_records_normalization_finished(ArbitrumDaRecordsNormalization.migration_finished?()) + end) + + {:return, false} + end end diff --git a/apps/explorer/lib/explorer/chain/cache/gas_price_oracle.ex b/apps/explorer/lib/explorer/chain/cache/gas_price_oracle.ex index 6e52a045d55a..4bf207f68f40 100644 --- a/apps/explorer/lib/explorer/chain/cache/gas_price_oracle.ex +++ b/apps/explorer/lib/explorer/chain/cache/gas_price_oracle.ex @@ -186,13 +186,14 @@ defmodule Explorer.Chain.Cache.GasPriceOracle do %Decimal{} = base_fee -> base_fee_wei = base_fee |> Wei.from(:wei) exchange_rate = Market.get_coin_exchange_rate() - average_block_time = get_average_block_time() + gas_prices = compose_gas_price(base_fee_wei, average_block_time, exchange_rate, base_fee_wei, 0) + %{ - slow: compose_gas_price(base_fee_wei, average_block_time, exchange_rate, base_fee_wei, 0), - average: compose_gas_price(base_fee_wei, average_block_time, exchange_rate, base_fee_wei, 0), - fast: compose_gas_price(base_fee_wei, average_block_time, exchange_rate, base_fee_wei, 0) + slow: gas_prices, + average: gas_prices, + fast: gas_prices } _ -> @@ -280,7 +281,12 @@ defmodule Explorer.Chain.Cache.GasPriceOracle do defp compose_gas_price(fee, time, exchange_rate, base_fee, priority_fee) do %{ price: fee |> format_wei(), - time: time && time |> Decimal.to_float(), + time: + case time do + time when is_float(time) -> time + %Decimal{} = time -> Decimal.to_float(time) + _ -> nil + end, fiat_price: fiat_fee(fee, exchange_rate), base_fee: base_fee |> format_wei(), priority_fee: base_fee && priority_fee && priority_fee |> Decimal.new() |> Wei.from(:wei) |> format_wei(), diff --git a/apps/explorer/lib/explorer/chain/celo/helper.ex b/apps/explorer/lib/explorer/chain/celo/helper.ex index 0ede3c91fa09..a5995f327335 100644 --- a/apps/explorer/lib/explorer/chain/celo/helper.ex +++ b/apps/explorer/lib/explorer/chain/celo/helper.ex @@ -91,4 +91,20 @@ defmodule Explorer.Chain.Celo.Helper do def block_number_to_epoch_number(block_number) when is_integer(block_number) do (block_number / @blocks_per_epoch) |> Float.ceil() |> trunc() end + + @doc """ + Convert the burn fraction from FixidityLib value to decimal. 
+ + ## Examples + + iex> Explorer.Chain.Celo.Helper.burn_fraction_decimal(800_000_000_000_000_000_000_000) + Decimal.new("0.800000000000000000000000") + """ + @spec burn_fraction_decimal(integer()) :: Decimal.t() + def burn_fraction_decimal(burn_fraction_fixidity_lib) + when is_integer(burn_fraction_fixidity_lib) do + base = Decimal.new(1, 1, 24) + fraction = Decimal.new(1, burn_fraction_fixidity_lib, 0) + Decimal.div(fraction, base) + end end diff --git a/apps/explorer/lib/explorer/chain/denormalization_helper.ex b/apps/explorer/lib/explorer/chain/denormalization_helper.ex index 0839080ad23a..2af7759d1c3b 100644 --- a/apps/explorer/lib/explorer/chain/denormalization_helper.ex +++ b/apps/explorer/lib/explorer/chain/denormalization_helper.ex @@ -46,6 +46,20 @@ defmodule Explorer.Chain.DenormalizationHelper do end end + @doc """ + Checks if the transaction denormalization process has been completed. + + This function retrieves the status of the transaction denormalization + background migration. The denormalization process copies `block.consensus` and + `block.timestamp` to the respective fields in the transactions table for all + transactions with a `block_hash`. + + ## Returns + - `true` if all transactions with a `block_hash` have `block_consensus` and + `block_timestamp` set. + - `false` if the migration is still ongoing or the status is unknown. + """ + @spec transactions_denormalization_finished?() :: boolean() def transactions_denormalization_finished?, do: BackgroundMigrations.get_transactions_denormalization_finished() def tt_denormalization_finished?, do: BackgroundMigrations.get_tt_denormalization_finished() diff --git a/apps/explorer/lib/explorer/chain/fetcher/addresses_blacklist.ex b/apps/explorer/lib/explorer/chain/fetcher/addresses_blacklist.ex new file mode 100644 index 000000000000..64a628ead0b3 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/fetcher/addresses_blacklist.ex @@ -0,0 +1,121 @@ +defmodule Explorer.Chain.Fetcher.AddressesBlacklist do + @moduledoc """ + General fetcher for addresses blacklist + """ + use GenServer + + @cache_name :addresses_blacklist + + @doc """ + Fetches the addresses blacklist. 
+ """ + @callback fetch_addresses_blacklist() :: MapSet.t() + + @impl true + @spec init(any()) :: {:ok, nil} + def init(_) do + :ets.new(@cache_name, [ + :set, + :named_table, + :public, + read_concurrency: true + ]) + + GenServer.cast(__MODULE__, :fetch) + + {:ok, nil} + end + + @spec start_link(any()) :: :ignore | {:error, any()} | {:ok, pid()} + def start_link(_) do + GenServer.start_link(__MODULE__, :ok, name: __MODULE__) + end + + @impl true + def handle_cast(:fetch, state) do + run_fetch_task() + {:noreply, state} + end + + @impl true + def handle_info(:fetch, state) do + run_fetch_task() + {:noreply, state} + end + + @impl true + def handle_info({_ref, _result}, state) do + {:noreply, state} + end + + @impl true + def handle_info({:DOWN, _ref, :process, _pid, :normal}, state) do + Process.send_after(self(), :fetch, update_interval()) + {:noreply, state} + end + + @impl true + def handle_info({:DOWN, _ref, :process, _pid, _reason}, state) do + Process.send_after(self(), :fetch, retry_interval()) + {:noreply, state} + end + + defp run_fetch_task do + Task.Supervisor.async_nolink(Explorer.GenesisDataTaskSupervisor, fn -> + select_provider_module().fetch_addresses_blacklist() + |> MapSet.to_list() + |> save_in_ets_cache() + end) + end + + defp save_in_ets_cache(blacklist) do + :ets.delete_all_objects(@cache_name) + :ets.insert(@cache_name, blacklist) + end + + defp config do + Application.get_env(:explorer, Explorer.Chain.Fetcher.AddressesBlacklist) + end + + @spec url() :: any() + def url do + config()[:url] + end + + @spec enabled?() :: any() + defp enabled? do + config()[:enabled] + end + + @spec update_interval() :: any() + defp update_interval do + config()[:update_interval] + end + + @spec retry_interval() :: any() + defp retry_interval do + config()[:retry_interval] + end + + defp select_provider_module do + case config()[:provider] do + _ -> + Explorer.Chain.Fetcher.AddressesBlacklist.Blockaid + end + end + + @doc """ + Checks if the given address is blacklisted. + + ## Parameters + - `address_hash`: The address to check. + + ## Returns + - `true` if the address is blacklisted. + - `false` if the address is not blacklisted. 
+ """ + @spec blacklisted?(any()) :: boolean() + def blacklisted?(address_hash) do + enabled?() && :ets.member(@cache_name, address_hash) + end +end diff --git a/apps/explorer/lib/explorer/chain/fetcher/addresses_blacklist/blockaid.ex b/apps/explorer/lib/explorer/chain/fetcher/addresses_blacklist/blockaid.ex new file mode 100644 index 000000000000..140f37e3c8b8 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/fetcher/addresses_blacklist/blockaid.ex @@ -0,0 +1,44 @@ +defmodule Explorer.Chain.Fetcher.AddressesBlacklist.Blockaid do + @moduledoc """ + Fetcher for addresses blacklist from blockaid provider + """ + alias Explorer.Chain + alias Explorer.Chain.Fetcher.AddressesBlacklist + + @behaviour AddressesBlacklist + + @keys_to_blacklist ["OFAC", "Malicious"] + @timeout 60_000 + + @impl AddressesBlacklist + def fetch_addresses_blacklist do + case HTTPoison.get(AddressesBlacklist.url(), [], recv_timeout: @timeout, timeout: @timeout) do + {:ok, %HTTPoison.Response{status_code: 200, body: body}} -> + body + |> Jason.decode() + |> parse_blacklist() + + _ -> + MapSet.new() + end + end + + defp parse_blacklist({:ok, json}) when is_map(json) do + @keys_to_blacklist + |> Enum.reduce([], fn key, acc -> + acc ++ + (json + |> Map.get(key, []) + |> Enum.map(fn address_hash_string -> + address_hash_or_nil = Chain.string_to_address_hash_or_nil(address_hash_string) + address_hash_or_nil && {address_hash_or_nil, nil} + end) + |> Enum.reject(&is_nil/1)) + end) + |> MapSet.new() + end + + defp parse_blacklist({:error, _}) do + MapSet.new() + end +end diff --git a/apps/explorer/lib/explorer/chain/filecoin/id.ex b/apps/explorer/lib/explorer/chain/filecoin/id.ex index 74ce571e75c9..8c73460f6e90 100644 --- a/apps/explorer/lib/explorer/chain/filecoin/id.ex +++ b/apps/explorer/lib/explorer/chain/filecoin/id.ex @@ -53,7 +53,7 @@ defmodule Explorer.Chain.Filecoin.IDAddress do }} -> {:ok, %__MODULE__{value: value}} - :error -> + _ -> :error end end diff --git a/apps/explorer/lib/explorer/chain/import.ex b/apps/explorer/lib/explorer/chain/import.ex index 2a2ad02d26df..fc75a873fbed 100644 --- a/apps/explorer/lib/explorer/chain/import.ex +++ b/apps/explorer/lib/explorer/chain/import.ex @@ -12,12 +12,17 @@ defmodule Explorer.Chain.Import do require Logger @stages [ + [ + Import.Stage.Blocks + ], [ Import.Stage.Main ], [ Import.Stage.BlockTransactionReferencing, Import.Stage.TokenReferencing, + Import.Stage.TokenInstances, + Import.Stage.Logs, Import.Stage.InternalTransactions, Import.Stage.ChainTypeSpecific ] @@ -69,7 +74,7 @@ defmodule Explorer.Chain.Import do @type all_result :: {:ok, %{unquote_splicing(quoted_runner_imported)}} - | {:error, [Changeset.t()] | :timeout} + | {:error, [Changeset.t()] | :timeout | :insert_to_multichain_search_db_failed} | {:error, step :: Ecto.Multi.name(), failed_value :: any(), changes_so_far :: %{optional(Ecto.Multi.name()) => any()}} diff --git a/apps/explorer/lib/explorer/chain/import/runner/arbitrum/batch_to_da_blobs.ex b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/batch_to_da_blobs.ex new file mode 100644 index 000000000000..752b94d4da6c --- /dev/null +++ b/apps/explorer/lib/explorer/chain/import/runner/arbitrum/batch_to_da_blobs.ex @@ -0,0 +1,102 @@ +defmodule Explorer.Chain.Import.Runner.Arbitrum.BatchToDaBlobs do + @moduledoc """ + Bulk imports of Explorer.Chain.Arbitrum.BatchToDaBlob. 
+ """ + + require Ecto.Query + + alias Ecto.{Changeset, Multi, Repo} + alias Explorer.Chain.Arbitrum.BatchToDaBlob + alias Explorer.Chain.Import + alias Explorer.Prometheus.Instrumenter + + import Ecto.Query, only: [from: 2] + + @behaviour Import.Runner + + # milliseconds + @timeout 60_000 + + @type imported :: [BatchToDaBlob.t()] + + @impl Import.Runner + def ecto_schema_module, do: BatchToDaBlob + + @impl Import.Runner + def option_key, do: :arbitrum_batches_to_da_blobs + + @impl Import.Runner + @spec imported_table_row() :: %{:value_description => binary(), :value_type => binary()} + def imported_table_row do + %{ + value_type: "[#{ecto_schema_module()}.t()]", + value_description: "List of `t:#{ecto_schema_module()}.t/0`s" + } + end + + @impl Import.Runner + @spec run(Multi.t(), list(), map()) :: Multi.t() + def run(multi, changes_list, %{timestamps: timestamps} = options) do + insert_options = + options + |> Map.get(option_key(), %{}) + |> Map.take(~w(on_conflict timeout)a) + |> Map.put_new(:timeout, @timeout) + |> Map.put(:timestamps, timestamps) + + Multi.run(multi, :insert_batches_to_da_blobs, fn repo, _ -> + Instrumenter.block_import_stage_runner( + fn -> insert(repo, changes_list, insert_options) end, + :block_referencing, + :arbitrum_batches_to_da_blobs, + :arbitrum_batches_to_da_blobs + ) + end) + end + + @impl Import.Runner + def timeout, do: @timeout + + @spec insert(Repo.t(), [map()], %{required(:timeout) => timeout(), required(:timestamps) => Import.timestamps()}) :: + {:ok, [BatchToDaBlob.t()]} + | {:error, [Changeset.t()]} + def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do + on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) + + # Enforce BatchToDaBlob ShareLocks order (see docs: sharelock.md) + ordered_changes_list = Enum.sort_by(changes_list, & &1.batch_number) + + {:ok, inserted} = + Import.insert_changes_list( + repo, + ordered_changes_list, + for: BatchToDaBlob, + returning: true, + timeout: timeout, + timestamps: timestamps, + conflict_target: :batch_number, + on_conflict: on_conflict + ) + + {:ok, inserted} + end + + defp default_on_conflict do + from( + rec in BatchToDaBlob, + update: [ + set: [ + # don't update `batch_number` as it is a primary key and used for the conflict target + data_blob_id: fragment("EXCLUDED.data_blob_id"), + inserted_at: fragment("LEAST(?, EXCLUDED.inserted_at)", rec.inserted_at), + updated_at: fragment("GREATEST(?, EXCLUDED.updated_at)", rec.updated_at) + ] + ], + where: + fragment( + "EXCLUDED.data_blob_id IS DISTINCT FROM ?", + rec.data_blob_id + ) + ) + end +end diff --git a/apps/explorer/lib/explorer/chain/import/runner/blocks.ex b/apps/explorer/lib/explorer/chain/import/runner/blocks.ex index f6f435a85d9b..6de55eda49cc 100644 --- a/apps/explorer/lib/explorer/chain/import/runner/blocks.ex +++ b/apps/explorer/lib/explorer/chain/import/runner/blocks.ex @@ -497,7 +497,7 @@ defmodule Explorer.Chain.Import.Runner.Blocks do query = from(cb in Address.CoinBalance, - select: cb.address_hash, + select: {cb.address_hash, cb.block_number}, inner_join: ordered_address_coin_balance in subquery(ordered_query), on: ordered_address_coin_balance.address_hash == cb.address_hash and @@ -505,9 +505,9 @@ defmodule Explorer.Chain.Import.Runner.Blocks do ) try do - {_count, deleted_coin_balances_address_hashes} = repo.delete_all(query, timeout: timeout) + {_count, deleted_coin_balances} = repo.delete_all(query, timeout: timeout) - {:ok, 
deleted_coin_balances_address_hashes} + {:ok, deleted_coin_balances} rescue postgrex_error in Postgrex.Error -> {:error, %{exception: postgrex_error, block_numbers: non_consensus_block_numbers}} @@ -516,10 +516,22 @@ defmodule Explorer.Chain.Import.Runner.Blocks do defp derive_address_fetched_coin_balances(_repo, [], _options), do: {:ok, []} - defp derive_address_fetched_coin_balances(repo, deleted_balances_address_hashes, options) do + defp derive_address_fetched_coin_balances(repo, deleted_coin_balances, options) do + {deleted_balances_address_hashes, deleted_balances_block_numbers} = Enum.unzip(deleted_coin_balances) + + filtered_address_hashes_query = + from(a in Address, + where: + a.hash in ^deleted_balances_address_hashes and + a.fetched_coin_balance_block_number in ^deleted_balances_block_numbers, + select: a.hash + ) + + filtered_address_hashes = repo.all(filtered_address_hashes_query) + last_balances_query = from(cb in Address.CoinBalance, - where: cb.address_hash in ^deleted_balances_address_hashes, + where: cb.address_hash in ^filtered_address_hashes, where: not is_nil(cb.value), distinct: cb.address_hash, order_by: [asc: cb.address_hash, desc: cb.block_number], diff --git a/apps/explorer/lib/explorer/chain/import/runner/optimism/eip1559_config_updates.ex b/apps/explorer/lib/explorer/chain/import/runner/optimism/eip1559_config_updates.ex new file mode 100644 index 000000000000..65ad40ee00dd --- /dev/null +++ b/apps/explorer/lib/explorer/chain/import/runner/optimism/eip1559_config_updates.ex @@ -0,0 +1,104 @@ +defmodule Explorer.Chain.Import.Runner.Optimism.EIP1559ConfigUpdates do + @moduledoc """ + Bulk imports `t:Explorer.Chain.Optimism.EIP1559ConfigUpdate.t/0`. + """ + + require Ecto.Query + + alias Ecto.{Changeset, Multi, Repo} + alias Explorer.Chain.Import + alias Explorer.Chain.Optimism.EIP1559ConfigUpdate + alias Explorer.Prometheus.Instrumenter + + import Ecto.Query, only: [from: 2] + + @behaviour Import.Runner + + # milliseconds + @timeout 60_000 + + @type imported :: [EIP1559ConfigUpdate.t()] + + @impl Import.Runner + def ecto_schema_module, do: EIP1559ConfigUpdate + + @impl Import.Runner + def option_key, do: :optimism_eip1559_config_updates + + @impl Import.Runner + def imported_table_row do + %{ + value_type: "[#{ecto_schema_module()}.t()]", + value_description: "List of `t:#{ecto_schema_module()}.t/0`s" + } + end + + @impl Import.Runner + def run(multi, changes_list, %{timestamps: timestamps} = options) do + insert_options = + options + |> Map.get(option_key(), %{}) + |> Map.take(~w(on_conflict timeout)a) + |> Map.put_new(:timeout, @timeout) + |> Map.put(:timestamps, timestamps) + + Multi.run(multi, :insert_eip1559_config_updates, fn repo, _ -> + Instrumenter.block_import_stage_runner( + fn -> insert(repo, changes_list, insert_options) end, + :block_referencing, + :optimism_eip1559_config_updates, + :optimism_eip1559_config_updates + ) + end) + end + + @impl Import.Runner + def timeout, do: @timeout + + @spec insert(Repo.t(), [map()], %{required(:timeout) => timeout(), required(:timestamps) => Import.timestamps()}) :: + {:ok, [EIP1559ConfigUpdate.t()]} + | {:error, [Changeset.t()]} + def insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do + on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) + + # Enforce EIP1559ConfigUpdate ShareLocks order (see docs: sharelock.md) + ordered_changes_list = Enum.sort_by(changes_list, & &1.l2_block_number) + + {:ok, inserted} = + 
Import.insert_changes_list( + repo, + ordered_changes_list, + for: EIP1559ConfigUpdate, + returning: true, + timeout: timeout, + timestamps: timestamps, + conflict_target: :l2_block_number, + on_conflict: on_conflict + ) + + {:ok, inserted} + end + + defp default_on_conflict do + from( + update in EIP1559ConfigUpdate, + update: [ + set: [ + # don't update `l2_block_number` as it is a primary key and used for the conflict target + l2_block_hash: fragment("EXCLUDED.l2_block_hash"), + base_fee_max_change_denominator: fragment("EXCLUDED.base_fee_max_change_denominator"), + elasticity_multiplier: fragment("EXCLUDED.elasticity_multiplier"), + inserted_at: fragment("LEAST(?, EXCLUDED.inserted_at)", update.inserted_at), + updated_at: fragment("GREATEST(?, EXCLUDED.updated_at)", update.updated_at) + ] + ], + where: + fragment( + "(EXCLUDED.l2_block_hash, EXCLUDED.base_fee_max_change_denominator, EXCLUDED.elasticity_multiplier) IS DISTINCT FROM (?, ?, ?)", + update.l2_block_hash, + update.base_fee_max_change_denominator, + update.elasticity_multiplier + ) + ) + end +end diff --git a/apps/explorer/lib/explorer/chain/import/runner/optimism/withdrawal_events.ex b/apps/explorer/lib/explorer/chain/import/runner/optimism/withdrawal_events.ex index 401efdb3bd89..1816db039549 100644 --- a/apps/explorer/lib/explorer/chain/import/runner/optimism/withdrawal_events.ex +++ b/apps/explorer/lib/explorer/chain/import/runner/optimism/withdrawal_events.ex @@ -62,7 +62,7 @@ defmodule Explorer.Chain.Import.Runner.Optimism.WithdrawalEvents do on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) # Enforce WithdrawalEvent ShareLocks order (see docs: sharelock.md) - ordered_changes_list = Enum.sort_by(changes_list, &{&1.withdrawal_hash, &1.l1_event_type}) + ordered_changes_list = Enum.sort_by(changes_list, &{&1.withdrawal_hash, &1.l1_event_type, &1.l1_transaction_hash}) {:ok, inserted} = Import.insert_changes_list( @@ -72,7 +72,7 @@ defmodule Explorer.Chain.Import.Runner.Optimism.WithdrawalEvents do returning: true, timeout: timeout, timestamps: timestamps, - conflict_target: [:withdrawal_hash, :l1_event_type], + conflict_target: [:withdrawal_hash, :l1_event_type, :l1_transaction_hash], on_conflict: on_conflict ) @@ -86,8 +86,8 @@ defmodule Explorer.Chain.Import.Runner.Optimism.WithdrawalEvents do set: [ # don't update `withdrawal_hash` as it is a part of the composite primary key and used for the conflict target # don't update `l1_event_type` as it is a part of the composite primary key and used for the conflict target + # don't update `l1_transaction_hash` as it is a part of the composite primary key and used for the conflict target l1_timestamp: fragment("EXCLUDED.l1_timestamp"), - l1_transaction_hash: fragment("EXCLUDED.l1_transaction_hash"), l1_block_number: fragment("EXCLUDED.l1_block_number"), game_index: fragment("EXCLUDED.game_index"), inserted_at: fragment("LEAST(?, EXCLUDED.inserted_at)", we.inserted_at), @@ -96,9 +96,8 @@ defmodule Explorer.Chain.Import.Runner.Optimism.WithdrawalEvents do ], where: fragment( - "(EXCLUDED.l1_timestamp, EXCLUDED.l1_transaction_hash, EXCLUDED.l1_block_number, EXCLUDED.game_index) IS DISTINCT FROM (?, ?, ?, ?)", + "(EXCLUDED.l1_timestamp, EXCLUDED.l1_block_number, EXCLUDED.game_index) IS DISTINCT FROM (?, ?, ?)", we.l1_timestamp, - we.l1_transaction_hash, we.l1_block_number, we.game_index ) diff --git a/apps/explorer/lib/explorer/chain/import/runner/signed_authorizations.ex b/apps/explorer/lib/explorer/chain/import/runner/signed_authorizations.ex index 
147f5c821dc7..2683c87bdcbd 100644 --- a/apps/explorer/lib/explorer/chain/import/runner/signed_authorizations.ex +++ b/apps/explorer/lib/explorer/chain/import/runner/signed_authorizations.ex @@ -64,11 +64,12 @@ defmodule Explorer.Chain.Import.Runner.SignedAuthorizations do defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps} = options) when is_list(changes_list) do on_conflict = Map.get_lazy(options, :on_conflict, &default_on_conflict/0) conflict_target = [:transaction_hash, :index] + ordered_changes_list = Enum.sort_by(changes_list, &{&1.transaction_hash, &1.index}) {:ok, _} = Import.insert_changes_list( repo, - changes_list, + ordered_changes_list, for: SignedAuthorization, on_conflict: on_conflict, conflict_target: conflict_target, diff --git a/apps/explorer/lib/explorer/chain/import/runner/tokens.ex b/apps/explorer/lib/explorer/chain/import/runner/tokens.ex index 618f814cc964..b68984b21cdd 100644 --- a/apps/explorer/lib/explorer/chain/import/runner/tokens.ex +++ b/apps/explorer/lib/explorer/chain/import/runner/tokens.ex @@ -96,9 +96,18 @@ defmodule Explorer.Chain.Import.Runner.Tokens do |> Map.put_new(:timeout, @timeout) |> Map.put(:timestamps, timestamps) - Multi.run(multi, :tokens, fn repo, _ -> + multi + |> Multi.run(:filter_token_params, fn repo, _ -> Instrumenter.block_import_stage_runner( - fn -> insert(repo, changes_list, insert_options) end, + fn -> filter_token_params(repo, changes_list) end, + :block_referencing, + :tokens, + :filter_token_params + ) + end) + |> Multi.run(:tokens, fn repo, %{filter_token_params: filtered_changes_list} -> + Instrumenter.block_import_stage_runner( + fn -> insert(repo, filtered_changes_list, insert_options) end, :block_referencing, :tokens, :tokens @@ -140,7 +149,27 @@ defmodule Explorer.Chain.Import.Runner.Tokens do ) end + defp filter_token_params(repo, changes_list) do + existing_token_map = + changes_list + |> Enum.map(& &1[:contract_address_hash]) + |> Enum.uniq() + |> Token.tokens_by_contract_address_hashes() + |> repo.all() + |> Map.new(&{&1.contract_address_hash, &1}) + + filtered_tokens = + Enum.filter(changes_list, fn token -> + existing_token = existing_token_map[token[:contract_address_hash]] + should_update?(token, existing_token) + end) + + {:ok, filtered_tokens} + end + if @bridged_tokens_enabled do + @fields_to_replace [:name, :symbol, :total_supply, :decimals, :type, :cataloged, :bridged, :skip_metadata] + def default_on_conflict do from( token in Token, @@ -176,6 +205,8 @@ defmodule Explorer.Chain.Import.Runner.Tokens do ) end else + @fields_to_replace [:name, :symbol, :total_supply, :decimals, :type, :cataloged, :skip_metadata] + def default_on_conflict do from( token in Token, @@ -209,4 +240,18 @@ defmodule Explorer.Chain.Import.Runner.Tokens do ) end end + + defp should_update?(_new_token, nil), do: true + + defp should_update?(new_token, existing_token) do + new_token_params = Map.take(new_token, @fields_to_replace) + + Enum.reduce_while(new_token_params, false, fn {key, value}, _acc -> + if Map.get(existing_token, key) == value do + {:cont, false} + else + {:halt, true} + end + end) + end end diff --git a/apps/explorer/lib/explorer/chain/import/stage/block_transaction_referencing.ex b/apps/explorer/lib/explorer/chain/import/stage/block_transaction_referencing.ex index 91c5a201206e..8c25a9a05495 100644 --- a/apps/explorer/lib/explorer/chain/import/stage/block_transaction_referencing.ex +++ b/apps/explorer/lib/explorer/chain/import/stage/block_transaction_referencing.ex @@ -10,7 +10,6 @@ defmodule 
Explorer.Chain.Import.Stage.BlockTransactionReferencing do @runners [ Runner.TokenTransfers, Runner.Transaction.Forks, - Runner.Logs, Runner.Block.Rewards, Runner.Block.SecondDegreeRelations, Runner.TransactionActions, diff --git a/apps/explorer/lib/explorer/chain/import/stage/blocks.ex b/apps/explorer/lib/explorer/chain/import/stage/blocks.ex new file mode 100644 index 000000000000..205acd55453f --- /dev/null +++ b/apps/explorer/lib/explorer/chain/import/stage/blocks.ex @@ -0,0 +1,27 @@ +defmodule Explorer.Chain.Import.Stage.Blocks do + @moduledoc """ + Import blocks. + """ + + alias Explorer.Chain.Import.{Runner, Stage} + + @behaviour Stage + + @runners [ + Runner.Blocks + ] + + @impl Stage + def runners, do: @runners + + @impl Stage + def all_runners, do: runners() + + @impl Stage + def multis(runner_to_changes_list, options) do + {final_multi, final_remaining_runner_to_changes_list} = + Stage.single_multi(runners(), runner_to_changes_list, options) + + {[final_multi], final_remaining_runner_to_changes_list} + end +end diff --git a/apps/explorer/lib/explorer/chain/import/stage/chain_type_specific.ex b/apps/explorer/lib/explorer/chain/import/stage/chain_type_specific.ex index 567bd3f6b181..a0629dca16b7 100644 --- a/apps/explorer/lib/explorer/chain/import/stage/chain_type_specific.ex +++ b/apps/explorer/lib/explorer/chain/import/stage/chain_type_specific.ex @@ -16,7 +16,8 @@ defmodule Explorer.Chain.Import.Stage.ChainTypeSpecific do Runner.Optimism.DisputeGames, Runner.Optimism.Deposits, Runner.Optimism.Withdrawals, - Runner.Optimism.WithdrawalEvents + Runner.Optimism.WithdrawalEvents, + Runner.Optimism.EIP1559ConfigUpdates ], polygon_edge: [ Runner.PolygonEdge.Deposits, @@ -50,7 +51,8 @@ defmodule Explorer.Chain.Import.Stage.ChainTypeSpecific do Runner.Arbitrum.L1Batches, Runner.Arbitrum.BatchBlocks, Runner.Arbitrum.BatchTransactions, - Runner.Arbitrum.DaMultiPurposeRecords + Runner.Arbitrum.DaMultiPurposeRecords, + Runner.Arbitrum.BatchToDaBlobs ], scroll: [ Runner.Scroll.BatchBundles, diff --git a/apps/explorer/lib/explorer/chain/import/stage/logs.ex b/apps/explorer/lib/explorer/chain/import/stage/logs.ex new file mode 100644 index 000000000000..739af1583d35 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/import/stage/logs.ex @@ -0,0 +1,27 @@ +defmodule Explorer.Chain.Import.Stage.Logs do + @moduledoc """ + Import logs. + """ + + alias Explorer.Chain.Import.{Runner, Stage} + + @behaviour Stage + + @runners [ + Runner.Logs + ] + + @impl Stage + def runners, do: @runners + + @impl Stage + def all_runners, do: runners() + + @impl Stage + def multis(runner_to_changes_list, options) do + {final_multi, final_remaining_runner_to_changes_list} = + Stage.single_multi(runners(), runner_to_changes_list, options) + + {[final_multi], final_remaining_runner_to_changes_list} + end +end diff --git a/apps/explorer/lib/explorer/chain/import/stage/main.ex b/apps/explorer/lib/explorer/chain/import/stage/main.ex index 3f3a581691d2..7df0f93aecf0 100644 --- a/apps/explorer/lib/explorer/chain/import/stage/main.ex +++ b/apps/explorer/lib/explorer/chain/import/stage/main.ex @@ -1,6 +1,6 @@ defmodule Explorer.Chain.Import.Stage.Main do @moduledoc """ - Imports main data (addresses, address_coin_balances, address_coin_balances_daily, tokens, blocks, transactions). + Imports main data (addresses, address_coin_balances, address_coin_balances_daily, tokens, transactions). 
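+
+  Note: with this change, blocks themselves are imported by the dedicated
+  `Explorer.Chain.Import.Stage.Blocks` stage and are no longer handled here.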
""" alias Explorer.Chain.Import.{Runner, Stage} @@ -11,7 +11,6 @@ defmodule Explorer.Chain.Import.Stage.Main do @rest_runners [ Runner.Tokens, - Runner.Blocks, Runner.Address.CoinBalances, Runner.Address.CoinBalancesDaily, Runner.Transactions diff --git a/apps/explorer/lib/explorer/chain/import/stage/token_instances.ex b/apps/explorer/lib/explorer/chain/import/stage/token_instances.ex new file mode 100644 index 000000000000..785930e6e18d --- /dev/null +++ b/apps/explorer/lib/explorer/chain/import/stage/token_instances.ex @@ -0,0 +1,27 @@ +defmodule Explorer.Chain.Import.Stage.TokenInstances do + @moduledoc """ + Import token instances. + """ + + alias Explorer.Chain.Import.{Runner, Stage} + + @behaviour Stage + + @runners [ + Runner.TokenInstances + ] + + @impl Stage + def runners, do: @runners + + @impl Stage + def all_runners, do: runners() + + @impl Stage + def multis(runner_to_changes_list, options) do + {final_multi, final_remaining_runner_to_changes_list} = + Stage.single_multi(runners(), runner_to_changes_list, options) + + {[final_multi], final_remaining_runner_to_changes_list} + end +end diff --git a/apps/explorer/lib/explorer/chain/import/stage/token_referencing.ex b/apps/explorer/lib/explorer/chain/import/stage/token_referencing.ex index 22cb8466e827..18af2e8563b9 100644 --- a/apps/explorer/lib/explorer/chain/import/stage/token_referencing.ex +++ b/apps/explorer/lib/explorer/chain/import/stage/token_referencing.ex @@ -8,7 +8,6 @@ defmodule Explorer.Chain.Import.Stage.TokenReferencing do @behaviour Stage @runners [ - Runner.TokenInstances, Runner.Address.TokenBalances, Runner.Address.CurrentTokenBalances ] diff --git a/apps/explorer/lib/explorer/chain/optimism/eip1559_config_update.ex b/apps/explorer/lib/explorer/chain/optimism/eip1559_config_update.ex new file mode 100644 index 000000000000..86ecf8de24c7 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/optimism/eip1559_config_update.ex @@ -0,0 +1,174 @@ +defmodule Explorer.Chain.Optimism.EIP1559ConfigUpdate do + @moduledoc "Models EIP-1559 config updates for Optimism (introduced by Holocene upgrade)." + + use Explorer.Schema + + import Explorer.Chain, only: [get_last_fetched_counter: 1, upsert_last_fetched_counter: 1] + + alias Explorer.Chain.{Block, Hash} + alias Explorer.Repo + + @counter_type "optimism_eip1559_config_updates_fetcher_last_l2_block_hash" + @required_attrs ~w(l2_block_number l2_block_hash base_fee_max_change_denominator elasticity_multiplier)a + + @typedoc """ + * `l2_block_number` - An L2 block number where the config update was registered. + * `l2_block_hash` - An L2 block hash where the config update was registered. + * `base_fee_max_change_denominator` - A new value of the denominator. + * `elasticity_multiplier` - A new value of the multiplier. + """ + @primary_key false + typed_schema "op_eip1559_config_updates" do + field(:l2_block_number, :integer, primary_key: true) + field(:l2_block_hash, Hash.Full) + field(:base_fee_max_change_denominator, :integer) + field(:elasticity_multiplier, :integer) + + timestamps() + end + + @doc """ + Validates that the attributes are valid. + """ + def changeset(%__MODULE__{} = updates, attrs \\ %{}) do + updates + |> cast(attrs, @required_attrs) + |> validate_required(@required_attrs) + end + + @doc """ + Reads the config actual before the specified block from the `op_eip1559_config_updates` table. + + ## Parameters + - `block_number`: The block number for which we need to read the actual config. 
+
+    ## Returns
+    - A `{denominator, multiplier}` tuple if the config exists.
+    - `nil` if the config is unknown.
+  """
+  @spec actual_config_for_block(non_neg_integer()) :: {non_neg_integer(), non_neg_integer()} | nil
+  def actual_config_for_block(block_number) do
+    query =
+      from(u in __MODULE__,
+        select: {u.base_fee_max_change_denominator, u.elasticity_multiplier},
+        where: u.l2_block_number < ^block_number,
+        order_by: [desc: u.l2_block_number],
+        limit: 1
+      )
+
+    Repo.one(query)
+  end
+
+  @doc """
+    Reads the last row from the `op_eip1559_config_updates` table.
+
+    ## Returns
+    - `{l2_block_number, l2_block_hash}` tuple for the last row.
+    - `{0, nil}` if there are no rows in the table.
+  """
+  @spec get_last_item() :: {non_neg_integer(), binary() | nil}
+  def get_last_item do
+    query =
+      from(u in __MODULE__, select: {u.l2_block_number, u.l2_block_hash}, order_by: [desc: u.l2_block_number], limit: 1)
+
+    query
+    |> Repo.one()
+    |> Kernel.||({0, nil})
+  end
+
+  @doc """
+    Removes rows from the `op_eip1559_config_updates` table which relate to
+    the pre-Holocene period or have an l2_block_number greater than the latest block number.
+    Such rows could have been created mistakenly due to an incorrect value of the
+    INDEXER_OPTIMISM_L2_HOLOCENE_TIMESTAMP env variable or due to a reorg.
+
+    ## Parameters
+    - `block_number`: L2 block number of the Holocene upgrade.
+    - `latest_block_number`: The latest block number.
+
+    ## Returns
+    - The number of removed rows.
+  """
+  @spec remove_invalid_updates(non_neg_integer(), integer()) :: non_neg_integer()
+
+  def remove_invalid_updates(0, latest_block_number) do
+    {deleted_count, _} =
+      Repo.delete_all(from(u in __MODULE__, where: u.l2_block_number > ^latest_block_number), timeout: :infinity)
+
+    deleted_count
+  end
+
+  def remove_invalid_updates(block_number, latest_block_number) do
+    {deleted_count, _} =
+      Repo.delete_all(
+        from(u in __MODULE__, where: u.l2_block_number < ^block_number or u.l2_block_number > ^latest_block_number),
+        timeout: :infinity
+      )
+
+    deleted_count
+  end
+
+  @doc """
+    Reads the block hash related to the last L2 block handled on the previous launch of the
+    Indexer.Fetcher.Optimism.EIP1559ConfigUpdate module from the `last_fetched_counters` table.
+
+    ## Returns
+    - The last L2 block hash in the form of a `0x` string.
+    - "0x0" if this is the first launch of the module or the counter is not found.
+  """
+  @spec last_l2_block_hash() :: binary()
+  def last_l2_block_hash do
+    "0x" <>
+      (@counter_type
+       |> get_last_fetched_counter()
+       |> Decimal.to_integer()
+       |> Integer.to_string(16)
+       |> String.pad_leading(64, "0"))
+  end
+
+  @doc """
+    Updates the last L2 block handled by the Indexer.Fetcher.Optimism.EIP1559ConfigUpdate module.
+    The new block hash is written to the `last_fetched_counters` table.
+
+    ## Parameters
+    - `block_hash`: The hash of the L2 block in the form of a `0x` string.
+
+    ## Returns
+    - nothing
+  """
+  @spec set_last_l2_block_hash(binary()) :: any()
+  def set_last_l2_block_hash(block_hash) do
+    {block_hash_integer, ""} =
+      block_hash
+      |> String.trim_leading("0x")
+      |> Integer.parse(16)
+
+    upsert_last_fetched_counter(%{
+      counter_type: @counter_type,
+      value: block_hash_integer
+    })
+  end
+
+  @doc """
+    Finds the closest block number whose timestamp is greater than or equal to the given timestamp.
+
+    ## Parameters
+    - `timestamp`: The given timestamp.
+
+    ## Returns
+    - The number of the found block.
+    - `nil` if the block is not found.
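+
+    ## Example
+
+        # Illustrative sketch only: the timestamp and the returned block
+        # number are hypothetical; the function returns the first consensus
+        # block whose timestamp is at or after the given one.
+        EIP1559ConfigUpdate.nearest_block_number_to_timestamp(~U[2024-01-01 00:00:00Z])
+        # => 19_000_000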
+ """ + @spec nearest_block_number_to_timestamp(DateTime.t()) :: non_neg_integer() | nil + def nearest_block_number_to_timestamp(timestamp) do + query = + from(b in Block, + select: b.number, + where: b.timestamp >= ^timestamp and b.consensus == true, + order_by: [asc: b.number], + limit: 1 + ) + + Repo.one(query) + end +end diff --git a/apps/explorer/lib/explorer/chain/optimism/frame_sequence.ex b/apps/explorer/lib/explorer/chain/optimism/frame_sequence.ex index 3ec97e2b7ab5..18ee44275dd3 100644 --- a/apps/explorer/lib/explorer/chain/optimism/frame_sequence.ex +++ b/apps/explorer/lib/explorer/chain/optimism/frame_sequence.ex @@ -167,11 +167,7 @@ defmodule Explorer.Chain.Optimism.FrameSequence do :l2_block_start => non_neg_integer(), :l2_block_end => non_neg_integer(), :transaction_count => non_neg_integer(), - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_count` property - :tx_count => non_neg_integer(), :l1_transaction_hashes => list(), - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l1_transaction_hashes` property - :l1_tx_hashes => list(), :batch_data_container => :in_blob4844 | :in_celestia | :in_calldata | nil } def prepare_base_info_for_batch( @@ -188,11 +184,7 @@ defmodule Explorer.Chain.Optimism.FrameSequence do :l2_block_start => l2_block_number_from, :l2_block_end => l2_block_number_to, :transaction_count => transaction_count, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `transaction_count` property - :tx_count => transaction_count, :l1_transaction_hashes => batch.l1_transaction_hashes, - # todo: keep next line for compatibility with frontend and remove when new frontend is bound to `l1_transaction_hashes` property - :l1_tx_hashes => batch.l1_transaction_hashes, :batch_data_container => batch_data_container } end diff --git a/apps/explorer/lib/explorer/chain/optimism/withdrawal.ex b/apps/explorer/lib/explorer/chain/optimism/withdrawal.ex index 917e14e4a1e1..cb1f1bec9650 100644 --- a/apps/explorer/lib/explorer/chain/optimism/withdrawal.ex +++ b/apps/explorer/lib/explorer/chain/optimism/withdrawal.ex @@ -162,14 +162,31 @@ defmodule Explorer.Chain.Optimism.Withdrawal do end) end - @spec status(map(), list() | nil) :: {String.t(), DateTime.t() | nil} @doc """ - Gets Optimism Withdrawal status and remaining time to unlock (when the status is `In challenge period`). + Gets Optimism Withdrawal status and unlock datetime (only for `In challenge period`). + + Since OP Fault Proofs implementation assumes having more than one WithdrawalProven events for + the same withdrawal, the function analyzes all the WithdrawalProven events and determines + the current withdrawal status for each of them. The first success status is taken and returned + as the final one. + + ## Parameters + - `w`: A map with the withdrawal info. + - `respected_games`: A list of games returned by the `respected_games()` function. + Used to avoid duplicated SQL requests when the `status` function + is called in a loop. If `nil`, the `respected_games()` function + is called internally. + + ## Returns + - `{status, datetime}` tuple where the `status` is the current withdrawal status, + `datetime` is the point of time when the challenge period ends. + (only for `In challenge period` status). 
""" + @spec status(map(), list() | nil) :: {String.t(), DateTime.t() | nil} def status(w, respected_games \\ nil) def status(w, respected_games) when is_nil(w.l1_transaction_hash) do - proven_event = proven_event_by_hash(w.hash) + proven_events = proven_events_by_hash(w.hash) respected_games = if is_nil(respected_games) do @@ -178,7 +195,7 @@ defmodule Explorer.Chain.Optimism.Withdrawal do respected_games end - if is_nil(proven_event) do + if proven_events == [] do cond do appropriate_games_found(w.l2_block_number, respected_games) -> {@withdrawal_status_ready_to_prove, nil} @@ -190,7 +207,7 @@ defmodule Explorer.Chain.Optimism.Withdrawal do {@withdrawal_status_waiting_for_state_root, nil} end else - handle_proven_status(proven_event, respected_games) + handle_proven_status(proven_events, respected_games) end end @@ -269,33 +286,81 @@ defmodule Explorer.Chain.Optimism.Withdrawal do end end - defp handle_proven_status({l1_timestamp, game_index}, respected_games) do - game = game_by_index(game_index) - - cond do - is_nil(game_index) and not Enum.empty?(respected_games) -> - # here we cannot exactly determine the status `Waiting a game to resolve` or - # `Ready for relay` or `In challenge period` - # as we don't know the game index. In this case we display the `Proven` status - {@withdrawal_status_proven, nil} - - is_nil(game) or DateTime.compare(l1_timestamp, game.created_at) == :lt -> - # the old status determining approach - pre_fault_proofs_status(l1_timestamp) - - true -> - # the new status determining approach - post_fault_proofs_status(l1_timestamp, game) - end + # Determines the current withdrawal status by the list of the bound WithdrawalProven events. + # + # ## Parameters + # - `proven_events`: A list of WithdrawalProven events. Each item is `{l1_timestamp, game_index}` tuple. + # - `respected_games`: A list of games returned by the `respected_games()` function. + # + # ## Returns + # - `{status, datetime}` tuple where the `status` is the current withdrawal status, + # `datetime` is the point of time when the challenge period ends. + # (only for `In challenge period` status). + @spec handle_proven_status(list(), list()) :: {String.t(), DateTime.t() | nil} + defp handle_proven_status(proven_events, respected_games) do + statuses = + proven_events + |> Enum.reduce_while([], fn {l1_timestamp, game_index}, acc -> + game = game_by_index(game_index) + + # credo:disable-for-lines:16 Credo.Check.Refactor.PipeChainStart + cond do + is_nil(game_index) and not Enum.empty?(respected_games) -> + # here we cannot exactly determine the status `Waiting a game to resolve` or + # `Ready for relay` or `In challenge period` + # as we don't know the game index. 
+            {@withdrawal_status_proven, nil}
+
+          is_nil(game) or DateTime.compare(l1_timestamp, game.created_at) == :lt ->
+            # the old status determining approach
+            pre_fault_proofs_status(l1_timestamp)
+
+          true ->
+            # the new status determining approach
+            post_fault_proofs_status(l1_timestamp, game)
+        end
+        |> case do
+          {@withdrawal_status_ready_for_relay, _} = status -> {:halt, [status]}
+          status -> {:cont, [status | acc]}
+        end
+      end)
+
+    status_priority =
+      %{}
+      |> Map.put(@withdrawal_status_in_challenge, 30)
+      |> Map.put(@withdrawal_status_waiting_to_resolve, 20)
+      |> Map.put(@withdrawal_status_proven, 10)
+
+    statuses
+    |> Enum.sort_by(
+      fn {s, timestamp} ->
+        {status_priority[s], if(not is_nil(timestamp), do: -DateTime.to_unix(timestamp))}
+      end,
+      :desc
+    )
+    |> List.first()
   end
 
-  defp proven_event_by_hash(withdrawal_hash) do
-    Repo.replica().one(
+  # Gets the list of WithdrawalProven events from the `op_withdrawal_events` database table
+  # associated with the given withdrawal hash. The returned events are sorted by `l1_block_number`
+  # in ascending order.
+  #
+  # ## Parameters
+  # - `withdrawal_hash`: The withdrawal hash for which the function should return the events.
+  #
+  # ## Returns
+  # - A list of `{l1_timestamp, game_index}` tuples where `l1_timestamp` is the L1 block timestamp
+  #   when the event appeared, `game_index` is the associated dispute game index (can be `nil`).
+  @spec proven_events_by_hash(Hash.t()) :: [{DateTime.t(), non_neg_integer() | nil}]
+  defp proven_events_by_hash(withdrawal_hash) do
+    Repo.replica().all(
       from(
         we in WithdrawalEvent,
         select: {we.l1_timestamp, we.game_index},
-        where: we.withdrawal_hash == ^withdrawal_hash and we.l1_event_type == :WithdrawalProven
-      )
+        where: we.withdrawal_hash == ^withdrawal_hash and we.l1_event_type == :WithdrawalProven,
+        order_by: [asc: we.l1_block_number]
+      ),
+      timeout: :infinity
     )
   end
diff --git a/apps/explorer/lib/explorer/chain/optimism/withdrawal_event.ex b/apps/explorer/lib/explorer/chain/optimism/withdrawal_event.ex
index aebc4ad4eb66..0ca44f41e58f 100644
--- a/apps/explorer/lib/explorer/chain/optimism/withdrawal_event.ex
+++ b/apps/explorer/lib/explorer/chain/optimism/withdrawal_event.ex
@@ -22,7 +22,7 @@ defmodule Explorer.Chain.Optimism.WithdrawalEvent do
     field(:withdrawal_hash, Hash.Full, primary_key: true)
     field(:l1_event_type, Ecto.Enum, values: [:WithdrawalProven, :WithdrawalFinalized], primary_key: true)
     field(:l1_timestamp, :utc_datetime_usec)
-    field(:l1_transaction_hash, Hash.Full)
+    field(:l1_transaction_hash, Hash.Full, primary_key: true)
     field(:l1_block_number, :integer)
     field(:game_index, :integer)
 
diff --git a/apps/explorer/lib/explorer/chain/search.ex b/apps/explorer/lib/explorer/chain/search.ex
index 20d0d35225fc..cca5e55e913d 100644
--- a/apps/explorer/lib/explorer/chain/search.ex
+++ b/apps/explorer/lib/explorer/chain/search.ex
@@ -1,20 +1,18 @@
 defmodule Explorer.Chain.Search do
   @moduledoc """
-  Search related functions
+  Search-related functions
   """
-  import Ecto.Query,
+  import Ecto.Query
+  import Explorer.Chain, only: [select_repo: 1]
+  import Explorer.MicroserviceInterfaces.BENS, only: [ens_domain_name_lookup: 1]
+
+  import Explorer.PagingOptions,
     only: [
-      dynamic: 2,
-      from: 2,
-      limit: 2,
-      order_by: 3,
-      subquery: 1,
-      union: 2,
-      where: 3
+      default_paging_options: 0
     ]
 
-  import Explorer.Chain, only: [select_repo: 1]
-  import Explorer.MicroserviceInterfaces.BENS, only: [ens_domain_name_lookup: 1]
+  import Explorer.SortingHelper, only: [apply_sorting: 3, 
page_with_sorting: 4] + alias Explorer.{Chain, PagingOptions} alias Explorer.Helper, as: ExplorerHelper alias Explorer.Tags.{AddressTag, AddressToTag} @@ -24,126 +22,221 @@ defmodule Explorer.Chain.Search do Beacon.Blob, Block, DenormalizationHelper, + Hash, SmartContract, Token, Transaction, UserOperation } + use Utils.CompileTimeEnvHelper, chain_type: [:explorer, :chain_type] + + @min_query_length 3 + + @token_sorting [ + {:desc_nulls_last, :circulating_market_cap, :token}, + {:desc_nulls_last, :fiat_value, :token}, + {:desc_nulls_last, :is_verified_via_admin_panel, :token}, + {:desc_nulls_last, :holder_count, :token}, + {:asc, :name, :token}, + {:desc, :inserted_at, :token} + ] + + @contract_sorting [ + {:desc_nulls_last, :certified, :smart_contract}, + {:asc, :name, :smart_contract}, + {:desc, :inserted_at, :smart_contract} + ] + + @label_sorting [{:asc, :display_name, :address_tag}, {:desc, :inserted_at, :address_to_tag}] + @doc """ - Search function used in web interface. Returns paginated search results + Search function used in web interface and API v2. Returns paginated search results """ - @spec joint_search(PagingOptions.t(), integer(), binary(), [Chain.api?()] | []) :: list - def joint_search(paging_options, offset, raw_string, options \\ []) do - string = String.trim(raw_string) - - ens_task = maybe_run_ens_task(paging_options, raw_string, options) - - result = - case prepare_search_term(string) do - {:some, term} -> - query = base_joint_query(string, term) - - ordered_query = - from(items in subquery(query), - order_by: [ - desc: items.priority, - desc_nulls_last: items.certified, - desc_nulls_last: items.circulating_market_cap, - desc_nulls_last: items.exchange_rate, - desc_nulls_last: items.is_verified_via_admin_panel, - desc_nulls_last: items.holder_count, - asc: items.name, - desc: items.inserted_at - ], - limit: ^paging_options.page_size, - offset: ^offset - ) - - paginated_ordered_query = - ordered_query - |> page_search_results(paging_options) - - search_results = select_repo(options).all(paginated_ordered_query) - - search_results - |> Enum.map(fn result -> - result - |> compose_result_checksummed_address_hash() - |> format_timestamp() - end) - - _ -> + @spec joint_search(PagingOptions.t(), binary(), [Chain.api?()] | []) :: {list(), map() | nil} + # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity + def joint_search(paging_options, query_string, options \\ []) do + query_string = String.trim(query_string) + ens_task = run_ens_task_if_first_page(paging_options, query_string, options) + + search_results = + query_string + |> prepare_search_query(prepare_search_term(query_string)) + |> case do + nil -> [] + + {:address_hash, address_hash} -> + address_hash + |> search_token_by_address_hash_query() + |> ExplorerHelper.maybe_hide_scam_addresses(:contract_address_hash) + |> union_all( + ^(address_hash + |> search_address_by_address_hash_query() + |> ExplorerHelper.maybe_hide_scam_addresses(:hash)) + ) + |> select_repo(options).all() + + {:filecoin, filecoin_address} -> + filecoin_address + |> address_by_filecoin_id_or_robust() + |> select_repo(options).all() + + {:full_hash, full_hash} -> + transaction_block_query = + full_hash + |> search_transaction_query() + |> union_all(^search_block_by_hash_query(full_hash)) + + transaction_block_op_query = + if UserOperation.enabled?() do + user_operation_query = search_user_operation_query(full_hash) + + transaction_block_query + |> union_all(^user_operation_query) + else + transaction_block_query + end + + 
result_query = + if Application.get_env(:explorer, :chain_type) == :ethereum do + blob_query = search_blob_query(full_hash) + + transaction_block_op_query + |> union_all(^blob_query) + else + transaction_block_op_query + end + + result_query + |> select_repo(options).all() + + {:number, block_number} -> + block_number + |> search_block_by_number_query() + |> select_repo(options).all() + + [{:number, block_number}, {:text, prepared_term}] -> + prepared_term + |> search_by_string(paging_options) + |> union_all(^search_block_by_number_query(block_number)) + |> order_and_page_text_search_result(paging_options) + |> select_repo(options).all() + + {:text, prepared_term} -> + prepared_term + |> search_by_string(paging_options) + |> order_and_page_text_search_result(paging_options) + |> select_repo(options).all() end + prepared_results = + search_results + |> Enum.map(fn result -> + result + |> compose_result_checksummed_address_hash() + |> format_timestamp() + end) + ens_result = (ens_task && await_ens_task(ens_task)) || [] - ens_result ++ result + trim_list_and_prepare_next_page_params(ens_result ++ prepared_results, paging_options, query_string) end - def base_joint_query(string, term) do - tokens_query = - string |> search_token_query(term) |> ExplorerHelper.maybe_hide_scam_addresses(:contract_address_hash) - - contracts_query = term |> search_contract_query() |> ExplorerHelper.maybe_hide_scam_addresses(:address_hash) - labels_query = search_label_query(term) - address_query = string |> search_address_query() |> ExplorerHelper.maybe_hide_scam_addresses(:hash) - block_query = search_block_query(string) + defp order_and_page_text_search_result(query, paging_options) do + query + |> subquery() + |> order_by([item], + desc: item.priority, + desc_nulls_last: item.certified, + desc_nulls_last: item.circulating_market_cap, + desc_nulls_last: item.exchange_rate, + desc_nulls_last: item.is_verified_via_admin_panel, + desc_nulls_last: item.holder_count, + asc: item.name, + desc: item.inserted_at + ) + |> limit(^paging_options.page_size) + end - basic_query = - from( - tokens in subquery(tokens_query), - union: ^contracts_query, - union: ^labels_query - ) + @spec prepare_search_query(binary(), {:some, binary()} | :none) :: + {:address_hash, Hash.Address.t()} + | {:filecoin, any()} + | {:full_hash, Hash.t()} + | {:number, non_neg_integer()} + | [{:number, non_neg_integer()}, {:text, binary()}] + | {:text, binary()} + | nil + # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity + defp prepare_search_query(query, {:some, prepared_term}) do + address_hash_result = Chain.string_to_address_hash(query) + filecoin_address_result = maybe_parse_filecoin_address(query) + full_hash_result = Chain.string_to_transaction_hash(query) + non_negative_integer_result = ExplorerHelper.safe_parse_non_negative_integer(query) + query_length = String.length(query) cond do - address_query -> - basic_query - |> union(^address_query) + match?({:ok, _hash}, address_hash_result) -> + {:ok, hash} = address_hash_result + {:address_hash, hash} - valid_full_hash?(string) -> - transaction_query = search_transaction_query(string) + match?({:ok, _address}, filecoin_address_result) -> + {:ok, filecoin_address} = filecoin_address_result + {:filecoin, filecoin_address} - transaction_block_query = - basic_query - |> union(^transaction_query) - |> union(^block_query) + match?({:ok, _hash}, full_hash_result) -> + {:ok, hash} = full_hash_result + {:full_hash, hash} - transaction_block_op_query = - if UserOperation.enabled?() do 
-          user_operation_query = search_user_operation_query(string)
+      match?({:ok, _block_number}, non_negative_integer_result) and query_length < @min_query_length ->
+        {:ok, block_number} = non_negative_integer_result
+        {:number, block_number}
 
-          transaction_block_query
-          |> union(^user_operation_query)
-        else
-          transaction_block_query
-        end
-
-      if Application.get_env(:explorer, :chain_type) == :ethereum do
-        blob_query = search_blob_query(string)
-
-        transaction_block_op_query
-        |> union(^blob_query)
-      else
-        transaction_block_op_query
-      end
+      match?({:ok, _block_number}, non_negative_integer_result) and query_length >= @min_query_length ->
+        {:ok, block_number} = non_negative_integer_result
+        [{:number, block_number}, {:text, prepared_term}]
 
-      block_query ->
-        basic_query
-        |> union(^block_query)
+      query_length >= @min_query_length ->
+        {:text, prepared_term}
 
       true ->
-        basic_query
+        nil
     end
   end
 
-  defp maybe_run_ens_task(%PagingOptions{key: nil}, query_string, options) do
+  defp prepare_search_query(_query, _) do
+    nil
+  end
+
+  defp search_by_string(term, paging_options) do
+    tokens_query_certified =
+      term
+      |> search_token_query_certified(paging_options)
+      |> ExplorerHelper.maybe_hide_scam_addresses(:contract_address_hash)
+
+    tokens_query_not_certified =
+      term
+      |> search_token_query_not_certified(paging_options)
+      |> ExplorerHelper.maybe_hide_scam_addresses(:contract_address_hash)
+
+    contracts_query =
+      term |> search_contract_query(paging_options) |> ExplorerHelper.maybe_hide_scam_addresses(:address_hash)
+
+    labels_query = search_label_query(term, paging_options)
+
+    from(
+      tokens in subquery(tokens_query_certified),
+      union_all: ^tokens_query_not_certified,
+      union_all: ^contracts_query,
+      union_all: ^labels_query
+    )
+  end
+
+  defp run_ens_task_if_first_page(%PagingOptions{key: nil}, query_string, options) do
     Task.async(fn -> search_ens_name(query_string, options) end)
   end
 
-  defp maybe_run_ens_task(_, _query_string, _options), do: nil
+  defp run_ens_task_if_first_page(_, _query_string, _options), do: nil
 
   @doc """
   Search function. Differences from joint_search/4:
@@ -151,124 +244,129 @@ defmodule Explorer.Chain.Search do
   For example if was found 50 tokens, 50 smart-contracts, 50 labels, 1 address, 1 transaction and 2 blocks (impossible, just example) and page_size=50.
   Then function will return: [1 address, 1 transaction, 2 blocks, 16 tokens, 15 smart-contracts, 15 labels]
   2. Results couldn't be paginated
+
+  The `balanced_unpaginated_search` function is used by the `api/v2/search/quick` endpoint.
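+
+  ## Example
+
+      # Illustrative sketch only: the query string is hypothetical and the
+      # result shape depends on the data available in the database.
+      Search.balanced_unpaginated_search(%PagingOptions{page_size: 50}, "USDT", api?: true)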
""" @spec balanced_unpaginated_search(PagingOptions.t(), binary(), [Chain.api?()] | []) :: list - # credo:disable-for-next-line - def balanced_unpaginated_search(paging_options, raw_search_query, options \\ []) do - search_query = String.trim(raw_search_query) - ens_task = Task.async(fn -> search_ens_name(raw_search_query, options) end) - - case prepare_search_term(search_query) do - {:some, term} -> - tokens_result = - search_query - |> search_token_query(term) - |> ExplorerHelper.maybe_hide_scam_addresses(:contract_address_hash) - |> order_by([token], - desc_nulls_last: token.circulating_market_cap, - desc_nulls_last: token.fiat_value, - desc_nulls_last: token.is_verified_via_admin_panel, - desc_nulls_last: token.holder_count, - asc: token.name, - desc: token.inserted_at - ) - |> limit(^paging_options.page_size) - |> select_repo(options).all() + # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity + def balanced_unpaginated_search(paging_options, query_string, options \\ []) do + query_string = String.trim(query_string) + ens_task = Task.async(fn -> search_ens_name(query_string, options) end) + + results = + query_string + |> prepare_search_query(prepare_search_term(query_string)) + |> case do + nil -> + [] - contracts_result = - term - |> search_contract_query() - |> ExplorerHelper.maybe_hide_scam_addresses(:address_hash) - |> order_by([items], asc: items.name, desc: items.inserted_at) - |> limit(^paging_options.page_size) - |> select_repo(options).all() + {:address_hash, address_hash} -> + [ + address_hash + |> search_token_by_address_hash_query() + |> union_all(^search_address_by_address_hash_query(address_hash)) + |> select_repo(options).all() + ] - labels_result = - term - |> search_label_query() - |> order_by([att, at], asc: at.display_name, desc: att.inserted_at) - |> limit(^paging_options.page_size) - |> select_repo(options).all() + {:filecoin, filecoin_address} -> + [ + filecoin_address + |> address_by_filecoin_id_or_robust() + |> select_repo(options).all() + ] - transaction_result = - if valid_full_hash?(search_query) do - search_query + {:full_hash, full_hash} -> + transaction_block_query = + full_hash |> search_transaction_query() - |> select_repo(options).all() - else - [] - end + |> union_all(^search_block_by_hash_query(full_hash)) - op_result = - if valid_full_hash?(search_query) && UserOperation.enabled?() do - search_query - |> search_user_operation_query() - |> select_repo(options).all() - else - [] - end + transaction_block_op_query = + if UserOperation.enabled?() do + user_operation_query = search_user_operation_query(full_hash) - blob_result = - if valid_full_hash?(search_query) && Application.get_env(:explorer, :chain_type) == :ethereum do - search_query - |> search_blob_query() - |> select_repo(options).all() - else - [] - end + transaction_block_query + |> union_all(^user_operation_query) + else + transaction_block_query + end - address_result = - if query = search_address_query(search_query) do - query - |> ExplorerHelper.maybe_hide_scam_addresses(:hash) - |> select_repo(options).all() - else - [] - end + result_query = + if Application.get_env(:explorer, :chain_type) == :ethereum do + blob_query = search_blob_query(full_hash) + + transaction_block_op_query + |> union_all(^blob_query) + else + transaction_block_op_query + end - blocks_result = - if query = search_block_query(search_query) do - query - |> limit(^paging_options.page_size) + [ + result_query |> select_repo(options).all() - else - [] - end + ] - ens_result = 
await_ens_task(ens_task) + {:number, block_number} -> + [ + block_number + |> search_block_by_number_query() + |> select_repo(options).all() + ] - non_empty_lists = + [{:number, block_number}, {:text, prepared_term}] -> [ - tokens_result, - contracts_result, - labels_result, - transaction_result, - op_result, - blob_result, - address_result, - blocks_result, - ens_result + block_number |> search_block_by_number_query() |> select_repo(options).all() + | search_by_string_balanced(prepared_term, paging_options, options) ] - |> Enum.filter(fn list -> not Enum.empty?(list) end) - |> Enum.sort_by(fn list -> Enum.count(list) end, :asc) - - to_take = - non_empty_lists - |> Enum.map(fn list -> Enum.count(list) end) - |> take_all_categories(List.duplicate(0, Enum.count(non_empty_lists)), paging_options.page_size) - - non_empty_lists - |> Enum.zip_reduce(to_take, [], fn x, y, acc -> acc ++ Enum.take(x, y) end) - |> Enum.map(fn result -> - result - |> compose_result_checksummed_address_hash() - |> format_timestamp() - end) - |> Enum.sort_by(fn item -> item.priority end, :desc) - _ -> - [] - end + {:text, prepared_term} -> + search_by_string_balanced(prepared_term, paging_options, options) + end + + ens_result = await_ens_task(ens_task) + + non_empty_lists = + [ + ens_result | results + ] + |> Enum.filter(fn list -> not Enum.empty?(list) end) + |> Enum.sort_by(fn list -> Enum.count(list) end, :asc) + + to_take = + non_empty_lists + |> Enum.map(fn list -> Enum.count(list) end) + |> take_all_categories(List.duplicate(0, Enum.count(non_empty_lists)), paging_options.page_size) + + non_empty_lists + |> Enum.zip_reduce(to_take, [], fn x, y, acc -> acc ++ Enum.take(x, y) end) + |> Enum.map(fn result -> + result + |> compose_result_checksummed_address_hash() + |> format_timestamp() + end) + |> Enum.sort_by(fn item -> item.priority end, :desc) + end + + defp search_by_string_balanced(term, paging_options, options) do + tokens_results = + (term + |> search_token_query_certified(paging_options) + |> ExplorerHelper.maybe_hide_scam_addresses(:contract_address_hash) + |> select_repo(options).all()) ++ + (term + |> search_token_query_not_certified(paging_options) + |> ExplorerHelper.maybe_hide_scam_addresses(:contract_address_hash) + |> select_repo(options).all()) + + contracts_results = + term + |> search_contract_query(paging_options) + |> ExplorerHelper.maybe_hide_scam_addresses(:address_hash) + |> select_repo(options).all() + + labels_query = term |> search_label_query(paging_options) |> select_repo(options).all() + + [tokens_results, contracts_results, labels_query] end defp await_ens_task(ens_task) do @@ -295,14 +393,14 @@ defmodule Explorer.Chain.Search do end end - defp search_label_query(term) do + defp search_label_query(term, paging_options) do label_search_fields = search_fields() - |> Map.put(:address_hash, dynamic([att, _, _], att.address_hash)) + |> Map.put(:address_hash, dynamic([address_to_tag: att], att.address_hash)) |> Map.put(:type, "label") - |> Map.put(:name, dynamic([_, at, _], at.display_name)) - |> Map.put(:inserted_at, dynamic([att, _, _], att.inserted_at)) - |> Map.put(:verified, dynamic([_, _, smart_contract], not is_nil(smart_contract))) + |> Map.put(:name, dynamic([address_tag: at], at.display_name)) + |> Map.put(:inserted_at, dynamic([address_to_tag: att], att.inserted_at)) + |> Map.put(:verified, dynamic([smart_contract: smart_contract], not is_nil(smart_contract))) |> Map.put(:priority, 1) inner_query = @@ -311,139 +409,151 @@ defmodule Explorer.Chain.Search do select: tag ) - 
from(att in AddressToTag, - inner_join: at in subquery(inner_query), - on: att.tag_id == at.id, - left_join: smart_contract in SmartContract, - on: att.address_hash == smart_contract.address_hash, - select: ^label_search_fields - ) + base_query = + from(att in AddressToTag, + as: :address_to_tag, + inner_join: at in subquery(inner_query), + as: :address_tag, + on: att.tag_id == at.id, + left_join: smart_contract in SmartContract, + as: :smart_contract, + on: att.address_hash == smart_contract.address_hash, + select: ^label_search_fields + ) + + base_query + |> apply_sorting([], @label_sorting) + |> page_search_results(paging_options, "label") end - defp search_token_query(string, term) do - token_search_fields = - search_fields() - |> Map.put(:address_hash, dynamic([token, _], token.contract_address_hash)) - |> Map.put(:type, "token") - |> Map.put(:name, dynamic([token, _], token.name)) - |> Map.put(:symbol, dynamic([token, _], token.symbol)) - |> Map.put(:holder_count, dynamic([token, _], token.holder_count)) - |> Map.put(:inserted_at, dynamic([token, _], token.inserted_at)) - |> Map.put(:icon_url, dynamic([token, _], token.icon_url)) - |> Map.put(:token_type, dynamic([token, _], token.type)) - |> Map.put(:verified, dynamic([_, smart_contract], not is_nil(smart_contract))) - |> Map.put(:certified, dynamic([_, smart_contract], smart_contract.certified)) - |> Map.put(:exchange_rate, dynamic([token, _], token.fiat_value)) - |> Map.put(:total_supply, dynamic([token, _], token.total_supply)) - |> Map.put(:circulating_market_cap, dynamic([token, _], token.circulating_market_cap)) - |> Map.put(:is_verified_via_admin_panel, dynamic([token, _], token.is_verified_via_admin_panel)) - - case Chain.string_to_address_hash(string) do - {:ok, address_hash} -> - from(token in Token, - left_join: smart_contract in SmartContract, - on: token.contract_address_hash == smart_contract.address_hash, - where: token.contract_address_hash == ^address_hash, - select: ^token_search_fields - ) + defp search_token_query_not_certified(term, paging_options) do + term + |> search_token_by_symbol_or_name_query(paging_options) + |> where([smart_contract: smart_contract], is_nil(smart_contract.certified) or not smart_contract.certified) + end - _ -> - from(token in Token, - left_join: smart_contract in SmartContract, - on: token.contract_address_hash == smart_contract.address_hash, - where: fragment("to_tsvector('english', ? || ' ' || ?) @@ to_tsquery(?)", token.symbol, token.name, ^term), - select: ^token_search_fields - ) - end + defp search_token_query_certified(term, paging_options) do + term + |> search_token_by_symbol_or_name_query(paging_options) + |> where([smart_contract: smart_contract], smart_contract.certified) end - defp search_contract_query(term) do + defp search_token_by_symbol_or_name_query(term, paging_options) do + base_query = + from(token in Token, + as: :token, + left_join: smart_contract in SmartContract, + as: :smart_contract, + on: token.contract_address_hash == smart_contract.address_hash, + where: fragment("to_tsvector('english', ? || ' ' || ?) 
@@ to_tsquery(?)", token.symbol, token.name, ^term), + select: ^token_search_fields() + ) + + base_query |> apply_sorting([], @token_sorting) |> page_search_results(paging_options, "token") + end + + defp search_token_by_address_hash_query(address_hash) do + from(token in Token, + as: :token, + left_join: smart_contract in SmartContract, + as: :smart_contract, + on: token.contract_address_hash == smart_contract.address_hash, + where: token.contract_address_hash == ^address_hash, + select: ^token_search_fields() + ) + end + + defp search_contract_query(term, paging_options) do contract_search_fields = search_fields() - |> Map.put(:address_hash, dynamic([smart_contract, _], smart_contract.address_hash)) + |> Map.put(:address_hash, dynamic([smart_contract: smart_contract], smart_contract.address_hash)) |> Map.put(:type, "contract") - |> Map.put(:name, dynamic([smart_contract, _], smart_contract.name)) - |> Map.put(:inserted_at, dynamic([_, address], address.inserted_at)) - |> Map.put(:certified, dynamic([smart_contract, _], smart_contract.certified)) + |> Map.put(:name, dynamic([smart_contract: smart_contract], smart_contract.name)) + |> Map.put(:inserted_at, dynamic([smart_contract: smart_contract], smart_contract.inserted_at)) + |> Map.put(:certified, dynamic([smart_contract: smart_contract], smart_contract.certified)) |> Map.put(:verified, true) - from(smart_contract in SmartContract, - left_join: address in Address, - on: smart_contract.address_hash == address.hash, - where: fragment("to_tsvector('english', ?) @@ to_tsquery(?)", smart_contract.name, ^term), - select: ^contract_search_fields - ) + base_query = + from(smart_contract in SmartContract, + as: :smart_contract, + where: fragment("to_tsvector('english', ?) @@ to_tsquery(?)", smart_contract.name, ^term), + select: ^contract_search_fields + ) + + base_query + |> apply_sorting([], @contract_sorting) + |> page_search_results(paging_options, "contract") end - defp search_address_query(term) do - case Chain.string_to_address_hash(term) do - {:ok, address_hash} -> - address_search_fields = - search_fields() - |> Map.put(:address_hash, dynamic([address, _, _], address.hash)) - |> Map.put(:type, "address") - |> Map.put(:name, dynamic([_, address_name, _], address_name.name)) - |> Map.put(:inserted_at, dynamic([_, address_name, _], address_name.inserted_at)) - |> Map.put(:verified, dynamic([address, _, _], address.verified)) - |> Map.put(:certified, dynamic([_, _, smart_contract], smart_contract.certified)) - - from(address in Address, - left_join: - address_name in subquery( - from(name in Address.Name, - where: name.address_hash == ^address_hash, - order_by: [desc: name.primary], - limit: 1 - ) - ), - on: address.hash == address_name.address_hash, - left_join: smart_contract in SmartContract, - on: address.hash == smart_contract.address_hash, - where: address.hash == ^address_hash, - select: ^address_search_fields + defp search_address_by_address_hash_query(address_hash) do + address_search_fields = + search_fields() + |> Map.put(:address_hash, dynamic([address: address], address.hash)) + |> Map.put(:type, "address") + |> Map.put(:name, dynamic([address_name: address_name], address_name.name)) + |> Map.put(:inserted_at, dynamic([address: address], address.inserted_at)) + |> Map.put(:verified, dynamic([address: address], address.verified)) + |> Map.put(:certified, dynamic([smart_contract: smart_contract], smart_contract.certified)) + + base_address_query() + |> where([address: address], address.hash == ^address_hash) + |> join( + :left, + 
[address: address], + address_name in subquery( + from(name in Address.Name, + where: name.address_hash == ^address_hash, + order_by: [desc: name.primary], + limit: 1 ) - - _ -> - nil - end + ), + on: address.hash == address_name.address_hash, + as: :address_name + ) + |> select(^address_search_fields) end - defp valid_full_hash?(string_input) do - case Chain.string_to_transaction_hash(string_input) do - {:ok, _transaction_hash} -> true - _ -> false - end + defp base_address_query do + from(address in Address, + as: :address, + left_join: smart_contract in SmartContract, + as: :smart_contract, + on: address.hash == smart_contract.address_hash + ) end - defp search_transaction_query(term) do + defp search_transaction_query(hash) do if DenormalizationHelper.transactions_denormalization_finished?() do transaction_search_fields = search_fields() - |> Map.put(:transaction_hash, dynamic([transaction], transaction.hash)) - |> Map.put(:block_hash, dynamic([transaction], transaction.block_hash)) + |> Map.put(:transaction_hash, dynamic([transaction: transaction], transaction.hash)) + |> Map.put(:block_hash, dynamic([transaction: transaction], transaction.block_hash)) |> Map.put(:type, "transaction") - |> Map.put(:block_number, dynamic([transaction], transaction.block_number)) - |> Map.put(:inserted_at, dynamic([transaction], transaction.inserted_at)) - |> Map.put(:timestamp, dynamic([transaction], transaction.block_timestamp)) + |> Map.put(:block_number, dynamic([transaction: transaction], transaction.block_number)) + |> Map.put(:inserted_at, dynamic([transaction: transaction], transaction.inserted_at)) + |> Map.put(:timestamp, dynamic([transaction: transaction], transaction.block_timestamp)) from(transaction in Transaction, - where: transaction.hash == ^term, + as: :transaction, + where: transaction.hash == ^hash, select: ^transaction_search_fields ) else transaction_search_fields = search_fields() - |> Map.put(:transaction_hash, dynamic([transaction, _], transaction.hash)) - |> Map.put(:block_hash, dynamic([transaction, _], transaction.block_hash)) + |> Map.put(:transaction_hash, dynamic([transaction: transaction], transaction.hash)) + |> Map.put(:block_hash, dynamic([transaction: transaction], transaction.block_hash)) |> Map.put(:type, "transaction") - |> Map.put(:block_number, dynamic([transaction, _], transaction.block_number)) - |> Map.put(:inserted_at, dynamic([transaction, _], transaction.inserted_at)) - |> Map.put(:timestamp, dynamic([_, block], block.timestamp)) + |> Map.put(:block_number, dynamic([transaction: transaction], transaction.block_number)) + |> Map.put(:inserted_at, dynamic([transaction: transaction], transaction.inserted_at)) + |> Map.put(:timestamp, dynamic([block: block], block.timestamp)) from(transaction in Transaction, + as: :transaction, left_join: block in Block, + as: :block, on: transaction.block_hash == block.hash, - where: transaction.hash == ^term, + where: transaction.hash == ^hash, select: ^transaction_search_fields ) end @@ -452,15 +562,17 @@ defmodule Explorer.Chain.Search do defp search_user_operation_query(term) do user_operation_search_fields = search_fields() - |> Map.put(:user_operation_hash, dynamic([user_operation, _], user_operation.hash)) - |> Map.put(:block_hash, dynamic([user_operation, _], user_operation.block_hash)) + |> Map.put(:user_operation_hash, dynamic([user_operation: user_operation], user_operation.hash)) + |> Map.put(:block_hash, dynamic([user_operation: user_operation], user_operation.block_hash)) |> Map.put(:type, "user_operation") - |> 
Map.put(:inserted_at, dynamic([user_operation, _], user_operation.inserted_at)) - |> Map.put(:block_number, dynamic([user_operation, _], user_operation.block_number)) - |> Map.put(:timestamp, dynamic([_, block], block.timestamp)) + |> Map.put(:inserted_at, dynamic([user_operation: user_operation], user_operation.inserted_at)) + |> Map.put(:block_number, dynamic([user_operation: user_operation], user_operation.block_number)) + |> Map.put(:timestamp, dynamic([block: block], block.timestamp)) from(user_operation in UserOperation, + as: :user_operation, left_join: block in Block, + as: :block, on: user_operation.block_hash == block.hash, where: user_operation.hash == ^term, select: ^user_operation_search_fields @@ -470,77 +582,147 @@ defmodule Explorer.Chain.Search do defp search_blob_query(term) do blob_search_fields = search_fields() - |> Map.put(:blob_hash, dynamic([blob, _], blob.hash)) + |> Map.put(:blob_hash, dynamic([blob: blob], blob.hash)) |> Map.put(:type, "blob") - |> Map.put(:inserted_at, dynamic([blob, _], blob.inserted_at)) + |> Map.put(:inserted_at, dynamic([blob: blob], blob.inserted_at)) from(blob in Blob, + as: :blob, where: blob.hash == ^term, select: ^blob_search_fields ) end - defp search_block_query(term) do + defp search_block_by_hash_query(hash) do + search_block_base_query() + |> where([block: block], block.hash == ^hash) + end + + defp search_block_by_number_query(number) do + search_block_base_query() + |> where([block: block], block.number == ^number) + end + + defp search_block_base_query do block_search_fields = search_fields() - |> Map.put(:block_hash, dynamic([block], block.hash)) + |> Map.put(:block_hash, dynamic([block: block], block.hash)) |> Map.put(:type, "block") - |> Map.put(:block_number, dynamic([block], block.number)) - |> Map.put(:inserted_at, dynamic([block], block.inserted_at)) - |> Map.put(:timestamp, dynamic([block], block.timestamp)) - - case Chain.string_to_block_hash(term) do - {:ok, block_hash} -> - from(block in Block, - where: block.hash == ^block_hash, - select: ^block_search_fields - ) + |> Map.put(:block_number, dynamic([block: block], block.number)) + |> Map.put(:inserted_at, dynamic([block: block], block.inserted_at)) + |> Map.put(:timestamp, dynamic([block: block], block.timestamp)) - _ -> - case ExplorerHelper.safe_parse_non_negative_integer(term) do - {:ok, block_number} -> - from(block in Block, - where: block.number == ^block_number, - select: ^block_search_fields - ) - - _ -> - nil - end - end + from(block in Block, + as: :block, + select: ^block_search_fields + ) end - defp page_search_results(query, %PagingOptions{key: nil}), do: query + defp page_search_results( + query, + %PagingOptions{ + key: %{ + "label" => %{ + "name" => name, + "inserted_at" => inserted_at + } + }, + page_size: page_size + }, + "label" + ) do + query + |> page_with_sorting( + %PagingOptions{ + key: %{ + display_name: name, + inserted_at: inserted_at + }, + page_size: page_size + }, + [], + [{:asc, :display_name, :address_tag}, {:desc, :inserted_at, :address_to_tag}] + ) + end - defp page_search_results(query, %PagingOptions{ - key: {_address_hash, _transaction_hash, _block_hash, holder_count, name, inserted_at, item_type} - }) - when holder_count in [nil, ""] do - where( - query, - [item], - (item.name > ^name and item.type == ^item_type) or - (item.name == ^name and item.inserted_at < ^inserted_at and - item.type == ^item_type) or - item.type != ^item_type + defp page_search_results( + query, + %PagingOptions{ + key: %{ + "contract" => %{ + "certified" 
=> certified, + "name" => name, + "inserted_at" => inserted_at + } + }, + page_size: page_size + }, + "contract" + ) do + query + |> page_with_sorting( + %PagingOptions{ + key: %{ + certified: parse_possible_nil(certified), + name: parse_possible_nil(name), + inserted_at: inserted_at + }, + page_size: page_size + }, + [], + [ + {:desc_nulls_last, :certified, :smart_contract}, + {:asc, :name, :smart_contract}, + {:desc, :inserted_at, :smart_contract} + ] ) end - # credo:disable-for-next-line - defp page_search_results(query, %PagingOptions{ - key: {_address_hash, _transaction_hash, _block_hash, holder_count, name, inserted_at, item_type} - }) do - where( - query, - [item], - (item.holder_count < ^holder_count and item.type == ^item_type) or - (item.holder_count == ^holder_count and item.name > ^name and item.type == ^item_type) or - (item.holder_count == ^holder_count and item.name == ^name and item.inserted_at < ^inserted_at and - item.type == ^item_type) or - item.type != ^item_type + defp page_search_results( + query, + %PagingOptions{ + key: %{ + "token" => %{ + "circulating_market_cap" => circulating_market_cap, + "fiat_value" => fiat_value, + "is_verified_via_admin_panel" => is_verified_via_admin_panel, + "holder_count" => holder_count, + "name" => name, + "inserted_at" => inserted_at + } + }, + page_size: page_size + }, + "token" + ) do + query + |> page_with_sorting( + %PagingOptions{ + key: %{ + circulating_market_cap: parse_possible_nil(circulating_market_cap), + fiat_value: parse_possible_nil(fiat_value), + is_verified_via_admin_panel: parse_possible_nil(is_verified_via_admin_panel), + holder_count: parse_possible_nil(holder_count), + name: name, + inserted_at: inserted_at + }, + page_size: page_size + }, + [], + [ + {:desc_nulls_last, :circulating_market_cap, :token}, + {:desc_nulls_last, :fiat_value, :token}, + {:desc_nulls_last, :is_verified_via_admin_panel, :token}, + {:desc_nulls_last, :holder_count, :token}, + {:asc, :name, :token}, + {:desc, :inserted_at, :token} + ] ) end + defp page_search_results(query, %PagingOptions{page_size: page_size}, _query_type), + do: limit(query, ^page_size) + defp take_all_categories([], taken_lengths, _remained), do: taken_lengths defp take_all_categories(lengths, taken_lengths, remained) do @@ -603,16 +785,18 @@ defmodule Explorer.Chain.Search do end defp search_ens_name(search_query, options) do - if result = search_ens_name_in_bens(search_query) do - [ - result[:address_hash] - |> search_address_query() - |> ExplorerHelper.maybe_hide_scam_addresses(:hash) - |> select_repo(options).all() - |> merge_address_search_result_with_ens_info(result) - ] - else - [] + case search_ens_name_in_bens(search_query) do + {ens_result, address_hash} -> + [ + address_hash + |> search_address_by_address_hash_query() + |> ExplorerHelper.maybe_hide_scam_addresses(:hash) + |> select_repo(options).all() + |> merge_address_search_result_with_ens_info(ens_result) + ] + + _ -> + [] end end @@ -620,13 +804,21 @@ defmodule Explorer.Chain.Search do Try to resolve ENS domain via BENS """ @spec search_ens_name_in_bens(binary()) :: - nil | %{address_hash: binary(), expiry_date: any(), name: any(), names_count: non_neg_integer()} + nil + | {%{ + address_hash: binary(), + expiry_date: any(), + name: any(), + names_count: non_neg_integer(), + protocol: any() + }, Hash.Address.t()} def search_ens_name_in_bens(search_query) do trimmed_query = String.trim(search_query) with true <- Regex.match?(~r/\w+\.\w+/, trimmed_query), - %{address_hash: _address_hash} = result <- 
ens_domain_name_lookup(search_query) do - result + %{address_hash: address_hash_string} = result <- ens_domain_name_lookup(search_query), + {:ok, address_hash} <- Chain.string_to_address_hash(address_hash_string) do + {result, address_hash} else _ -> nil @@ -652,11 +844,11 @@ defmodule Explorer.Chain.Search do defp search_fields do %{ - address_hash: dynamic([_], type(^nil, :binary)), - transaction_hash: dynamic([_], type(^nil, :binary)), - user_operation_hash: dynamic([_], type(^nil, :binary)), - blob_hash: dynamic([_], type(^nil, :binary)), - block_hash: dynamic([_], type(^nil, :binary)), + address_hash: dynamic(type(^nil, :binary)), + transaction_hash: dynamic(type(^nil, :binary)), + user_operation_hash: dynamic(type(^nil, :binary)), + blob_hash: dynamic(type(^nil, :binary)), + block_hash: dynamic(type(^nil, :binary)), type: nil, name: nil, symbol: nil, @@ -665,7 +857,7 @@ defmodule Explorer.Chain.Search do block_number: 0, icon_url: nil, token_type: nil, - timestamp: dynamic([_, _], type(^nil, :utc_datetime_usec)), + timestamp: dynamic(type(^nil, :utc_datetime_usec)), verified: nil, certified: nil, exchange_rate: nil, @@ -675,4 +867,212 @@ defmodule Explorer.Chain.Search do is_verified_via_admin_panel: nil } end + + defp token_search_fields do + search_fields() + |> Map.put(:address_hash, dynamic([token: token], token.contract_address_hash)) + |> Map.put(:type, "token") + |> Map.put(:name, dynamic([token: token], token.name)) + |> Map.put(:symbol, dynamic([token: token], token.symbol)) + |> Map.put(:holder_count, dynamic([token: token], token.holder_count)) + |> Map.put(:inserted_at, dynamic([token: token], token.inserted_at)) + |> Map.put(:icon_url, dynamic([token: token], token.icon_url)) + |> Map.put(:token_type, dynamic([token: token], token.type)) + |> Map.put(:exchange_rate, dynamic([token: token], token.fiat_value)) + |> Map.put(:total_supply, dynamic([token: token], token.total_supply)) + |> Map.put(:circulating_market_cap, dynamic([token: token], token.circulating_market_cap)) + |> Map.put(:is_verified_via_admin_panel, dynamic([token: token], token.is_verified_via_admin_panel)) + |> Map.put(:verified, dynamic([smart_contract: smart_contract], not is_nil(smart_contract))) + |> Map.put(:certified, dynamic([smart_contract: smart_contract], smart_contract.certified)) + end + + @paginated_types ["label", "contract", "token"] + + defp trim_list_and_prepare_next_page_params(items, %PagingOptions{page_size: page_size, key: prev_options}, query) + when length(items) > page_size - 1 do + items = items |> Enum.drop(-1) + prev_options = prev_options || %{} + + base_params = + Map.merge( + %{"next_page_params_type" => "search", "q" => query}, + prev_options + ) + + {paging_options, _types} = + items + |> Enum.reverse() + |> Enum.reduce_while({base_params, @paginated_types}, fn + _item, {_paging_options, []} = acc -> + {:halt, acc} + + item, {paging_options, types} = acc -> + if item.type in types do + {:cont, {Map.put(paging_options, item.type, paging_params(item)), List.delete(types, item.type)}} + else + {:cont, acc} + end + end) + + {items, paging_options} + end + + defp trim_list_and_prepare_next_page_params(items, _paging_options, _query), do: {items, nil} + + defp paging_params(%{ + name: name, + inserted_at: inserted_at, + type: "label" + }) do + inserted_at_datetime = DateTime.to_iso8601(inserted_at) + + %{ + "name" => name, + "inserted_at" => inserted_at_datetime + } + end + + defp paging_params(%{ + circulating_market_cap: circulating_market_cap, + exchange_rate: exchange_rate, + 
is_verified_via_admin_panel: is_verified_via_admin_panel, + holder_count: holder_count, + name: name, + inserted_at: inserted_at, + type: "token" + }) do + inserted_at_datetime = DateTime.to_iso8601(inserted_at) + + %{ + "circulating_market_cap" => circulating_market_cap, + "fiat_value" => exchange_rate, + "is_verified_via_admin_panel" => is_verified_via_admin_panel, + "holder_count" => holder_count, + "name" => name, + "inserted_at" => inserted_at_datetime + } + end + + defp paging_params(%{ + certified: certified, + name: name, + inserted_at: inserted_at, + type: "contract" + }) do + inserted_at_datetime = DateTime.to_iso8601(inserted_at) + + %{ + "certified" => certified, + "name" => name, + "inserted_at" => inserted_at_datetime + } + end + + @doc """ + Parses paging options from the given parameters when the `next_page_params_type` is "search". + + ## Parameters + + - paging_params: A map containing the paging parameters, including "next_page_params_type". + + ## Returns + + A keyword list with paging options, where key is the map with the parsed paging options. + """ + @spec parse_paging_options(map()) :: [paging_options: PagingOptions.t()] + def parse_paging_options(%{"next_page_params_type" => "search"} = paging_params) do + key = + Enum.reduce(@paginated_types, %{}, fn type, acc -> + if Map.has_key?(paging_params, type) do + Map.put(acc, type, paging_options(paging_params[type])) + else + acc + end + end) + + [paging_options: %{default_paging_options() | key: key}] + end + + def parse_paging_options(_) do + [paging_options: default_paging_options()] + end + + defp paging_options(paging_options) when is_map(paging_options) do + paging_options + end + + defp paging_options(_), do: nil + + defp parse_possible_nil(""), do: nil + defp parse_possible_nil("null"), do: nil + defp parse_possible_nil(other), do: other + + @spec maybe_parse_filecoin_address(binary()) :: + :ignore + | {:ok, Explorer.Chain.Filecoin.IDAddress.t()} + | {:ok, Explorer.Chain.Filecoin.NativeAddress.t()} + | :error + def maybe_parse_filecoin_address(string) + + if @chain_type == :filecoin do + def maybe_parse_filecoin_address(string) do + # credo:disable-for-lines:2 Credo.Check.Design.AliasUsage + id_address_result = Explorer.Chain.Filecoin.IDAddress.cast(string) + native_address_result = Explorer.Chain.Filecoin.NativeAddress.cast(string) + + cond do + match?({:ok, _id_address}, id_address_result) -> + id_address_result + + match?({:ok, _native_address}, native_address_result) -> + native_address_result + + true -> + :error + end + end + else + def maybe_parse_filecoin_address(_), do: :ignore + end + + @spec address_by_filecoin_id_or_robust( + Explorer.Chain.Filecoin.IDAddress.t() + | Explorer.Chain.Filecoin.NativeAddress.t() + ) :: Ecto.Query.t() | nil + def address_by_filecoin_id_or_robust(address) + + if @chain_type == :filecoin do + def address_by_filecoin_id_or_robust(%Explorer.Chain.Filecoin.IDAddress{} = id) do + base_filecoin_address_query() + |> where([address], address.filecoin_id == ^id) + end + + def address_by_filecoin_id_or_robust(%Explorer.Chain.Filecoin.NativeAddress{} = robust) do + base_filecoin_address_query() + |> where([address], address.filecoin_robust == ^robust) + end + + defp base_filecoin_address_query do + address_search_fields = + search_fields() + |> Map.put(:address_hash, dynamic([address: address], address.hash)) + |> Map.put(:type, "address") + |> Map.put(:name, dynamic([address_name: address_name], address_name.name)) + |> Map.put(:inserted_at, dynamic([address: address], 
address.inserted_at))
+        |> Map.put(:verified, dynamic([address: address], address.verified))
+        |> Map.put(:certified, dynamic([smart_contract: smart_contract], smart_contract.certified))
+
+      base_address_query()
+      |> join(
+        :left,
+        [address: address],
+        address_name in Address.Name,
+        on: address.hash == address_name.address_hash,
+        as: :address_name
+      )
+      |> select(^address_search_fields)
+    end
+  else
+    def address_by_filecoin_id_or_robust(_), do: nil
+  end
 end
diff --git a/apps/explorer/lib/explorer/chain/smart_contract.ex b/apps/explorer/lib/explorer/chain/smart_contract.ex
index e0c587e8ec31..be3b0c77b5e9 100644
--- a/apps/explorer/lib/explorer/chain/smart_contract.ex
+++ b/apps/explorer/lib/explorer/chain/smart_contract.ex
@@ -62,7 +62,7 @@
     field(:license_type, Ecto.Enum, values: @license_enum, default: :none)
     field(:certified, :boolean)
     field(:is_blueprint, :boolean)
-    field(:language, Ecto.Enum, values: [solidity: 1, vyper: 2, yul: 3, stylus_rust: 4], default: :solidity)
+    field(:language, Ecto.Enum, values: @languages_enum, default: :solidity)

     has_many(
       :decompiled_smart_contracts,
@@ -136,7 +136,12 @@
   @burn_address_hash_string "0x0000000000000000000000000000000000000000"
   @dead_address_hash_string "0x000000000000000000000000000000000000dEaD"

-  @required_attrs ~w(compiler_version optimization address_hash contract_code_md5 language)a
+  @default_required_attrs ~w(optimization address_hash contract_code_md5 language)a
+  @chain_type_required_attrs (case @chain_type do
+                                :zilliqa -> ~w()a
+                                _ -> ~w(compiler_version)a
+                              end)
+  @required_attrs @default_required_attrs ++ @chain_type_required_attrs

   @optional_common_attrs ~w(name contract_source_code evm_version optimization_runs constructor_arguments verified_via_sourcify verified_via_eth_bytecode_db verified_via_verifier_alliance partially_verified file_path is_vyper_contract is_changed_bytecode bytecode_checked_at autodetect_constructor_args license_type certified is_blueprint)a
@@ -150,10 +155,19 @@
       :arbitrum ->
         ~w(package_name github_repository_metadata)a

+      :zilliqa ->
+        ~w(compiler_version)a
+
       _ ->
         ~w()a
     end)

+  @chain_type_attrs_for_validation ~w(contract_source_code)a ++
+                                     (case @chain_type do
+                                        :zilliqa -> ~w()a
+                                        _ -> ~w(name)a
+                                      end)
+
   @create_zksync_abi [
     %{
       "inputs" => [
@@ -179,6 +193,27 @@
     }
   ]

+  @default_languages ~w(solidity vyper yul stylus_rust)a
+  @chain_type_languages (case @chain_type do
+                           :zilliqa ->
+                             ~w(scilla)a
+
+                           _ ->
+                             ~w()a
+                         end)
+
+  @languages @default_languages ++ @chain_type_languages
+  @languages_enum @languages |> Enum.with_index(1)
+  @language_string_to_atom @languages |> Map.new(&{to_string(&1), &1})
+
+  @doc """
+  Returns a map from language name strings to the corresponding language atoms
+  supported by the database schema.
+  """
+  @spec language_string_to_atom() :: %{String.t() => atom()}
+  def language_string_to_atom do
+    @language_string_to_atom
+  end
+
   @doc """
   Returns burn address hash
   """
@@ -343,9 +378,9 @@
   * `"outputs" - `t:list/0` of `t:output/0`.
   * `"stateMutability"` - `t:state_mutability/0`
   * `"payable"` - `t:payable/0`.
-  **WARNING:** Deprecated and will be removed in the future. Use `"stateMutability"` instead.
+  **WARNING:** Deprecated and will be removed in the future. Use `"stateMutability"` instead.
   * `"constant"` - `t:constant/0`.
-  **WARNING:** Deprecated and will be removed in the future. Use `"stateMutability"` instead.
+ **WARNING:** Deprecated and will be removed in the future. Use `"stateMutability"` instead. """ @type function_description :: %{ String.t() => @@ -445,7 +480,9 @@ defmodule Explorer.Chain.SmartContract do @optional_changeset_attrs ++ @chain_type_optional_attrs - required_for_validation = [:name, :contract_source_code] ++ @required_attrs + required_for_validation = + @required_attrs ++ + @chain_type_attrs_for_validation smart_contract |> cast(attrs, attrs_to_cast) @@ -1322,6 +1359,8 @@ defmodule Explorer.Chain.SmartContract do ) end + defp filter_contracts(basic_query, nil), do: basic_query + defp filter_contracts(basic_query, :solidity) do basic_query |> where(is_vyper_contract: ^false) @@ -1336,7 +1375,11 @@ defmodule Explorer.Chain.SmartContract do from(query in basic_query, where: is_nil(query.abi)) end - defp filter_contracts(basic_query, _), do: basic_query + defp filter_contracts(basic_query, language) do + from(query in basic_query, + where: query.language == ^language + ) + end @doc """ Retrieves the constructor arguments for a zkSync smart contract. diff --git a/apps/explorer/lib/explorer/chain/smart_contract/proxy/verification_status.ex b/apps/explorer/lib/explorer/chain/smart_contract/proxy/verification_status.ex index 641dd1c6a4c5..ac483648e67e 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract/proxy/verification_status.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract/proxy/verification_status.ex @@ -8,6 +8,7 @@ defmodule Explorer.Chain.SmartContract.Proxy.VerificationStatus do import Ecto.Changeset alias Explorer.Chain.Hash + alias Explorer.Chain.SmartContract.Proxy.Models.Implementation alias Explorer.{Chain, Repo} @typep status :: integer() | atom() @@ -109,13 +110,8 @@ defmodule Explorer.Chain.SmartContract.Proxy.VerificationStatus do @doc """ Sets proxy verification result """ - @spec set_proxy_verification_result({[String.t()] | :empty | :error, [String.t()] | :empty | :error}, String.t()) :: - __MODULE__.t() - def set_proxy_verification_result({empty_or_error, _}, uid) when empty_or_error in [:empty, :error], - do: update_status(uid, :fail) + @spec set_proxy_verification_result(Implementation.t() | :empty | :error, String.t()) :: __MODULE__.t() + def set_proxy_verification_result(%Implementation{}, uid), do: update_status(uid, :pass) - def set_proxy_verification_result({[], _}, uid), - do: update_status(uid, :fail) - - def set_proxy_verification_result({_, _}, uid), do: update_status(uid, :pass) + def set_proxy_verification_result(_empty_or_error, uid), do: update_status(uid, :fail) end diff --git a/apps/explorer/lib/explorer/chain/token/instance.ex b/apps/explorer/lib/explorer/chain/token/instance.ex index 4dc9d44aae9c..ae23a490d7da 100644 --- a/apps/explorer/lib/explorer/chain/token/instance.ex +++ b/apps/explorer/lib/explorer/chain/token/instance.ex @@ -9,6 +9,7 @@ defmodule Explorer.Chain.Token.Instance do alias Explorer.Chain.{Address, Hash, Token, TokenTransfer} alias Explorer.Chain.Address.CurrentTokenBalance alias Explorer.Chain.Token.Instance + alias Explorer.Chain.Token.Instance.Thumbnails alias Explorer.PagingOptions @timeout 60_000 @@ -21,6 +22,9 @@ defmodule Explorer.Chain.Token.Instance do * `refetch_after` - when to refetch the token instance * `retries_count` - number of times the token instance has been retried * `is_banned` - if the token instance is banned + * `thumbnails` - info for deriving thumbnails urls. Stored as array: [file_path, sizes, original_uploaded?] 
+  * `media_type` - MIME type of the media
+  * `cdn_upload_error` - error raised while processing (resizing) or uploading the media to the CDN
   """
   @primary_key false
   typed_schema "token_instances" do
@@ -34,6 +38,9 @@
     field(:refetch_after, :utc_datetime_usec)
     field(:retries_count, :integer)
     field(:is_banned, :boolean, default: false)
+    field(:thumbnails, Thumbnails)
+    field(:media_type, :string)
+    field(:cdn_upload_error, :string)

     belongs_to(:owner, Address, foreign_key: :owner_address_hash, references: :hash, type: Hash.Address)
@@ -62,7 +69,10 @@
       :owner_updated_at_log_index,
       :refetch_after,
       :retries_count,
-      :is_banned
+      :is_banned,
+      :thumbnails,
+      :media_type,
+      :cdn_upload_error
     ])
     |> validate_required([:token_id, :token_contract_address_hash])
     |> foreign_key_constraint(:token_contract_address_hash)
@@ -626,7 +636,7 @@
   @doc """
   Sets set_metadata for the given Explorer.Chain.Token.Instance
   """
-  @spec set_metadata(__MODULE__, map()) :: {non_neg_integer(), nil}
+  @spec set_metadata(t(), map()) :: {non_neg_integer(), nil}
   def set_metadata(token_instance, metadata) when is_map(metadata) do
     now = DateTime.utc_now()
@@ -635,7 +645,7 @@
         where: instance.token_contract_address_hash == ^token_instance.token_contract_address_hash,
         where: instance.token_id == ^token_instance.token_id
       ),
-      [set: [metadata: metadata, error: nil, updated_at: now]],
+      [set: [metadata: metadata, error: nil, updated_at: now, thumbnails: nil, media_type: nil, cdn_upload_error: nil]],
       timeout: @timeout
     )
   end
@@ -681,4 +691,183 @@
     end) && interval
   end) || 13
   end
+
+  @doc """
+  Retrieves the media URL from the given NFT metadata.
+
+  ## Parameters
+
+  - metadata: A map containing the metadata of the NFT.
+
+  ## Returns
+
+  - The media URL as a string if found in the metadata, otherwise `nil`.
+
+  ## Examples
+
+      iex> metadata = %{"image" => "https://example.com/image.png"}
+      iex> get_media_url_from_metadata_for_nft_media_handler(metadata)
+      "https://example.com/image.png"
+
+      iex> metadata = %{"animation_url" => "https://example.com/animation.mp4"}
+      iex> get_media_url_from_metadata_for_nft_media_handler(metadata)
+      "https://example.com/animation.mp4"
+
+      iex> metadata = %{}
+      iex> get_media_url_from_metadata_for_nft_media_handler(metadata)
+      nil
+  """
+  @spec get_media_url_from_metadata_for_nft_media_handler(nil | map()) :: nil | binary()
+  def get_media_url_from_metadata_for_nft_media_handler(metadata) when is_map(metadata) do
+    result =
+      cond do
+        is_binary(metadata["image_url"]) ->
+          metadata["image_url"]
+
+        is_binary(metadata["image"]) ->
+          metadata["image"]
+
+        is_map(metadata["properties"]) && is_binary(metadata["properties"]["image"]) ->
+          metadata["properties"]["image"]
+
+        is_binary(metadata["animation_url"]) ->
+          metadata["animation_url"]
+
+        true ->
+          nil
+      end
+
+    if result && String.trim(result) == "", do: nil, else: result
+  end
+
+  def get_media_url_from_metadata_for_nft_media_handler(nil), do: nil
+
+  @doc """
+  Sets the media URLs for a given token.
+
+  ## Parameters
+
+  - `token_contract_address_hash`: The hash of the token contract address.
+  - `token_id`: The ID of the token.
+  - `urls`: A list in the `Explorer.Chain.Token.Instance.Thumbnails` format.
+  - `media_type`: The type of media associated with the URLs.
+ + ## Examples + + iex> set_media_urls({"0x1234", 1}, ["/folder_1/0004dfda159ea2def5098bf8f19f5f27207f4e1f_{}.png", [60, 250, 500], true], {"image", "png"}) + :ok + + """ + @spec set_media_urls({Hash.Address.t(), non_neg_integer() | Decimal.t()}, list(), {binary(), binary()}) :: + any() + def set_media_urls({token_contract_address_hash, token_id}, urls, media_type) do + now = DateTime.utc_now() + + token_id + |> token_instance_query(token_contract_address_hash) + |> Repo.update_all( + [set: [thumbnails: urls, media_type: media_type_to_string(media_type), updated_at: now]], + timeout: @timeout + ) + end + + @doc """ + Sets the CDN upload error for a given token. + + ## Parameters + + - `token_contract_address_hash`: The hash of the token contract address. + - `token_id`: The ID of the token. + - `error`: The error message to be set. + + ## Examples + + iex> set_cdn_upload_error({"0x1234", 1}, "Upload failed") + :ok + + """ + @spec set_cdn_upload_error({Hash.Address.t(), non_neg_integer() | Decimal.t()}, binary()) :: any() + def set_cdn_upload_error({token_contract_address_hash, token_id}, error) do + now = DateTime.utc_now() + + token_id + |> token_instance_query(token_contract_address_hash) + |> Repo.update_all( + [set: [cdn_upload_error: error, updated_at: now]], + timeout: @timeout + ) + end + + @doc """ + Streams instances that need to be resized and uploaded. + + ## Parameters + + - each_fun: A function to be applied to each instance. + """ + @spec stream_instances_to_resize_and_upload((t() -> any())) :: any() + def stream_instances_to_resize_and_upload(each_fun) do + __MODULE__ + |> where([ti], not is_nil(ti.metadata) and is_nil(ti.thumbnails) and is_nil(ti.cdn_upload_error)) + |> Repo.stream_each(each_fun) + end + + @doc """ + Sets the CDN result for a given token. + + ## Parameters + + - `token_contract_address_hash`: The hash of the token contract address. + - `token_id`: The ID of the token. + - `params`: A map containing the parameters for the CDN result. + + ## Returns + + - The result of setting the CDN for the given token instance. + + """ + @spec set_cdn_result({Hash.Address.t(), non_neg_integer() | Decimal.t()}, %{ + :cdn_upload_error => any(), + :media_type => any(), + :thumbnails => any() + }) :: any() + def set_cdn_result({token_contract_address_hash, token_id}, %{ + thumbnails: thumbnails, + media_type: media_type, + cdn_upload_error: cdn_upload_error + }) do + now = DateTime.utc_now() + + token_id + |> token_instance_query(token_contract_address_hash) + |> Repo.update_all( + [ + set: [ + cdn_upload_error: cdn_upload_error, + thumbnails: thumbnails, + media_type: media_type, + updated_at: now + ] + ], + timeout: @timeout + ) + end + + @doc """ + Converts a media type tuple to a string. + + ## Parameters + - media_type: A tuple containing two binaries representing the media type. + + ## Returns + - A non-empty binary string representation of the media type. 
+ + ## Examples + iex> media_type_to_string({"image", "png"}) + "image/png" + """ + @spec media_type_to_string({binary(), binary()}) :: nonempty_binary() + def media_type_to_string({type, subtype}) do + "#{type}/#{subtype}" + end end diff --git a/apps/explorer/lib/explorer/chain/token/instance/media_urls.ex b/apps/explorer/lib/explorer/chain/token/instance/media_urls.ex new file mode 100644 index 000000000000..26f1bc2e972f --- /dev/null +++ b/apps/explorer/lib/explorer/chain/token/instance/media_urls.ex @@ -0,0 +1,51 @@ +defmodule Explorer.Chain.Token.Instance.Thumbnails do + @moduledoc """ + Module defines thumbnails type for token instances + """ + use Ecto.Type + + @type t :: {String.t(), [integer()], boolean()} + + def type, do: :map + + def cast([file_path, sizes, original_uploaded?]) + when is_binary(file_path) and is_list(sizes) and is_boolean(original_uploaded?) do + if Enum.all?(sizes, &is_integer/1) do + {:ok, [file_path, sizes, original_uploaded?]} + else + :error + end + end + + def cast(_), do: :error + + def load([file_path, sizes, original_uploaded?]) do + uri = + Application.get_env(:ex_aws, :s3)[:public_r2_url] |> URI.parse() |> URI.append_path(file_path) |> URI.to_string() + + thumbnails = + sizes + |> Enum.map(fn size -> + key = "#{size}x#{size}" + {key, String.replace(uri, "{}", key)} + end) + |> Enum.into(%{}) + + {:ok, + if original_uploaded? do + key = "original" + Map.put(thumbnails, key, String.replace(uri, "{}", key)) + else + thumbnails + end} + end + + def load(_), do: :error + + def dump([file_path, sizes, original_uploaded?]) + when is_binary(file_path) and is_list(sizes) and is_boolean(original_uploaded?) do + {:ok, [file_path, sizes, original_uploaded?]} + end + + def dump(_), do: :error +end diff --git a/apps/explorer/lib/explorer/chain/transaction.ex b/apps/explorer/lib/explorer/chain/transaction.ex index 23f116b5487c..09a341864ef6 100644 --- a/apps/explorer/lib/explorer/chain/transaction.ex +++ b/apps/explorer/lib/explorer/chain/transaction.ex @@ -561,7 +561,7 @@ defmodule Explorer.Chain.Transaction do iex> changeset.valid? true - A collated transaction MUST have an `index` so its position in the `block` is known and the `cumulative_gas_used` ane + A collated transaction MUST have an `index` so its position in the `block` is known and the `cumulative_gas_used` and `gas_used` to know its fees. Post-Byzantium, the status must be present when a block is collated. diff --git a/apps/explorer/lib/explorer/chain/transaction/history/historian.ex b/apps/explorer/lib/explorer/chain/transaction/history/historian.ex index fe09465b7abe..901ec9223dc6 100644 --- a/apps/explorer/lib/explorer/chain/transaction/history/historian.ex +++ b/apps/explorer/lib/explorer/chain/transaction/history/historian.ex @@ -15,17 +15,58 @@ defmodule Explorer.Chain.Transaction.History.Historian do @behaviour Historian + @typedoc """ + Chain performance stats for a specific date. + """ + @type dated_record :: %{ + required(:date) => Date.t(), + required(:number_of_transactions) => non_neg_integer(), + required(:gas_used) => non_neg_integer(), + required(:total_fee) => non_neg_integer() + } + + # Chain performance stats. + @typep record :: %{ + number_of_transactions: non_neg_integer(), + gas_used: non_neg_integer(), + total_fee: non_neg_integer() + } + @impl Historian + @doc """ + Compiles transaction statistics for a specified number of days. 
+
+  This function recursively collects daily transaction statistics, starting
+  from the earliest date in the range and moving forward towards the current
+  date. The current day's stats are set to zero to avoid presenting incomplete
+  data.
+
+  The function attempts to find the appropriate block range for each day and
+  compile statistics. If block range determination fails, it employs a fallback
+  method or sets the day's stats to zero.
+
+  ## Parameters
+  - `num_days`: The number of days to compile records for.
+  - `records`: An accumulator for the compiled records. Defaults to an empty list.
+
+  ## Returns
+  - `{:ok, [dated_record()]}`: A list of daily transaction statistics on success.
+  - `:error`: If an unrecoverable error occurs during compilation.
+  """
+  @spec compile_records(non_neg_integer(), [dated_record()]) :: {:ok, [dated_record()]} | :error
   def compile_records(num_days, records \\ []) do
     Logger.info("tx/per day chart: collect records for transactions per day stats")

     if num_days == 1 do
       Logger.info("tx/per day chart: records collected #{inspect(records)}")

+      # The recursion is finished, and the stats for the current day are set to zero
+      # to avoid presenting incomplete data.
       records = [%{date: date_today(), number_of_transactions: 0, gas_used: 0, total_fee: 0} | records]

-      # base case
       {:ok, records}
     else
+      # Calculate the date for which the stats are required by subtracting the specified
+      # number of days from the current moment.
      day_to_fetch = Date.add(date_today(), -1 * (num_days - 1))

       earliest = datetime(day_to_fetch, ~T[00:00:00])
@@ -37,8 +78,11 @@

       from_api = false

+      # Try to identify the block range for the given day
       with {:ok, min_block} <- Chain.timestamp_to_block_number(earliest, :after, from_api),
            {:ok, max_block} <- Chain.timestamp_to_block_number(latest, :before, from_api) do
+        # Collect stats for the block range covering the given day and add the
+        # day's date to the record.
         record =
           min_block
           |> compile_records_in_range(max_block)
@@ -49,6 +93,8 @@
           | records
         ]

+        # Recursive calls collect stats for each subsequent day until stats for
+        # the specified number of days have been gathered.
         compile_records(num_days - 1, records)
       else
         _ ->
@@ -56,6 +102,8 @@
             "tx/per day chart: timestamp cannot be converted to min/max blocks, trying to find min/max blocks through a fallback option}"
           )

+          # This fallback approach to identifying the block range for the given day
+          # does not take the consensus information in the blocks into account.
           min_max_block_query =
             from(block in Block,
               where: block.timestamp >= ^earliest and block.timestamp <= ^latest,
@@ -64,6 +112,8 @@

           case Repo.one(min_max_block_query, timeout: :infinity) do
             {min_block, max_block} when not is_nil(min_block) and not is_nil(max_block) ->
+              # Collect stats for the block range covering the given day and add
+              # the day's date to the record.
               record =
                 min_block
                 |> compile_records_in_range(max_block)
@@ -74,9 +124,13 @@
                 | records
               ]

+              # Recursive calls collect stats for each subsequent day until stats
+              # for the specified number of days have been gathered.
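+              # For example (an illustrative sketch): compile_records(30) compiles
+              # stats for the days from 29 days ago up to yesterday and finally
+              # prepends a zeroed record for today, yielding 30 dated records.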
              compile_records(num_days - 1, records)

            _ ->
+              # If it is not possible to identify the block range for the given day,
+              # the stats for the day are set to zero.
              Logger.warning("tx/per day chart: failed to get min/max blocks through a fallback option}")
              records = [%{date: day_to_fetch, number_of_transactions: 0, gas_used: 0, total_fee: 0} | records]
              compile_records(num_days - 1, records)
@@ -85,9 +139,29 @@
     end
   end

+  # Compiles transaction statistics for a given block range.
+  #
+  # This function aggregates data from transactions within the specified block
+  # range, considering only blocks with consensus. It calculates the number of
+  # transactions, total gas used, and total transaction fees.
+  #
+  # The function adapts its query strategy based on whether transaction
+  # denormalization has been completed, optimizing for performance in both cases.
+  #
+  # ## Parameters
+  # - `min_block`: The lower bound of the block range (inclusive).
+  # - `max_block`: The upper bound of the block range (inclusive).
+  #
+  # ## Returns
+  # A map containing the following keys:
+  # - `:number_of_transactions`: The total number of transactions in the range.
+  # - `:gas_used`: The total amount of gas used by all transactions in the range.
+  # - `:total_fee`: The sum of all transaction fees in the range.
+  @spec compile_records_in_range(non_neg_integer(), non_neg_integer()) :: record()
   defp compile_records_in_range(min_block, max_block) do
     Logger.info("tx/per day chart: min/max block numbers [#{min_block}, #{max_block}]")

+    # Build a query to retrieve all transactions in the given block range
     all_transactions_query =
       if DenormalizationHelper.transactions_denormalization_finished?() do
         from(
@@ -103,6 +177,7 @@
         )
       end

+    # Build a query to retrieve all blocks in the given block range with consensus set to true
     all_blocks_query =
       from(
         block in Block,
@@ -111,6 +186,9 @@
         select: block.number
       )

+    # Not needed if the block consensus information is already part of the transaction
+    # data. Otherwise, we restrict the query to transactions contained in blocks with
+    # consensus set to true.
     query =
       if DenormalizationHelper.transactions_denormalization_finished?() do
         all_transactions_query
@@ -122,11 +200,14 @@
         )
       end

+    # Number of transactions in the given block range
     num_transactions = Repo.aggregate(query, :count, :hash, timeout: :infinity)
     Logger.info("tx/per day chart: num of transactions #{num_transactions}")

+    # Total gas used in the given block range
     gas_used = Repo.aggregate(query, :sum, :gas_used, timeout: :infinity)
     Logger.info("tx/per day chart: total gas used #{gas_used}")

+    # Build a query to retrieve the total fee in the given block range
     total_fee_query =
       if DenormalizationHelper.transactions_denormalization_finished?() do
         from(transaction in subquery(all_transactions_query),
@@ -141,6 +222,7 @@
         )
       end

+    # Total fee in the given block range
     total_fee = Repo.one(total_fee_query, timeout: :infinity)
     Logger.info("tx/per day chart: total fee #{total_fee}")

@@ -148,6 +230,20 @@
   end

   @impl Historian
+  @doc """
+  Saves transaction statistics records to the database.
+
+  This function bulk inserts or updates the provided transaction statistics
+  records into the database. 
After saving the records, it broadcasts + a `:transaction_stats` event to notify subscribers of the update. + + ## Parameters + - `records`: A list of `dated_record()` structs containing transaction statistics. + + ## Returns + - The number of records inserted or updated. + """ + @spec save_records([dated_record()]) :: non_neg_integer() def save_records(records) do Logger.info("tx/per day chart: save records") @@ -160,12 +256,15 @@ defmodule Explorer.Chain.Transaction.History.Historian do num_inserted end + # Converts a given date and time to a UTC DateTime @spec datetime(Date.t(), Time.t()) :: DateTime.t() defp datetime(date, time) do {_success?, naive_dt} = NaiveDateTime.new(date, time) DateTime.from_naive!(naive_dt, "Etc/UTC") end + # Returns today's date in UTC, using configured value or current date as fallback. + @spec date_today() :: Date.t() defp date_today do HistoryProcess.config_or_default(:utc_today, Date.utc_today(), __MODULE__) end diff --git a/apps/explorer/lib/explorer/chain/transaction/history/transaction_stats.ex b/apps/explorer/lib/explorer/chain/transaction/history/transaction_stats.ex index 594d66bbae4a..f1a7f6eb67ff 100644 --- a/apps/explorer/lib/explorer/chain/transaction/history/transaction_stats.ex +++ b/apps/explorer/lib/explorer/chain/transaction/history/transaction_stats.ex @@ -1,6 +1,6 @@ defmodule Explorer.Chain.Transaction.History.TransactionStats do @moduledoc """ - Represents daily transaction numbers. + Represents daily chain performance stats """ import Ecto.Query, only: [from: 2] @@ -15,7 +15,7 @@ defmodule Explorer.Chain.Transaction.History.TransactionStats do ]} @typedoc """ - The recorded values of the number of transactions for a single day. + The recorded values of the chain performance stats for a single day. * `:date` - The date in UTC. * `:number_of_transactions` - Number of transactions processed by the vm for a given date. * `:gas_used` - Gas used in transactions per single day @@ -28,7 +28,25 @@ defmodule Explorer.Chain.Transaction.History.TransactionStats do field(:total_fee, :decimal) end - @spec by_date_range(Date.t(), Date.t()) :: [__MODULE__] + @doc """ + Retrieves transaction statistics within a specified date range. + + This function queries the database for transaction statistics recorded between + the given earliest and latest dates, inclusive. The results are ordered by + date in descending order. + + ## Parameters + - `earliest`: The start date of the range to query (inclusive). + - `latest`: The end date of the range to query (inclusive). + - `options`: Optional keyword list of options used to select the repo for the + query. + + ## Returns + A list of `Explorer.Chain.Transaction.History.TransactionStats` structs, + each representing the transaction statistics for a single day within the + specified range. + """ + @spec by_date_range(Date.t(), Date.t(), keyword()) :: [__MODULE__] def by_date_range(earliest, latest, options \\ []) do # Create a query query = diff --git a/apps/explorer/lib/explorer/chain/zilliqa/helper.ex b/apps/explorer/lib/explorer/chain/zilliqa/helper.ex new file mode 100644 index 000000000000..748ae1f1e059 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/zilliqa/helper.ex @@ -0,0 +1,18 @@ +defmodule Explorer.Chain.Zilliqa.Helper do + @moduledoc """ + Common helper functions for Zilliqa. + """ + + alias Explorer.Chain.Transaction + + @scilla_transactions_type 907_376 + + @doc """ + Checks if a transaction is a Scilla transaction. + + Scilla transactions have `type` set to #{@scilla_transactions_type}. 
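+
+  ## Examples
+
+      # An illustrative call; `907_376` is the `@scilla_transactions_type` constant above.
+      iex> Explorer.Chain.Zilliqa.Helper.scilla_transaction?(907_376)
+      true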
+ """ + @spec scilla_transaction?(Transaction.t() | integer()) :: boolean() + def scilla_transaction?(%Transaction{type: type}), do: scilla_transaction?(type) + def scilla_transaction?(type), do: type == @scilla_transactions_type +end diff --git a/apps/explorer/lib/explorer/chain/zilliqa/reader.ex b/apps/explorer/lib/explorer/chain/zilliqa/reader.ex new file mode 100644 index 000000000000..8a65532ee069 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/zilliqa/reader.ex @@ -0,0 +1,51 @@ +defmodule Explorer.Chain.Zilliqa.Reader do + @moduledoc """ + Reads Zilliqa-related data from the database. + """ + import Explorer.Chain, only: [add_fetcher_limit: 2] + import Ecto.Query, only: [from: 2] + + alias Explorer.Chain.{Address, Transaction} + alias Explorer.Repo + + @doc """ + Returns a stream of `t:Explorer.Chain.Address.t/0` for Scilla smart contracts + that should be displayed as verified. The stream yields unverified addresses + with fetched contract code created by transactions with `v` = `0`. + + ## Parameters + + - `initial`: The initial accumulator value for the stream. + - `reducer`: A function that processes each entry in the stream, receiving + the entry and the current accumulator, and returning a new accumulator. + - `limited?`: A boolean flag to indicate whether the result set should be + limited. Defaults to `false`. + + ## Returns + + - `{:ok, accumulator}`: The final accumulator value after streaming through + the unverified Scilla smart contract addresses. + """ + @spec stream_unverified_scilla_smart_contract_addresses( + initial :: accumulator, + reducer :: (entry :: term(), accumulator -> accumulator), + limited? :: boolean() + ) :: {:ok, accumulator} + when accumulator: term() + def stream_unverified_scilla_smart_contract_addresses(initial, reducer, limited? \\ false) + when is_function(reducer, 2) do + query = + from( + a in Address, + join: t in Transaction, + on: a.hash == t.created_contract_address_hash, + where: t.v == 0 and not is_nil(a.contract_code) and a.verified == false, + order_by: [desc: t.block_number], + select: a + ) + + query + |> add_fetcher_limit(limited?) + |> Repo.stream_reduce(initial, reducer) + end +end diff --git a/apps/explorer/lib/explorer/history/historian.ex b/apps/explorer/lib/explorer/history/historian.ex index 3fe41702c156..f0dcedeae27c 100644 --- a/apps/explorer/lib/explorer/history/historian.ex +++ b/apps/explorer/lib/explorer/history/historian.ex @@ -7,7 +7,8 @@ defmodule Explorer.History.Historian do Record of historical values for a specific date. """ @type record :: %{ - date: Date.t() + required(:date) => Date.t(), + optional(atom()) => any() } @doc """ diff --git a/apps/explorer/lib/explorer/history/process.ex b/apps/explorer/lib/explorer/history/process.ex index 79731459a346..3b9c190140d2 100644 --- a/apps/explorer/lib/explorer/history/process.ex +++ b/apps/explorer/lib/explorer/history/process.ex @@ -88,6 +88,19 @@ defmodule Explorer.History.Process do # Helper @typep milliseconds :: non_neg_integer() + @doc """ + Retrieves a configuration value from the `:explorer` application or returns a default if not set. + + ## Parameters + - `key`: The configuration key to look up. + - `default`: The default value to return if the configuration is not found. + - `module`: The module to look up the configuration for. Defaults to the + calling module. + + ## Returns + - The configuration value if found in the :explorer application settings, + otherwise the default value. 
+ """ @spec config_or_default(atom(), term(), module()) :: term() def config_or_default(key, default, module \\ __MODULE__) do Application.get_env(:explorer, module, [])[key] || default diff --git a/apps/explorer/lib/explorer/microservice_interfaces/bens.ex b/apps/explorer/lib/explorer/microservice_interfaces/bens.ex index 78bf8005b1dd..160ddeb69e61 100644 --- a/apps/explorer/lib/explorer/microservice_interfaces/bens.ex +++ b/apps/explorer/lib/explorer/microservice_interfaces/bens.ex @@ -84,7 +84,7 @@ defmodule Explorer.MicroserviceInterfaces.BENS do Request for ENS name via GET {{baseUrl}}/api/v1/:chainId/domains:lookup """ @spec ens_domain_name_lookup(binary()) :: - nil | %{address_hash: binary(), expiry_date: any(), name: any(), names_count: integer()} + nil | %{address_hash: binary(), expiry_date: any(), name: any(), names_count: integer(), protocol: any()} def ens_domain_name_lookup(domain) do domain |> ens_domain_lookup() |> parse_lookup_response() end @@ -175,7 +175,12 @@ defmodule Explorer.MicroserviceInterfaces.BENS do %{ "items" => [ - %{"name" => name, "expiry_date" => expiry_date, "resolved_address" => %{"hash" => address_hash_string}} + %{ + "name" => name, + "expiry_date" => expiry_date, + "resolved_address" => %{"hash" => address_hash_string}, + "protocol" => protocol + } | _other ] = items }} @@ -186,7 +191,8 @@ defmodule Explorer.MicroserviceInterfaces.BENS do name: name, expiry_date: expiry_date, names_count: Enum.count(items), - address_hash: Address.checksum(hash) + address_hash: Address.checksum(hash), + protocol: protocol } end diff --git a/apps/explorer/lib/explorer/microservice_interfaces/metadata.ex b/apps/explorer/lib/explorer/microservice_interfaces/metadata.ex index cf7aa99203b6..2784581a26e2 100644 --- a/apps/explorer/lib/explorer/microservice_interfaces/metadata.ex +++ b/apps/explorer/lib/explorer/microservice_interfaces/metadata.ex @@ -17,6 +17,7 @@ defmodule Explorer.MicroserviceInterfaces.Metadata do @tags_per_address_limit 5 @page_size 50 @request_error_msg "Error while sending request to Metadata microservice" + @service_disabled "Service is disabled" @doc """ Retrieves tags for a list of addresses. 
@@ -54,13 +55,17 @@ defmodule Explorer.MicroserviceInterfaces.Metadata do """ @spec get_addresses(map()) :: {:error | integer(), any()} def get_addresses(params) do - with :ok <- Microservice.check_enabled(__MODULE__) do - params = - params - |> Map.put("page_size", @page_size) - |> Map.put("chain_id", Application.get_env(:block_scout_web, :chain_id)) + case Microservice.check_enabled(__MODULE__) do + :ok -> + params = + params + |> Map.put("page_size", @page_size) + |> Map.put("chain_id", Application.get_env(:block_scout_web, :chain_id)) + + http_get_request_for_proxy_method(addresses_url(), params, &prepare_addresses_response/1) - http_get_request_for_proxy_method(addresses_url(), params, &prepare_addresses_response/1) + _ -> + {501, %{error: @service_disabled}} end end @@ -84,7 +89,7 @@ defmodule Explorer.MicroserviceInterfaces.Metadata do end defp http_get_request_for_proxy_method(url, params, parsing_function) do - case HTTPoison.get(url, [], params: params) do + case HTTPoison.get(url, [], params: params, recv_timeout: config()[:proxy_requests_timeout]) do {:ok, %Response{body: body, status_code: 200}} -> {200, body |> Jason.decode() |> parsing_function.()} @@ -120,6 +125,10 @@ defmodule Explorer.MicroserviceInterfaces.Metadata do "#{Microservice.base_url(__MODULE__)}/api/v1" end + defp config do + Application.get_env(:explorer, __MODULE__) + end + @spec enabled?() :: boolean() def enabled?, do: Microservice.check_enabled(__MODULE__) == :ok diff --git a/apps/explorer/lib/explorer/microservice_interfaces/multichain_search.ex b/apps/explorer/lib/explorer/microservice_interfaces/multichain_search.ex new file mode 100644 index 000000000000..3365c282085c --- /dev/null +++ b/apps/explorer/lib/explorer/microservice_interfaces/multichain_search.ex @@ -0,0 +1,236 @@ +defmodule Explorer.MicroserviceInterfaces.MultichainSearch do + @moduledoc """ + Module to interact with Multichain search microservice + """ + + alias Ecto.Association.NotLoaded + alias Explorer.Chain.Cache.NetVersion + alias Explorer.Chain.{Address, Block, Hash, Transaction} + alias Explorer.Repo + alias Explorer.Utility.Microservice + alias HTTPoison.Response + + require Logger + + @addresses_chunk_size 7_000 + @max_concurrency 5 + @post_timeout :timer.minutes(5) + @request_error_msg "Error while sending request to Multichain Search Service" + + @doc """ + Performs a batch import of addresses, blocks, and transactions to the Multichain Search microservice. + + ## Parameters + - `params`: A map containing: + - `addresses`: List of address structs. + - `blocks`: List of block structs. + - `transactions`: List of transaction structs. + + ## Returns + - `{:ok, :service_disabled}`: If the integration with Multichain Search Service is disabled. + - `{:ok, result}`: If the import was successful. + - `{:error, reason}`: If an error occurred. 
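+
+  ## Example
+
+      # An illustrative sketch; when every chunk is exported successfully the
+      # call reduces to this tuple:
+      batch_import(%{addresses: addresses, blocks: blocks, transactions: transactions})
+      #=> {:ok, :chunks_processed}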
+ """ + @spec batch_import(%{ + addresses: [Address.t()], + blocks: [Block.t()], + transactions: [Transaction.t()] + }) :: {:error, :disabled | String.t() | Jason.DecodeError.t()} | {:ok, any()} + def batch_import(params) do + if enabled?() do + params_chunks = extract_batch_import_params_into_chunks(params) + + params_chunks + |> Task.async_stream( + fn body -> http_post_request(batch_import_url(), body) end, + max_concurrency: @max_concurrency, + timeout: @post_timeout + ) + |> Enum.reduce_while({:ok, :chunks_processed}, fn + {:ok, _result}, acc -> {:cont, acc} + {:exit, _reason}, _acc -> {:halt, {:error, @request_error_msg}} + {:error, _reason}, _acc -> {:halt, {:error, @request_error_msg}} + end) + else + {:ok, :service_disabled} + end + end + + defp http_post_request(url, body) do + headers = [{"Content-Type", "application/json"}] + + case HTTPoison.post(url, Jason.encode!(body), headers, recv_timeout: @post_timeout) do + {:ok, %Response{body: response_body, status_code: 200}} -> + response_body |> Jason.decode() + + {:ok, %Response{body: response_body, status_code: status_code}} -> + old_truncate = Application.get_env(:logger, :truncate) + Logger.configure(truncate: :infinity) + + Logger.error(fn -> + [ + "Error while sending request to microservice url: #{url}, ", + "status_code: #{inspect(status_code)}, ", + "response_body: #{inspect(response_body, limit: :infinity, printable_limit: :infinity)}, ", + "request_body: #{inspect(body |> Map.drop([:api_key]), limit: :infinity, printable_limit: :infinity)}" + ] + end) + + Logger.configure(truncate: old_truncate) + {:error, @request_error_msg} + + error -> + old_truncate = Application.get_env(:logger, :truncate) + Logger.configure(truncate: :infinity) + + Logger.error(fn -> + [ + "Error while sending request to microservice url: #{url}, request_body: #{inspect(body |> Map.drop([:api_key]), limit: :infinity, printable_limit: :infinity)}: ", + inspect(error, limit: :infinity, printable_limit: :infinity) + ] + end) + + Logger.configure(truncate: old_truncate) + {:error, @request_error_msg} + end + end + + defp extract_batch_import_params_into_chunks(%{ + addresses: raw_addresses, + blocks: blocks, + transactions: transactions + }) do + chain_id = NetVersion.get_version() + block_ranges = get_block_ranges(blocks) + + addresses = + raw_addresses + |> Repo.preload([:token, :smart_contract]) + |> Enum.map(fn address -> + %{ + hash: Hash.to_string(address.hash), + is_contract: !is_nil(address.contract_code), + is_verified_contract: address.verified, + is_token: token?(address.token), + ens_name: address.ens_domain_name, + token_name: get_token_name(address.token), + token_type: get_token_type(address.token), + contract_name: get_smart_contract_name(address.smart_contract) + } + end) + |> Enum.uniq() + + block_hashes = + blocks + |> Enum.map( + &%{ + hash: Hash.to_string(&1.hash), + hash_type: "BLOCK" + } + ) + + transaction_hashes = + transactions + |> Enum.map( + &%{ + hash: Hash.to_string(&1.hash), + hash_type: "TRANSACTION" + } + ) + + block_transaction_hashes = block_hashes ++ transaction_hashes + + indexed_addresses_chunks = + addresses + |> Enum.chunk_every(@addresses_chunk_size) + |> Enum.with_index() + + Enum.map(indexed_addresses_chunks, fn {addresses_chunk, index} -> + hashes = if index == 0, do: block_transaction_hashes, else: [] + + %{ + api_key: api_key(), + chain_id: to_string(chain_id), + addresses: addresses_chunk, + block_ranges: block_ranges, + hashes: hashes + } + end) + end + + defp token?(nil), do: false + + defp 
token?(%NotLoaded{}), do: false
+
+  defp token?(_), do: true
+
+  defp get_token_name(nil), do: nil
+
+  defp get_token_name(%NotLoaded{}), do: nil
+
+  defp get_token_name(token), do: token.name
+
+  defp get_smart_contract_name(nil), do: nil
+
+  defp get_smart_contract_name(%NotLoaded{}), do: nil
+
+  defp get_smart_contract_name(smart_contract), do: smart_contract.name
+
+  defp get_token_type(nil), do: "UNSPECIFIED"
+
+  defp get_token_type(%NotLoaded{}), do: "UNSPECIFIED"
+
+  defp get_token_type(token), do: token.type
+
+  defp get_block_ranges([]), do: []
+
+  defp get_block_ranges(blocks) do
+    {min_block_number, max_block_number} =
+      blocks
+      |> Enum.map(& &1.number)
+      |> Enum.min_max()
+
+    [
+      %{
+        min_block_number: to_string(min_block_number),
+        max_block_number: to_string(max_block_number)
+      }
+    ]
+  end
+
+  defp batch_import_url do
+    "#{base_url()}/import:batch"
+  end
+
+  defp base_url do
+    microservice_base_url = Microservice.base_url(__MODULE__)
+
+    if microservice_base_url do
+      "#{microservice_base_url}/api/v1"
+    else
+      nil
+    end
+  end
+
+  defp api_key do
+    Microservice.api_key(__MODULE__)
+  end
+
+  @doc """
+  Checks if the multichain search microservice is enabled.
+
+  This function determines if the multichain search microservice is enabled by
+  checking if the base URL is not nil.
+
+  ## Returns
+
+  - `true` if the base URL is not nil, `false` otherwise.
+
+  ## Examples
+
+      # The result depends on whether the service base URL is configured:
+      iex> Explorer.MicroserviceInterfaces.MultichainSearch.enabled?()
+      true
+
+      iex> Explorer.MicroserviceInterfaces.MultichainSearch.enabled?()
+      false
+  """
+  def enabled?, do: !is_nil(base_url())
+end
diff --git a/apps/explorer/lib/explorer/migrator/arbitrum_da_records_normalization.ex b/apps/explorer/lib/explorer/migrator/arbitrum_da_records_normalization.ex
new file mode 100644
index 000000000000..759e1b1d2b9e
--- /dev/null
+++ b/apps/explorer/lib/explorer/migrator/arbitrum_da_records_normalization.ex
@@ -0,0 +1,65 @@
+defmodule Explorer.Migrator.ArbitrumDaRecordsNormalization do
+  @moduledoc """
+  Normalizes batch-to-blob associations by moving them from arbitrum_da_multi_purpose to a dedicated
+  arbitrum_batches_to_da_blobs table, establishing proper one-to-many relationships between batches
+  and data blobs.
+  """
+
+  use Explorer.Migrator.FillingMigration
+
+  import Ecto.Query
+
+  alias Explorer.Chain.Arbitrum.{BatchToDaBlob, DaMultiPurposeRecord}
+  alias Explorer.Chain.Cache.BackgroundMigrations
+  alias Explorer.Migrator.FillingMigration
+  alias Explorer.Repo
+
+  @migration_name "arbitrum_da_records_normalization"
+
+  @impl FillingMigration
+  def migration_name, do: @migration_name
+
+  @impl FillingMigration
+  def last_unprocessed_identifiers(state) do
+    limit = batch_size() * concurrency()
+
+    data_keys =
+      unprocessed_data_query()
+      |> select([rec], {rec.data_key, rec.batch_number})
+      |> limit(^limit)
+      |> Repo.all(timeout: :infinity)
+
+    {data_keys, state}
+  end
+
+  @impl FillingMigration
+  def unprocessed_data_query do
+    # Finds batch-to-blob associations in arbitrum_da_multi_purpose that haven't been migrated yet
+    # to arbitrum_batches_to_da_blobs. Only considers records that have batch_number set.
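+    #
+    # Roughly the following SQL (a sketch; the table names come from the moduledoc above):
+    #
+    #   FROM arbitrum_da_multi_purpose rec
+    #   LEFT JOIN arbitrum_batches_to_da_blobs btd ON rec.data_key = btd.data_blob_id
+    #   WHERE rec.batch_number IS NOT NULL AND btd.batch_number IS NULL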
+ from(rec in DaMultiPurposeRecord, + left_join: btd in BatchToDaBlob, + on: rec.data_key == btd.data_blob_id, + where: not is_nil(rec.batch_number) and is_nil(btd.batch_number) + ) + end + + @impl FillingMigration + def update_batch(data_keys) do + records = + Enum.map(data_keys, fn {data_key, batch_number} -> + %{ + batch_number: batch_number, + data_blob_id: data_key, + inserted_at: DateTime.utc_now(), + updated_at: DateTime.utc_now() + } + end) + + Repo.insert_all(BatchToDaBlob, records, timeout: :infinity) + end + + @impl FillingMigration + def update_cache do + BackgroundMigrations.set_arbitrum_da_records_normalization_finished(true) + end +end diff --git a/apps/explorer/lib/explorer/migrator/backfill_multichain_search_db.ex b/apps/explorer/lib/explorer/migrator/backfill_multichain_search_db.ex new file mode 100644 index 000000000000..67ed5897ff6a --- /dev/null +++ b/apps/explorer/lib/explorer/migrator/backfill_multichain_search_db.ex @@ -0,0 +1,195 @@ +defmodule Explorer.Migrator.BackfillMultichainSearchDB do + @moduledoc """ + Copies existing data from Blockscout instance to Multichain Search DB instance. + """ + + require Logger + + use Explorer.Migrator.FillingMigration + + alias Explorer.Chain.{Address, Block, InternalTransaction, TokenTransfer, Transaction} + alias Explorer.Chain.Cache.{BackgroundMigrations, BlockNumber} + alias Explorer.MicroserviceInterfaces.MultichainSearch + alias Explorer.Migrator.FillingMigration + + import Ecto.Query + + @migration_name "backfill_multichain_search_db" + + @failed_to_fetch_data_error "Failed to fetch data from the Blockscout DB for batch export to the Multichain Search DB" + @failed_to_export_data_error "Batch export to the Multichain Search DB failed" + @for " for block numbers " + + @impl FillingMigration + def migration_name, do: @migration_name + + @impl FillingMigration + def last_unprocessed_identifiers(%{"max_block_number" => -1} = state), do: {[], state} + + def last_unprocessed_identifiers(%{"max_block_number" => from_block_number} = state) do + limit = batch_size() * concurrency() + to_block_number = max(from_block_number - limit + 1, 0) + + {Enum.to_list(from_block_number..to_block_number), %{state | "max_block_number" => to_block_number - 1}} + end + + def last_unprocessed_identifiers(state) do + query = + from( + migration_status in MigrationStatus, + where: migration_status.migration_name == @migration_name, + select: migration_status.meta + ) + + meta = Repo.one(query, timeout: :infinity) + + state + |> Map.put("max_block_number", (meta && meta["max_block_number"]) || BlockNumber.get_max()) + |> last_unprocessed_identifiers() + end + + @impl FillingMigration + def unprocessed_data_query, do: nil + + @impl FillingMigration + def update_batch(block_numbers) do + blocks_query = from(block in Block, where: block.number in ^block_numbers) + + blocks_preloads = [:miner] + + blocks_task = + Task.async(fn -> + blocks_query + |> preload(^blocks_preloads) + |> Repo.all(timeout: :infinity) + end) + + case Task.yield(blocks_task, :infinity) do + {:ok, blocks} -> + transaction_preloads = [:from_address, :to_address, :created_contract_address] + + transactions_query = from(transaction in Transaction, where: transaction.block_number in ^block_numbers) + + transactions_task = + Task.async(fn -> + transactions_query + |> preload(^transaction_preloads) + |> Repo.all(timeout: :infinity) + end) + + block_hashes = blocks |> Enum.map(& &1.hash) + + internal_transactions_query = + from(internal_transaction in InternalTransaction, where: 
internal_transaction.block_hash in ^block_hashes) + + internal_transactions_task = + Task.async(fn -> + internal_transactions_query + |> preload(^transaction_preloads) + |> Repo.all(timeout: :infinity) + end) + + token_transfer_preloads = [:from_address, :to_address, :token_contract_address] + + token_transfers_query = + from(token_transfer in TokenTransfer, where: token_transfer.block_number in ^block_numbers) + + token_transfers_task = + Task.async(fn -> + token_transfers_query + |> preload(^token_transfer_preloads) + |> Repo.all(timeout: :infinity) + end) + + tasks = [ + transactions_task, + internal_transactions_task, + token_transfers_task + ] + + case tasks + |> Task.yield_many(:infinity) do + [ + {_transactions_task, {:ok, transactions}}, + {_internal_transactions_task, {:ok, internal_transactions}}, + {_token_transfers_task, {:ok, token_transfers}} + ] -> + addresses = + [ + transactions, + internal_transactions, + token_transfers, + blocks + ] + |> List.flatten() + |> Enum.reduce([], fn result, addresses_acc -> + # credo:disable-for-next-line Credo.Check.Refactor.Nesting + extract_address_from_result(result) ++ addresses_acc + end) + |> Enum.uniq() + |> Enum.reject(&is_nil/1) + + to_import = %{ + addresses: addresses, + blocks: blocks, + transactions: transactions + } + + # credo:disable-for-next-line Credo.Check.Refactor.Nesting + case MultichainSearch.batch_import(to_import) do + {:ok, _} = result -> + result + + {:error, _} -> + Logger.error(fn -> + ["#{@failed_to_export_data_error}", "#{@for}", "#{inspect(block_numbers)}"] + end) + + :timer.sleep(1000) + + update_batch(block_numbers) + end + + _ -> + repeat_block_numbers_processing_on_error(block_numbers) + end + + _ -> + repeat_block_numbers_processing_on_error(block_numbers) + end + end + + defp repeat_block_numbers_processing_on_error(block_numbers) do + Logger.error(fn -> + ["#{@failed_to_fetch_data_error}", "#{@for}", "#{inspect(block_numbers)}"] + end) + + :timer.sleep(1000) + + update_batch(block_numbers) + end + + @impl FillingMigration + def update_cache do + BackgroundMigrations.set_backfill_multichain_search_db_finished(true) + end + + @spec extract_address_from_result(Transaction.t() | InternalTransaction.t() | TokenTransfer.t() | Block.t()) :: [ + Address.t() + ] + defp extract_address_from_result(result) do + case result do + %Transaction{} -> + [result.from_address, result.to_address, result.created_contract_address] + + %InternalTransaction{} -> + [result.from_address, result.to_address, result.created_contract_address] + + %TokenTransfer{} -> + [result.from_address, result.to_address, result.token_contract_address] + + %Block{} -> + [result.miner] + end + end +end diff --git a/apps/explorer/lib/explorer/migrator/filling_migration.ex b/apps/explorer/lib/explorer/migrator/filling_migration.ex index 5d46272ca0c6..0c8785be6072 100644 --- a/apps/explorer/lib/explorer/migrator/filling_migration.ex +++ b/apps/explorer/lib/explorer/migrator/filling_migration.ex @@ -1,14 +1,156 @@ defmodule Explorer.Migrator.FillingMigration do @moduledoc """ - Template for creating migrations that fills some fields in existing entities + Provides a behaviour and implementation for data migration tasks that fill or update + fields in existing database entities or migrates data to another storages (e.g. + multichain search DB) + + This module defines a template for creating migrations that can process entities in + batches with parallel execution. 
It implements a GenServer that manages the + migration lifecycle and automatically saves migration progress regularly. + + Key features: + - Batch processing with configurable batch size + - Parallel execution with configurable concurrency + - State persistence and ability to automatically resume after interruption + - Integration with Explorer.Chain.Cache.BackgroundMigrations for status tracking + + ## Migration State Management + The migration's state is persisted in the database as part of the migration status + record. This allows migrations to resume from their last checkpoint after system + restarts or interruptions. The state is updated after each successful batch + processing. + + ## Cache Integration + The module integrates with Explorer.Chain.Cache.BackgroundMigrations, which + maintains an in-memory cache of migration completion statuses. This cache is + crucial for: + - Quick status checks during application startup + - Performance-critical operations that need to verify migration completion + - Avoiding frequent database queries for migration status + + ## Configuration + Modules using this behaviour can be configured in the application config: + + ```elixir + config :explorer, MyMigrationModule, + batch_size: 500, # Number of entities per batch (default: 500) + concurrency: 16, # Number of parallel tasks (default: 4 * schedulers_online) + timeout: 0 # Delay between batches in ms (default: 0) + ``` + + The migration process will: + 1. Start and check if already completed + 2. Execute pre-migration tasks via `before_start/0` + 3. Process entities in batches using parallel tasks + 4. Checkpoint progress after each batch in the database + 5. Execute post-migration tasks via `on_finish/0` + 6. Update completion status in both database and in-memory cache """ + @doc """ + Returns the name of the migration. The name is used to track the migration's status in + `Explorer.Migrator.MigrationStatus`. + """ @callback migration_name :: String.t() + + @doc """ + This callback defines a query to identify unprocessed entities. While defined as a + callback in the `FillingMigration` behaviour, it is not directly used by the + behaviour itself. Instead, it is called by `last_unprocessed_identifiers/1` in + modules implementing `FillingMigration` to build the query for retrieving + unprocessed entities. The query should not include any LIMIT clauses, as the + limiting is handled within `last_unprocessed_identifiers/1`. + """ @callback unprocessed_data_query :: Ecto.Query.t() | nil + + @doc """ + This callback retrieves the next batch of data for migration processing. It returns + a list of entity identifiers that have not yet been processed. The number of + identifiers returned should allow each migration task (limited by `concurrency()`) + to process no more than `batch_size()` entities. + + The callback is invoked periodically based on the timeout configuration parameter + specified in the application config for the module implementing the `FillingMigration` + behaviour. If the timeout is not specified, it defaults to 0. + + ## Parameters + - `state`: The current state of the migration process. + + ## Returns + A tuple containing: + - List of unprocessed entity identifiers + - Updated state map (or unchanged state if the identifiers did not trigger a state + change) + + The updated state map is stored in the database as part of the structure that + tracks the migration process. When the server restarts, the migration will + resume from the last saved state. 
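+
+  For illustration, a sketch of a block-driven implementation (the
+  `"max_block_number"` state key is an assumption of this example, not part of
+  the behaviour; compare `Explorer.Migrator.BackfillMultichainSearchDB` above):
+
+      @impl FillingMigration
+      def last_unprocessed_identifiers(%{"max_block_number" => from} = state) do
+        limit = batch_size() * concurrency()
+        to = max(from - limit + 1, 0)
+        {Enum.to_list(from..to), %{state | "max_block_number" => to - 1}}
+      end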
+ """ @callback last_unprocessed_identifiers(map()) :: {[any()], map()} + + @doc """ + This callback performs the migration for a batch of entities. After collecting + identifiers, the callback processes a batch of size `batch_size()`. A total of + `concurrency()` callbacks run in parallel as separate tasks, and the system + waits for all callbacks to complete. Since no timeout is specified for tasks + invoking this callback, implementations should complete within a reasonable + time period. + + After all callback tasks finish, the system schedules gathering of the next + batch of identifiers according to the timeout configuration parameter in the + application config for modules implementing the `FillingMigration` behaviour. + + ## Parameters + - `batch`: The list of identifiers to process. While this could theoretically + be a list of entities, using identifiers is preferred to minimize memory + usage during migration. + + ## Returns + N/A + """ @callback update_batch([any()]) :: any() + + @doc """ + This callback updates the migration completion status in the cache. + + The callback is invoked in two scenarios: + - When the migration is already marked as completed during process initialization + - When the migration finishes processing all entities + + The implementation updates the in-memory cache that tracks migration completion + status, which is used during application startup and by performance-critical + operations to quickly determine if specific data migrations have been completed. + Some migrations may not require cache updates if their completion status does not + affect system operations. + + ## Returns + N/A + """ @callback update_cache :: any() + + @doc """ + This callback executes custom logic after all migration batches have been processed. + + The callback runs just before the migration is marked as completed in the database. + Implementing modules can override this callback to perform any final cleanup or + post-migration tasks. The default implementation returns `:ignore`. + + ## Returns + - `:ignore` by default + """ @callback on_finish :: any() + + @doc """ + This callback executes custom logic when the migration process initializes. + + The callback runs after the migration is marked as "started" but before the first + batch processing begins. Implementing modules can override this callback to perform + any necessary setup or pre-migration tasks. The default implementation returns + `:ignore`. + + ## Returns + - `:ignore` by default + """ @callback before_start :: any() defmacro __using__(_opts) do @@ -28,6 +170,14 @@ defmodule Explorer.Migrator.FillingMigration do GenServer.start_link(__MODULE__, :ok, name: __MODULE__) end + @doc """ + Checks if the current migration has been completed. + + ## Returns + - `true` if the migration is completed + - `false` otherwise + """ + @spec migration_finished? :: boolean() def migration_finished? do MigrationStatus.get_status(migration_name()) == "completed" end @@ -37,6 +187,22 @@ defmodule Explorer.Migrator.FillingMigration do {:ok, %{}, {:continue, :ok}} end + # Called once when the GenServer starts to initialize the migration process by checking its + # current status and taking appropriate action. + # + # If the migration is already completed, updates the in-memory cache and stops normally. + # Otherwise, marks the migration as started, executes pre-migration tasks via + # before_start/0, and schedules the first batch with no delay. 
The migration process
+  # continues with the state that was saved during the previous run - this allows
+  # resuming long-running migrations from where they were interrupted.
+  #
+  # ## Parameters
+  # - `state`: The current state of the GenServer
+  #
+  # ## Returns
+  # - `{:stop, :normal, state}` if migration is completed
+  # - `{:noreply, state}` to continue with migration, where state is restored from the
+  #   previous run or initialized as an empty map
   @impl true
   def handle_continue(:ok, state) do
     case MigrationStatus.fetch(migration_name()) do
@@ -47,11 +213,33 @@ defmodule Explorer.Migrator.FillingMigration do
       migration_status ->
         MigrationStatus.set_status(migration_name(), "started")
         before_start()
-        schedule_batch_migration()
+        schedule_batch_migration(0)
         {:noreply, (migration_status && migration_status.meta) || %{}}
     end
   end
 
+  # Processes a batch of unprocessed identifiers for migration.
+  #
+  # Retrieves the next batch of unprocessed identifiers and processes them in parallel.
+  # If no identifiers remain, executes cleanup tasks and completes the migration.
+  # Otherwise, processes the batch and continues migration.
+  #
+  # When identifiers are found, the function splits them into chunks and processes each
+  # chunk by spawning a task that calls update_batch. It waits for all tasks to complete
+  # with no timeout limit. After processing, it checkpoints the state so it can be
+  # reused after a restart, then schedules the next batch processing using the
+  # configured timeout from the application config (defaults to 0 ms if not set).
+  #
+  # When no more identifiers are found, the function performs final cleanup by calling
+  # the optional on_finish callback, refreshes the in-memory cache via update_cache,
+  # and marks the migration as completed.
+  #
+  # ## Parameters
+  # - `state`: Current migration state containing progress information
+  #
+  # ## Returns
+  # - `{:stop, :normal, new_state}` when migration is complete
+  # - `{:noreply, new_state}` when more batches remain to be processed
   @impl true
   def handle_info(:migrate_batch, state) do
     case last_unprocessed_identifiers(state) do
@@ -75,10 +263,19 @@ defmodule Explorer.Migrator.FillingMigration do
     end
   end
 
+  @spec run_task([any()]) :: any()
   defp run_task(batch), do: Task.async(fn -> update_batch(batch) end)
 
-  defp schedule_batch_migration do
-    Process.send(self(), :migrate_batch, [])
+  # Schedules the next batch migration by sending a delayed :migrate_batch message.
+  #
+  # ## Parameters
+  # - `timeout`: Optional delay in milliseconds before sending the message. If nil,
+  #   uses the configured timeout from application config, defaulting to 0.
+  #
+  # ## Returns
+  # - Reference to the scheduled timer
+  defp schedule_batch_migration(timeout \\ nil) do
+    Process.send_after(self(), :migrate_batch, timeout || Application.get_env(:explorer, __MODULE__)[:timeout] || 0)
   end
 
   defp batch_size do
diff --git a/apps/explorer/lib/explorer/migrator/migration_status.ex b/apps/explorer/lib/explorer/migrator/migration_status.ex
index ac1fe8f5d141..0a311afb350c 100644
--- a/apps/explorer/lib/explorer/migrator/migration_status.ex
+++ b/apps/explorer/lib/explorer/migrator/migration_status.ex
@@ -6,6 +6,12 @@ defmodule Explorer.Migrator.MigrationStatus do
 
   alias Explorer.Repo
 
+  @typedoc """
+  The structure of a migration status record:
+  * `migration_name` - The name of the migration.
+  * `status` - The current status of the migration.
+  * `meta` - The metadata of the migration (e.g. saved progress state).
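+
+  For example, the backfill migration above checkpoints its progress through
+  `meta`, so a persisted record might look like (illustrative values):
+
+      %Explorer.Migrator.MigrationStatus{
+        migration_name: "backfill_multichain_search_db",
+        status: "started",
+        meta: %{"max_block_number" => 12_345}
+      }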
+ """ @primary_key false typed_schema "migrations_status" do field(:migration_name, :string, primary_key: true) @@ -73,6 +79,15 @@ defmodule Explorer.Migrator.MigrationStatus do end end + # Builds a query to filter migration status records by migration name. + # + # ## Parameters + # - `query`: The base query to build upon, defaults to the module itself + # - `migration_name`: The name of the migration to filter by + # + # ## Returns + # - An `Ecto.Query` that filters records where migration_name matches the provided value + @spec get_by_migration_name_query(Ecto.Queryable.t(), String.t()) :: Ecto.Query.t() defp get_by_migration_name_query(query \\ __MODULE__, migration_name) do from(ms in query, where: ms.migration_name == ^migration_name) end diff --git a/apps/explorer/lib/explorer/migrator/sanitize_incorrect_nft_token_transfers.ex b/apps/explorer/lib/explorer/migrator/sanitize_incorrect_nft_token_transfers.ex index 81eaa1ac5bba..cabcc65a675b 100644 --- a/apps/explorer/lib/explorer/migrator/sanitize_incorrect_nft_token_transfers.ex +++ b/apps/explorer/lib/explorer/migrator/sanitize_incorrect_nft_token_transfers.ex @@ -35,7 +35,7 @@ defmodule Explorer.Migrator.SanitizeIncorrectNFTTokenTransfers do _ -> MigrationStatus.set_status(@migration_name, "started") - schedule_batch_migration() + schedule_batch_migration(0) {:noreply, %{step: :delete}} end end @@ -117,8 +117,8 @@ defmodule Explorer.Migrator.SanitizeIncorrectNFTTokenTransfers do Block.set_refetch_needed(block_numbers) end - defp schedule_batch_migration do - Process.send(self(), :migrate_batch, []) + defp schedule_batch_migration(timeout \\ nil) do + Process.send_after(self(), :migrate_batch, timeout || Application.get_env(:explorer, __MODULE__)[:timeout]) end defp batch_size do diff --git a/apps/explorer/lib/explorer/migrator/sanitize_incorrect_weth_token_transfers.ex b/apps/explorer/lib/explorer/migrator/sanitize_incorrect_weth_token_transfers.ex index 9bd33e3444a2..635305ccfc31 100644 --- a/apps/explorer/lib/explorer/migrator/sanitize_incorrect_weth_token_transfers.ex +++ b/apps/explorer/lib/explorer/migrator/sanitize_incorrect_weth_token_transfers.ex @@ -35,7 +35,7 @@ defmodule Explorer.Migrator.SanitizeIncorrectWETHTokenTransfers do _ -> MigrationStatus.set_status(@migration_name, "started") - schedule_batch_migration() + schedule_batch_migration(0) {:noreply, %{step: :delete_duplicates}} end end @@ -138,8 +138,8 @@ defmodule Explorer.Migrator.SanitizeIncorrectWETHTokenTransfers do |> Repo.query!([], timeout: :infinity) end - defp schedule_batch_migration do - Process.send(self(), :migrate_batch, []) + defp schedule_batch_migration(timeout \\ nil) do + Process.send_after(self(), :migrate_batch, timeout || Application.get_env(:explorer, __MODULE__)[:timeout]) end defp batch_size do diff --git a/apps/explorer/lib/explorer/prometheus/collector/active_db_connections.ex b/apps/explorer/lib/explorer/prometheus/collector/active_db_connections.ex new file mode 100644 index 000000000000..24ea428651f0 --- /dev/null +++ b/apps/explorer/lib/explorer/prometheus/collector/active_db_connections.ex @@ -0,0 +1,50 @@ +defmodule Explorer.Prometheus.Collector.ActiveDbConnections do + @moduledoc """ + Custom collector to count number of currently active DB connections. 
+ """ + + use Prometheus.Collector + + alias Prometheus.Model + + def collect_mf(_registry, callback) do + callback.(create_gauge(:active_db_connections, "Number of active DB connections", get_active_connections_count())) + end + + def collect_metrics(:active_db_connections, count) do + Model.gauge_metrics([{count}]) + end + + defp create_gauge(name, help, data) do + Model.create_mf(name, help, :gauge, __MODULE__, data) + end + + defp get_active_connections_count do + :explorer + |> Application.get_env(:ecto_repos) + |> Enum.reduce(0, fn repo, count -> + repo_count = + case Process.whereis(repo) do + nil -> + 0 + + _pid -> + # credo:disable-for-next-line Credo.Check.Design.AliasUsage + repo_params = Ecto.Repo.Registry.lookup(repo) + pool = repo_params.pid + pool_size = repo_params.opts[:pool_size] + ready_connections_count = get_ready_connections_count(pool) + + pool_size - ready_connections_count + end + + count + repo_count + end) + end + + defp get_ready_connections_count(pool) do + pool + |> DBConnection.get_connection_metrics() + |> Enum.reduce(0, fn %{ready_conn_count: ready_conn_count}, acc -> ready_conn_count + acc end) + end +end diff --git a/apps/explorer/lib/explorer/prometheus/instrumenter.ex b/apps/explorer/lib/explorer/prometheus/instrumenter.ex index 9eed93f68ffd..87d66dae64cc 100644 --- a/apps/explorer/lib/explorer/prometheus/instrumenter.ex +++ b/apps/explorer/lib/explorer/prometheus/instrumenter.ex @@ -13,6 +13,13 @@ defmodule Explorer.Prometheus.Instrumenter do help: "Block import stage, runner and step in runner processing time" ] + @histogram [ + name: :media_processing_time, + buckets: :default, + duration_unit: :seconds, + help: "Time in seconds taken for media resizing and uploading" + ] + @gauge [ name: :success_transactions_number, help: "Number of successful transactions in the period (default is 1 day)", @@ -56,6 +63,18 @@ defmodule Explorer.Prometheus.Instrumenter do registry: :public ] + @counter [ + name: :successfully_uploaded_media_number, + help: "Number of successfully uploaded media to CDN", + registry: :public + ] + + @counter [ + name: :failed_uploading_media_number, + help: "Number of failed uploading media to CDN", + registry: :public + ] + def block_import_stage_runner(function, stage, runner, step) do {time, result} = :timer.tc(function) @@ -68,6 +87,14 @@ defmodule Explorer.Prometheus.Instrumenter do Gauge.set([name: :success_transactions_number, registry: :public], number) end + def media_processing_time(seconds) do + Histogram.observe([name: :media_processing_time], seconds) + end + + def weekly_success_transactions_number(number) do + Gauge.set([name: :weekly_success_transactions_number, registry: :public], number) + end + def deployed_smart_contracts_number(number) do Gauge.set([name: :deployed_smart_contracts_number, registry: :public], number) end @@ -91,4 +118,12 @@ defmodule Explorer.Prometheus.Instrumenter do def simplified_active_addresses_number(number) do Gauge.set([name: :active_addresses_number, registry: :public], number) end + + def increment_successfully_uploaded_media_number do + Counter.inc(name: :successfully_uploaded_media_number, registry: :public) + end + + def increment_failed_uploading_media_number do + Counter.inc(name: :failed_uploading_media_number, registry: :public) + end end diff --git a/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex b/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex index 908f90241a09..294e8fc80aca 100644 --- 
diff --git a/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex b/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex
index 908f90241a09..294e8fc80aca 100644
--- a/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex
+++ b/apps/explorer/lib/explorer/smart_contract/solidity/publisher.ex
@@ -382,32 +382,31 @@ defmodule Explorer.SmartContract.Solidity.Publisher do
 
     compiler_version = CompilerVersion.get_strict_compiler_version(:solc, params["compiler_version"])
 
-    base_attributes =
-      %{
-        address_hash: address_hash,
-        name: params["name"],
-        file_path: params["file_path"],
-        compiler_version: compiler_version,
-        evm_version: params["evm_version"],
-        optimization_runs: params["optimization_runs"],
-        optimization: params["optimization"],
-        contract_source_code: params["contract_source_code"],
-        constructor_arguments: clean_constructor_arguments,
-        external_libraries: prepared_external_libraries,
-        secondary_sources: params["secondary_sources"],
-        abi: abi,
-        verified_via_sourcify: params["verified_via_sourcify"] || false,
-        verified_via_eth_bytecode_db: params["verified_via_eth_bytecode_db"] || false,
-        verified_via_verifier_alliance: params["verified_via_verifier_alliance"] || false,
-        partially_verified: params["partially_verified"] || false,
-        is_vyper_contract: false,
-        autodetect_constructor_args: params["autodetect_constructor_args"],
-        is_yul: params["is_yul"] || false,
-        compiler_settings: clean_compiler_settings,
-        license_type: prepare_license_type(params["license_type"]) || :none,
-        is_blueprint: params["is_blueprint"] || false,
-        language: (is_nil(abi) && :yul) || :solidity
-      }
+    base_attributes = %{
+      address_hash: address_hash,
+      name: params["name"],
+      file_path: params["file_path"],
+      compiler_version: compiler_version,
+      evm_version: params["evm_version"],
+      optimization_runs: params["optimization_runs"],
+      optimization: params["optimization"],
+      contract_source_code: params["contract_source_code"],
+      constructor_arguments: clean_constructor_arguments,
+      external_libraries: prepared_external_libraries,
+      secondary_sources: params["secondary_sources"],
+      abi: abi,
+      verified_via_sourcify: params["verified_via_sourcify"] || false,
+      verified_via_eth_bytecode_db: params["verified_via_eth_bytecode_db"] || false,
+      verified_via_verifier_alliance: params["verified_via_verifier_alliance"] || false,
+      partially_verified: params["partially_verified"] || false,
+      is_vyper_contract: false,
+      autodetect_constructor_args: params["autodetect_constructor_args"],
+      is_yul: params["is_yul"] || false,
+      compiler_settings: clean_compiler_settings,
+      license_type: prepare_license_type(params["license_type"]) || :none,
+      is_blueprint: params["is_blueprint"] || false,
+      language: (is_nil(abi) && :yul) || :solidity
+    }
 
     base_attributes
     |> (&if(Application.get_env(:explorer, :chain_type) == :zksync,
diff --git a/apps/explorer/lib/explorer/third_party_integrations/noves_fi.ex b/apps/explorer/lib/explorer/third_party_integrations/noves_fi.ex
index c51f590982d3..bede506841d5 100644
--- a/apps/explorer/lib/explorer/third_party_integrations/noves_fi.ex
+++ b/apps/explorer/lib/explorer/third_party_integrations/noves_fi.ex
@@ -79,11 +79,11 @@ defmodule Explorer.ThirdPartyIntegrations.NovesFi do
   end
 
   @doc """
-  Noves.fi /evm/:chain/transactions/:address_hash endpoint
+  Noves.fi /evm/:chain/txs/:address_hash endpoint
   """
   @spec address_transactions_url(String.t()) :: String.t()
   def address_transactions_url(address_hash_string) do
-    "#{base_url()}/evm/#{chain_name()}/transactions/#{address_hash_string}"
+    "#{base_url()}/evm/#{chain_name()}/txs/#{address_hash_string}"
  end
 
   defp base_url do
diff --git a/apps/explorer/lib/explorer/token/instance_owner_reader.ex
b/apps/explorer/lib/explorer/token/instance_owner_reader.ex deleted file mode 100644 index 52b74778649b..000000000000 --- a/apps/explorer/lib/explorer/token/instance_owner_reader.ex +++ /dev/null @@ -1,76 +0,0 @@ -defmodule Explorer.Token.InstanceOwnerReader do - @moduledoc """ - Reads Token Instance owner using Smart Contract function from the blockchain. - """ - - require Logger - - alias Explorer.SmartContract.Reader - - @owner_function_signature "6352211e" - - @owner_function_abi [ - %{ - "type" => "function", - "stateMutability" => "view", - "payable" => false, - "outputs" => [ - %{ - "type" => "address", - "name" => "owner" - } - ], - "name" => "ownerOf", - "inputs" => [ - %{ - "type" => "uint256", - "name" => "tokenId" - } - ] - } - ] - - @spec get_owner_of([%{token_contract_address_hash: String.t(), token_id: integer}]) :: [ - {:ok, String.t()} | {:error, String.t()} - ] - def get_owner_of(instance_owner_requests) do - instance_owner_requests - |> Enum.map(&format_owner_request/1) - |> Reader.query_contracts(@owner_function_abi) - |> Enum.zip(instance_owner_requests) - |> Enum.reduce([], fn {result, request}, acc -> - case format_owner_result(result, request) do - {:ok, ok_result} -> - [ok_result] ++ acc - - {:error, error_message} -> - Logger.error( - "Failed to get owner of token #{request.token_contract_address_hash}, token_id #{request.token_id}, reason: #{error_message}" - ) - - acc - end - end) - end - - defp format_owner_request(%{token_contract_address_hash: token_contract_address_hash, token_id: token_id}) do - %{ - contract_address: token_contract_address_hash, - method_id: @owner_function_signature, - args: [token_id] - } - end - - defp format_owner_result({:ok, [owner]}, request) do - {:ok, - %{ - token_contract_address_hash: request.token_contract_address_hash, - token_id: request.token_id, - owner: owner - }} - end - - defp format_owner_result({:error, error_message}, _request) do - {:error, error_message} - end -end diff --git a/apps/explorer/lib/explorer/token/metadata_retriever.ex b/apps/explorer/lib/explorer/token/metadata_retriever.ex index 9381fe9ff0ee..2a7e04fe02c8 100644 --- a/apps/explorer/lib/explorer/token/metadata_retriever.ex +++ b/apps/explorer/lib/explorer/token/metadata_retriever.ex @@ -14,7 +14,7 @@ defmodule Explorer.Token.MetadataRetriever do @no_uri_error "no uri" @vm_execution_error "VM execution error" @invalid_base64_data "invalid data:application/json;base64" - @default_headers [{"User-Agent", "blockscout-6.9.2"}] + @default_headers [{"User-Agent", "blockscout-6.10.1"}] # https://eips.ethereum.org/EIPS/eip-1155#metadata @erc1155_token_id_placeholder "{id}" @@ -356,6 +356,7 @@ defmodule Explorer.Token.MetadataRetriever do contract_functions |> handle_invalid_strings(contract_address_hash) |> handle_large_strings + |> limit_decimals end defp atomized_key(@name_signature), do: :name @@ -437,12 +438,39 @@ defmodule Explorer.Token.MetadataRetriever do defp handle_large_string(string, _size), do: string + defp limit_decimals(%{decimals: decimals} = contract_functions) do + if decimals > 78 do + %{contract_functions | decimals: nil} + else + contract_functions + end + end + + defp limit_decimals(contract_functions), do: contract_functions + defp remove_null_bytes(string) do String.replace(string, "\0", "") end + @doc """ + Generates an IPFS link for the given unique identifier (UID). + + ## Parameters + + - uid: The unique identifier for which the IPFS link is to be generated. + + ## Returns + + - A string representing the IPFS link for the given UID. 
+
+  ## Examples
+
+      iex> ipfs_link("QmTzQ1N1z5Q1N1z5Q1N1z5Q1N1z5Q1N1z5Q1N1z5")
+      "https://ipfs.io/ipfs/QmTzQ1N1z5Q1N1z5Q1N1z5Q1N1z5Q1N1z5Q1N1z5"
+
+  """
   @spec ipfs_link(uid :: any()) :: String.t()
-  defp ipfs_link(uid) do
+  def ipfs_link(uid) do
     base_url =
       :indexer
       |> Application.get_env(:ipfs)
@@ -467,8 +495,20 @@ defmodule Explorer.Token.MetadataRetriever do
     end
   end
 
+  @doc """
+  Returns the headers required for making requests to IPFS.
+
+  ## Examples
+
+      iex> Explorer.Token.MetadataRetriever.ipfs_headers()
+      [
+        {"User-Agent", "blockscout-6.10.1"},
+        {"Authorization", "Bearer "}
+      ]
+
+  """
   @spec ipfs_headers() :: [{binary(), binary()}]
-  defp ipfs_headers do
+  def ipfs_headers do
     ipfs_params = Application.get_env(:indexer, :ipfs)
 
     if ipfs_params[:gateway_url_param_location] == :header do
diff --git a/apps/explorer/lib/explorer/utility/microservice.ex b/apps/explorer/lib/explorer/utility/microservice.ex
index ecdec3134e4b..744bbec70a70 100644
--- a/apps/explorer/lib/explorer/utility/microservice.ex
+++ b/apps/explorer/lib/explorer/utility/microservice.ex
@@ -28,7 +28,7 @@ defmodule Explorer.Utility.Microservice do
   @doc """
   Returns :ok if Application.get_env(:explorer, module)[:enabled] is true (module is enabled)
   """
-  @spec check_enabled(atom) :: :ok | {:error, :disabled}
+  @spec check_enabled(atom(), atom()) :: :ok | {:error, :disabled}
   def check_enabled(application \\ :explorer, module) do
     if Application.get_env(application, module)[:enabled] && base_url(application, module) do
       :ok
@@ -36,4 +36,18 @@ defmodule Explorer.Utility.Microservice do
       {:error, :disabled}
     end
   end
+
+  @doc """
+  Retrieves the API key for the microservice.
+
+  ## Examples
+
+      iex> Explorer.Utility.Microservice.api_key(:explorer, Explorer.MicroserviceInterfaces.MultichainSearch)
+      "your_api_key_here"
+
+  """
+  @spec api_key(atom(), atom()) :: String.t() | nil
+  def api_key(application \\ :explorer, module) do
+    Application.get_env(application, module)[:api_key]
+  end
 end
diff --git a/apps/explorer/mix.exs b/apps/explorer/mix.exs
index 9a76c73d8e0c..78c0a7318ffb 100644
--- a/apps/explorer/mix.exs
+++ b/apps/explorer/mix.exs
@@ -15,7 +15,7 @@ defmodule Explorer.Mixfile do
         plt_add_apps: ~w(ex_unit mix)a,
         ignore_warnings: "../../.dialyzer-ignore"
       ],
-      elixir: "~> 1.13",
+      elixir: "~> 1.17",
       elixirc_paths: elixirc_paths(Mix.env()),
       lockfile: "../../mix.lock",
       package: package(),
@@ -24,7 +24,7 @@ defmodule Explorer.Mixfile do
         dialyzer: :test
       ],
       start_permanent: Mix.env() == :prod,
-      version: "6.9.2",
+      version: "6.10.1",
       xref: [exclude: [BlockScoutWeb.Routers.WebRouter.Helpers, Indexer.Helper]]
     ]
   end
@@ -89,7 +89,7 @@ defmodule Explorer.Mixfile do
       {:math, "~> 0.7.0"},
       {:mock, "~> 0.3.0", only: [:test], runtime: false},
       {:mox, "~> 1.0"},
-      {:phoenix_html, "== 3.0.4"},
+      {:phoenix_html, "== 3.3.4"},
       {:poison, "~> 4.0.1"},
       {:nimble_csv, "~> 1.1"},
       {:postgrex, ">= 0.0.0"},
diff --git a/apps/explorer/priv/account/migrations/20241128100836_remove_abused_custom_abis.exs b/apps/explorer/priv/account/migrations/20241128100836_remove_abused_custom_abis.exs
new file mode 100644
index 000000000000..4c33d0c8ea38
--- /dev/null
+++ b/apps/explorer/priv/account/migrations/20241128100836_remove_abused_custom_abis.exs
@@ -0,0 +1,19 @@
+defmodule Explorer.Repo.Account.Migrations.RemoveAbusedCustomAbis do
+  use Ecto.Migration
+
+  def up do
+    execute("""
+    WITH ranked_abis AS (SELECT id,
+                                identity_id,
+                                ROW_NUMBER() OVER (
+                                    PARTITION BY identity_id
+                                    ) as row_number
+                         FROM account_custom_abis)
+    DELETE
+    FROM account_custom_abis
+    WHERE id IN (SELECT id
+                 FROM ranked_abis
+                 WHERE row_number > 15)
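+    -- Note: ROW_NUMBER() without ORDER BY makes the choice of surviving
+    -- rows arbitrary; only the cap of 15 custom ABIs per identity is fixed.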
""") + end +end diff --git a/apps/explorer/priv/account/migrations/20241204093817_remove_abused_public_tags_request.exs b/apps/explorer/priv/account/migrations/20241204093817_remove_abused_public_tags_request.exs new file mode 100644 index 000000000000..65a0ab542753 --- /dev/null +++ b/apps/explorer/priv/account/migrations/20241204093817_remove_abused_public_tags_request.exs @@ -0,0 +1,19 @@ +defmodule Explorer.Repo.Account.Migrations.RemoveAbusedPublicTagsRequest do + use Ecto.Migration + + def up do + execute(""" + WITH ranked_public_tags_requests AS (SELECT id, + identity_id, + ROW_NUMBER() OVER ( + PARTITION BY identity_id + ) as row_number + FROM account_public_tags_requests) + DELETE + FROM account_public_tags_requests + WHERE id IN (SELECT id + FROM ranked_public_tags_requests + WHERE row_number > 15) + """) + end +end diff --git a/apps/explorer/priv/arbitrum/migrations/20241217155103_add_data_blobs_to_batches_table.exs b/apps/explorer/priv/arbitrum/migrations/20241217155103_add_data_blobs_to_batches_table.exs new file mode 100644 index 000000000000..a1a03458284c --- /dev/null +++ b/apps/explorer/priv/arbitrum/migrations/20241217155103_add_data_blobs_to_batches_table.exs @@ -0,0 +1,35 @@ +defmodule Explorer.Repo.Arbitrum.Migrations.AddDataBlobsToBatchesTable do + use Ecto.Migration + + def change do + create table(:arbitrum_batches_to_da_blobs, primary_key: false) do + add( + :batch_number, + references(:arbitrum_l1_batches, + column: :number, + on_delete: :delete_all, + on_update: :update_all, + type: :integer + ), + null: false, + primary_key: true + ) + + add( + :data_blob_id, + references(:arbitrum_da_multi_purpose, + column: :data_key, + on_delete: :delete_all, + on_update: :update_all, + type: :bytea + ), + null: false + ) + + timestamps(null: false, type: :utc_datetime_usec) + end + + # Create index for efficient lookups by data_blob_id + create(index(:arbitrum_batches_to_da_blobs, [:data_blob_id])) + end +end diff --git a/apps/explorer/priv/filecoin/migrations/20241220110454_add_filecoin_addresses_indexes.exs b/apps/explorer/priv/filecoin/migrations/20241220110454_add_filecoin_addresses_indexes.exs new file mode 100644 index 000000000000..331a0b1620bb --- /dev/null +++ b/apps/explorer/priv/filecoin/migrations/20241220110454_add_filecoin_addresses_indexes.exs @@ -0,0 +1,8 @@ +defmodule Explorer.Repo.Filecoin.Migrations.AddFilecoinAddressesIndexes do + use Ecto.Migration + + def change do + create(unique_index(:addresses, [:filecoin_robust])) + create(unique_index(:addresses, [:filecoin_id])) + end +end diff --git a/apps/explorer/priv/optimism/migrations/20241203113159_holocene_support.exs b/apps/explorer/priv/optimism/migrations/20241203113159_holocene_support.exs new file mode 100644 index 000000000000..fc23b0dcdd6f --- /dev/null +++ b/apps/explorer/priv/optimism/migrations/20241203113159_holocene_support.exs @@ -0,0 +1,14 @@ +defmodule Explorer.Repo.Optimism.Migrations.HoloceneSupport do + use Ecto.Migration + + def change do + create table(:op_eip1559_config_updates, primary_key: false) do + add(:l2_block_number, :bigint, null: false, primary_key: true) + add(:l2_block_hash, :bytea, null: false) + add(:base_fee_max_change_denominator, :integer, null: false) + add(:elasticity_multiplier, :integer, null: false) + + timestamps(null: false, type: :utc_datetime_usec) + end + end +end diff --git a/apps/explorer/priv/optimism/migrations/20241209101134_op_withdrawal_events_key.exs b/apps/explorer/priv/optimism/migrations/20241209101134_op_withdrawal_events_key.exs new file mode 
100644 index 000000000000..026c8436cc4a --- /dev/null +++ b/apps/explorer/priv/optimism/migrations/20241209101134_op_withdrawal_events_key.exs @@ -0,0 +1,26 @@ +defmodule Explorer.Repo.Optimism.Migrations.OPWithdrawalEventsKey do + use Ecto.Migration + + def up do + execute("TRUNCATE TABLE op_withdrawal_events;") + + drop(constraint("op_withdrawal_events", "op_withdrawal_events_pkey")) + + alter table(:op_withdrawal_events) do + modify(:withdrawal_hash, :bytea, primary_key: true) + modify(:l1_event_type, :withdrawal_event_type, primary_key: true) + modify(:l1_transaction_hash, :bytea, primary_key: true) + end + end + + def down do + execute("TRUNCATE TABLE op_withdrawal_events;") + + drop(constraint("op_withdrawal_events", "op_withdrawal_events_pkey")) + + alter table(:op_withdrawal_events) do + modify(:withdrawal_hash, :bytea, primary_key: true) + modify(:l1_event_type, :withdrawal_event_type, primary_key: true) + end + end +end diff --git a/apps/explorer/priv/repo/migrations/20240708152519_add_nft_media_urls.exs b/apps/explorer/priv/repo/migrations/20240708152519_add_nft_media_urls.exs new file mode 100644 index 000000000000..f39aec152f97 --- /dev/null +++ b/apps/explorer/priv/repo/migrations/20240708152519_add_nft_media_urls.exs @@ -0,0 +1,11 @@ +defmodule Explorer.Repo.Migrations.AddNFTMediaUrls do + use Ecto.Migration + + def change do + alter table(:token_instances) do + add(:thumbnails, :jsonb, null: true) + add(:media_type, :string, null: true) + add(:cdn_upload_error, :string, null: true) + end + end +end diff --git a/apps/explorer/priv/repo/migrations/20241219102223_remove_large_decimals.exs b/apps/explorer/priv/repo/migrations/20241219102223_remove_large_decimals.exs new file mode 100644 index 000000000000..fa65540e395a --- /dev/null +++ b/apps/explorer/priv/repo/migrations/20241219102223_remove_large_decimals.exs @@ -0,0 +1,9 @@ +defmodule Explorer.Repo.Migrations.RemoveLargeDecimals do + use Ecto.Migration + + def change do + execute(""" + UPDATE tokens SET decimals = NULL WHERE decimals > 78; + """) + end +end diff --git a/apps/explorer/priv/zilliqa/migrations/20241027171945_modify_smart_contracts.exs b/apps/explorer/priv/zilliqa/migrations/20241027171945_modify_smart_contracts.exs new file mode 100644 index 000000000000..f181216a48f6 --- /dev/null +++ b/apps/explorer/priv/zilliqa/migrations/20241027171945_modify_smart_contracts.exs @@ -0,0 +1,17 @@ +defmodule Explorer.Repo.Zilliqa.Migrations.ModifySmartContracts do + use Ecto.Migration + + def up do + alter table(:smart_contracts) do + modify(:name, :string, null: true) + modify(:compiler_version, :string, null: true) + end + end + + def down do + alter table(:smart_contracts) do + modify(:name, :string, null: false) + modify(:compiler_version, :string, null: false) + end + end +end diff --git a/apps/explorer/test/explorer/account/notifier/email_test.exs b/apps/explorer/test/explorer/account/notifier/email_test.exs index 7d7b8d574fbb..387aab7e6fab 100644 --- a/apps/explorer/test/explorer/account/notifier/email_test.exs +++ b/apps/explorer/test/explorer/account/notifier/email_test.exs @@ -110,8 +110,6 @@ defmodule Explorer.Account.Notifier.EmailTest do "transaction_url" => "https://eth.blockscout.com/tx/0x5d5ff210261f1b2d6e4af22ea494f428f9997d4ab614a629d4f1390004b3e80d", "transaction_fee" => Decimal.new(210_000), - # todo: keep next line for compatibility with old version of SendGrid template. Remove it when the changes released and Sendgrid template updated. 
- "tx_fee" => Decimal.new(210_000), "username" => "John Snow" }, template_id: "d-666" diff --git a/apps/explorer/test/explorer/account/notifier/notify_test.exs b/apps/explorer/test/explorer/account/notifier/notify_test.exs index 1187837791fc..5968c16dace5 100644 --- a/apps/explorer/test/explorer/account/notifier/notify_test.exs +++ b/apps/explorer/test/explorer/account/notifier/notify_test.exs @@ -142,5 +142,33 @@ defmodule Explorer.Account.Notifier.NotifyTest do Application.put_env(:explorer, Explorer.Account, old_envs) end + + test "ignore events older than 24 hrs" do + wa = + %WatchlistAddress{address_hash: address_hash} = + build(:account_watchlist_address, watch_coin_input: true) + |> Repo.account_repo().insert!() + + _watchlist_address = Repo.preload(wa, watchlist: :identity) + + transaction = + %Transaction{ + from_address: _from_address, + to_address: _to_address, + block_number: _block_number, + hash: _transaction_hash + } = + with_block( + insert(:transaction, to_address: %Chain.Address{hash: address_hash}), + insert(:block, timestamp: DateTime.add(DateTime.utc_now(), -25, :hour)) + ) + + notify = Notify.call([transaction]) + + assert WatchlistNotification + |> Repo.account_repo().all() == [] + + assert notify == [nil] + end end end diff --git a/apps/explorer/test/explorer/account/notifier/summary_test.exs b/apps/explorer/test/explorer/account/notifier/summary_test.exs index 9cb52e021aa0..c39b728d3efc 100644 --- a/apps/explorer/test/explorer/account/notifier/summary_test.exs +++ b/apps/explorer/test/explorer/account/notifier/summary_test.exs @@ -86,22 +86,24 @@ defmodule Explorer.Account.Notifier.SummaryTest do test "ERC-20 Token transfer" do transaction = %Transaction{ - from_address: _from_address, - to_address: _to_address, - block_number: _block_number, - hash: _transaction_hash + from_address: transaction_from_address, + to_address: transaction_to_address, + block_number: block_number, + hash: transaction_hash } = with_block(insert(:transaction)) + transaction_amount = Wei.to(transaction.value, :ether) + transfer = %TokenTransfer{ amount: _amount, - block_number: block_number, + block_number: _block_number, from_address: from_address, to_address: to_address, token: token } = :token_transfer - |> insert(transaction: transaction) + |> insert(transaction: transaction, block: transaction.block, block_number: transaction.block_number) |> Repo.preload([ :token ]) @@ -114,7 +116,19 @@ defmodule Explorer.Account.Notifier.SummaryTest do amount = Decimal.div(transfer.amount, decimals) - assert Summary.process(transfer) == [ + assert Summary.process(transaction) == [ + %Summary{ + amount: transaction_amount, + block_number: block_number, + from_address_hash: transaction_from_address.hash, + method: "transfer", + name: "ETH", + subject: "Coin transaction", + to_address_hash: transaction_to_address.hash, + transaction_hash: transaction_hash, + transaction_fee: fee, + type: "COIN" + }, %Summary{ amount: amount, block_number: block_number, @@ -135,32 +149,44 @@ defmodule Explorer.Account.Notifier.SummaryTest do transaction = %Transaction{ - from_address: _from_address, - to_address: _to_address, - block_number: _block_number, - hash: _transaction_hash + from_address: transaction_from_address, + to_address: transaction_to_address, + block_number: block_number, + hash: transaction_hash } = with_block(insert(:transaction)) - transfer = - %TokenTransfer{ - amount: _amount, - block_number: block_number, - from_address: from_address, - to_address: to_address - } = + transaction_amount = 
Wei.to(transaction.value, :ether) + + %TokenTransfer{ + amount: _amount, + block_number: _block_number, + from_address: from_address, + to_address: to_address + } = :token_transfer |> insert( transaction: transaction, token_ids: [42], - token_contract_address: token.contract_address + token_contract_address: token.contract_address, + block: transaction.block, + block_number: transaction.block_number ) - |> Repo.preload([ - :token - ]) {_, fee} = Transaction.fee(transaction, :gwei) - assert Summary.process(transfer) == [ + assert Summary.process(transaction) == [ + %Summary{ + amount: transaction_amount, + block_number: block_number, + from_address_hash: transaction_from_address.hash, + method: "transfer", + name: "ETH", + subject: "Coin transaction", + to_address_hash: transaction_to_address.hash, + transaction_hash: transaction_hash, + transaction_fee: fee, + type: "COIN" + }, %Summary{ amount: 0, block_number: block_number, @@ -181,32 +207,44 @@ defmodule Explorer.Account.Notifier.SummaryTest do transaction = %Transaction{ - from_address: _from_address, - to_address: _to_address, - block_number: _block_number, - hash: _transaction_hash + from_address: transaction_from_address, + to_address: transaction_to_address, + block_number: block_number, + hash: transaction_hash } = with_block(insert(:transaction)) - transfer = - %TokenTransfer{ - amount: _amount, - block_number: block_number, - from_address: from_address, - to_address: to_address - } = + transaction_amount = Wei.to(transaction.value, :ether) + + %TokenTransfer{ + amount: _amount, + block_number: _block_number, + from_address: from_address, + to_address: to_address + } = :token_transfer |> insert( transaction: transaction, token_ids: [42], - token_contract_address: token.contract_address + token_contract_address: token.contract_address, + block: transaction.block, + block_number: transaction.block_number ) - |> Repo.preload([ - :token - ]) {_, fee} = Transaction.fee(transaction, :gwei) - assert Summary.process(transfer) == [ + assert Summary.process(transaction) == [ + %Summary{ + amount: transaction_amount, + block_number: block_number, + from_address_hash: transaction_from_address.hash, + method: "transfer", + name: "ETH", + subject: "Coin transaction", + to_address_hash: transaction_to_address.hash, + transaction_hash: transaction_hash, + transaction_fee: fee, + type: "COIN" + }, %Summary{ amount: 0, block_number: block_number, @@ -227,32 +265,44 @@ defmodule Explorer.Account.Notifier.SummaryTest do transaction = %Transaction{ - from_address: _from_address, - to_address: _to_address, - block_number: _block_number, - hash: _transaction_hash + from_address: transaction_from_address, + to_address: transaction_to_address, + block_number: block_number, + hash: transaction_hash } = with_block(insert(:transaction)) - transfer = - %TokenTransfer{ - amount: _amount, - block_number: block_number, - from_address: from_address, - to_address: to_address - } = + transaction_amount = Wei.to(transaction.value, :ether) + + %TokenTransfer{ + amount: _amount, + block_number: _block_number, + from_address: from_address, + to_address: to_address + } = :token_transfer |> insert( transaction: transaction, token_ids: [23, 42], - token_contract_address: token.contract_address + token_contract_address: token.contract_address, + block: transaction.block, + block_number: transaction.block_number ) - |> Repo.preload([ - :token - ]) {_, fee} = Transaction.fee(transaction, :gwei) - assert Summary.process(transfer) == [ + assert Summary.process(transaction) == 
[ + %Summary{ + amount: transaction_amount, + block_number: block_number, + from_address_hash: transaction_from_address.hash, + method: "transfer", + name: "ETH", + subject: "Coin transaction", + to_address_hash: transaction_to_address.hash, + transaction_hash: transaction_hash, + transaction_fee: fee, + type: "COIN" + }, %Summary{ amount: 0, block_number: block_number, @@ -273,16 +323,18 @@ defmodule Explorer.Account.Notifier.SummaryTest do transaction = %Transaction{ - from_address: _from_address, - to_address: _to_address, - block_number: _block_number, - hash: _transaction_hash + from_address: transaction_from_address, + to_address: transaction_to_address, + block_number: block_number, + hash: transaction_hash } = with_block(insert(:transaction)) + transaction_amount = Wei.to(transaction.value, :ether) + transfer = %TokenTransfer{ amount: _amount, - block_number: block_number, + block_number: _block_number, from_address: from_address, to_address: to_address } = @@ -290,11 +342,10 @@ defmodule Explorer.Account.Notifier.SummaryTest do |> insert( transaction: transaction, token_ids: [42], - token_contract_address: token.contract_address + token_contract_address: token.contract_address, + block: transaction.block, + block_number: transaction.block_number ) - |> Repo.preload([ - :token - ]) {_, fee} = Transaction.fee(transaction, :gwei) @@ -304,7 +355,19 @@ defmodule Explorer.Account.Notifier.SummaryTest do amount = Decimal.div(transfer.amount, decimals) - assert Summary.process(transfer) == [ + assert Summary.process(transaction) == [ + %Summary{ + amount: transaction_amount, + block_number: block_number, + from_address_hash: transaction_from_address.hash, + method: "transfer", + name: "ETH", + subject: "Coin transaction", + to_address_hash: transaction_to_address.hash, + transaction_hash: transaction_hash, + transaction_fee: fee, + type: "COIN" + }, %Summary{ amount: amount, block_number: block_number, @@ -313,7 +376,7 @@ defmodule Explorer.Account.Notifier.SummaryTest do name: "Infinite Token", subject: "42", to_address_hash: to_address.hash, - transaction_hash: transaction.hash, + transaction_hash: transaction_hash, transaction_fee: fee, type: "ERC-404" } @@ -325,16 +388,18 @@ defmodule Explorer.Account.Notifier.SummaryTest do transaction = %Transaction{ - from_address: _from_address, - to_address: _to_address, - block_number: _block_number, - hash: _transaction_hash + from_address: transaction_from_address, + to_address: transaction_to_address, + block_number: block_number, + hash: transaction_hash } = with_block(insert(:transaction)) + transaction_amount = Wei.to(transaction.value, :ether) + transfer = %TokenTransfer{ amount: _amount, - block_number: block_number, + block_number: _block_number, from_address: from_address, to_address: to_address } = @@ -342,11 +407,10 @@ defmodule Explorer.Account.Notifier.SummaryTest do |> insert( transaction: transaction, token_ids: [], - token_contract_address: token.contract_address + token_contract_address: token.contract_address, + block: transaction.block, + block_number: transaction.block_number ) - |> Repo.preload([ - :token - ]) {_, fee} = Transaction.fee(transaction, :gwei) @@ -356,7 +420,19 @@ defmodule Explorer.Account.Notifier.SummaryTest do amount = Decimal.div(transfer.amount, decimals) - assert Summary.process(transfer) == [ + assert Summary.process(transaction) == [ + %Summary{ + amount: transaction_amount, + block_number: block_number, + from_address_hash: transaction_from_address.hash, + method: "transfer", + name: "ETH", + subject: 
"Coin transaction", + to_address_hash: transaction_to_address.hash, + transaction_hash: transaction_hash, + transaction_fee: fee, + type: "COIN" + }, %Summary{ amount: amount, block_number: block_number, diff --git a/apps/explorer/test/explorer/chain/cache/gas_price_oracle_test.exs b/apps/explorer/test/explorer/chain/cache/gas_price_oracle_test.exs index d048d056c2b4..66c69adc2885 100644 --- a/apps/explorer/test/explorer/chain/cache/gas_price_oracle_test.exs +++ b/apps/explorer/test/explorer/chain/cache/gas_price_oracle_test.exs @@ -137,6 +137,68 @@ defmodule Explorer.Chain.Cache.GasPriceOracleTest do }}, []} = GasPriceOracle.get_average_gas_price(2, 35, 60, 90) end + test "returns base fee only gas estimation with average block time if there is no recent transactions with non-zero gas price" do + average_block_time_old_env = Application.get_env(:explorer, AverageBlockTime) + + Application.put_env(:explorer, AverageBlockTime, enabled: true, cache_period: 1_800_000) + start_supervised!(AverageBlockTime) + + on_exit(fn -> + Application.put_env(:explorer, AverageBlockTime, average_block_time_old_env) + end) + + timestamp = ~U[2023-12-12 12:12:30.000000Z] + + block1 = + insert(:block, + number: 100, + hash: "0x3e51328bccedee581e8ba35190216a61a5d67fd91ca528f3553142c0c7d18391", + base_fee_per_gas: 100, + timestamp: timestamp + ) + + block2 = + insert(:block, + number: 101, + hash: "0x76c3da57334fffdc66c0d954dce1a910fcff13ec889a13b2d8b0b6e9440ce729", + base_fee_per_gas: 100, + timestamp: timestamp + ) + + :transaction + |> insert( + status: :ok, + block_hash: block1.hash, + block_number: block1.number, + cumulative_gas_used: 884_322, + gas_used: 106_025, + index: 0, + gas_price: 0, + hash: "0xac2a7dab94d965893199e7ee01649e2d66f0787a4c558b3118c09e80d4df8269" + ) + + :transaction + |> insert( + status: :ok, + block_hash: block2.hash, + block_number: block2.number, + cumulative_gas_used: 884_322, + gas_used: 106_025, + index: 0, + gas_price: 0, + hash: "0x5d5c2776f96704e7845f7d3c1fbba6685ab6efd6f82b6cd11d549f3b3a46bd03" + ) + + AverageBlockTime.refresh() + + assert {{:ok, + %{ + average: %{base_fee: 0.01, priority_fee: +0.0, price: 0.01, time: +0.0}, + fast: %{base_fee: 0.01, priority_fee: +0.0, price: 0.01, time: +0.0}, + slow: %{base_fee: 0.01, priority_fee: +0.0, price: 0.01, time: +0.0} + }}, []} = GasPriceOracle.get_average_gas_price(2, 35, 60, 90) + end + test "returns the same percentile values if gas price is the same over transactions" do block1 = insert(:block, number: 100, hash: "0x3e51328bccedee581e8ba35190216a61a5d67fd91ca528f3553142c0c7d18391") block2 = insert(:block, number: 101, hash: "0x76c3da57334fffdc66c0d954dce1a910fcff13ec889a13b2d8b0b6e9440ce729") diff --git a/apps/explorer/test/explorer/chain/celo/helper_test.exs b/apps/explorer/test/explorer/chain/celo/helper_test.exs new file mode 100644 index 000000000000..f4062cd6ad26 --- /dev/null +++ b/apps/explorer/test/explorer/chain/celo/helper_test.exs @@ -0,0 +1,4 @@ +defmodule Explorer.Chain.Celo.HelperTest do + use ExUnit.Case, async: true + doctest Explorer.Chain.Celo.Helper, import: true +end diff --git a/apps/explorer/test/explorer/chain/import/runner/blocks_test.exs b/apps/explorer/test/explorer/chain/import/runner/blocks_test.exs index 61ce2aa45c4d..5d0a9a841c6e 100644 --- a/apps/explorer/test/explorer/chain/import/runner/blocks_test.exs +++ b/apps/explorer/test/explorer/chain/import/runner/blocks_test.exs @@ -88,7 +88,7 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do test "coin balances are deleted and new balances 
are derived if some blocks lost consensus", %{consensus_block: %{number: block_number} = block, options: options} do - %{hash: address_hash} = address = insert(:address) + %{hash: address_hash} = address = insert(:address, fetched_coin_balance_block_number: block_number) prev_block_number = block_number - 1 @@ -101,7 +101,7 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do assert {:ok, %{ - delete_address_coin_balances: [^address_hash], + delete_address_coin_balances: [{^address_hash, ^block_number}], derive_address_fetched_coin_balances: [ %{ hash: ^address_hash, @@ -114,6 +114,33 @@ defmodule Explorer.Chain.Import.Runner.BlocksTest do assert %{value: ^prev_value, block_number: ^prev_block_number} = Repo.one(Address.CoinBalance) end + test "derive_address_fetched_coin_balances only updates addresses if its fetched_coin_balance_block_number lost consensus", + %{consensus_block: %{number: block_number} = block, options: options} do + %{hash: address_hash} = address = insert(:address, fetched_coin_balance_block_number: block_number) + address_1 = insert(:address, fetched_coin_balance_block_number: block_number + 2) + + prev_block_number = block_number - 1 + + insert(:address_coin_balance, address: address, block_number: block_number) + %{value: prev_value} = insert(:address_coin_balance, address: address, block_number: prev_block_number) + + insert(:address_coin_balance, address: address_1, block_number: block_number + 2) + + insert(:block, number: block_number, consensus: true) + + assert {:ok, + %{ + delete_address_coin_balances: [{^address_hash, ^block_number}], + derive_address_fetched_coin_balances: [ + %{ + hash: ^address_hash, + fetched_coin_balance: ^prev_value, + fetched_coin_balance_block_number: ^prev_block_number + } + ] + }} = run_block_consensus_change(block, true, options) + end + test "delete_address_current_token_balances deletes rows with matching block number when consensus is true", %{consensus_block: %{number: block_number} = block, options: options} do %Address.CurrentTokenBalance{address_hash: address_hash, token_contract_address_hash: token_contract_address_hash} = diff --git a/apps/explorer/test/explorer/chain/import/runner/tokens_test.exs b/apps/explorer/test/explorer/chain/import/runner/tokens_test.exs index e3b7d17dee74..67d6e47ad4aa 100644 --- a/apps/explorer/test/explorer/chain/import/runner/tokens_test.exs +++ b/apps/explorer/test/explorer/chain/import/runner/tokens_test.exs @@ -22,7 +22,7 @@ defmodule Explorer.Chain.Import.Runner.TokensTest do assert is_nil(holder_count) assert {:ok, %{tokens: [%Token{holder_count: ^holder_count}]}} = - run_changes(%{contract_address_hash: contract_address_hash, type: type, name: name}) + run_changes(%{contract_address_hash: contract_address_hash, type: type, name: name <> "name"}) end test "existing tokens without nil holder counter do have their holder_count change" do @@ -32,7 +32,7 @@ defmodule Explorer.Chain.Import.Runner.TokensTest do refute is_nil(holder_count) assert {:ok, %{tokens: [%Token{holder_count: ^holder_count}]}} = - run_changes(%{contract_address_hash: contract_address_hash, type: type, name: name}) + run_changes(%{contract_address_hash: contract_address_hash, type: type, name: name <> "name"}) end end diff --git a/apps/explorer/test/explorer/chain_test.exs b/apps/explorer/test/explorer/chain_test.exs index 8aef4e7aec12..695fdcd9ad19 100644 --- a/apps/explorer/test/explorer/chain_test.exs +++ b/apps/explorer/test/explorer/chain_test.exs @@ -26,7 +26,7 @@ defmodule Explorer.ChainTest do Wei } - alias 
Explorer.{Chain, Etherscan, TestHelper} + alias Explorer.{Chain, Etherscan} alias Explorer.Chain.Address.Counters alias Explorer.Chain.Cache.Block, as: BlockCache alias Explorer.Chain.Cache.Transaction, as: TransactionCache @@ -127,10 +127,10 @@ defmodule Explorer.ChainTest do assert {:ok, _, _} = Chain.last_db_block_status() end - test "return {:ok, last_block_period} if block is not in healthy period" do + test "return {:stale, _, _} if block is not in healthy period" do insert(:block, consensus: true, timestamp: Timex.shift(DateTime.utc_now(), hours: -50)) - assert {:error, _, _} = Chain.last_db_block_status() + assert {:stale, _, _} = Chain.last_db_block_status() end end @@ -141,10 +141,10 @@ defmodule Explorer.ChainTest do assert {:ok, _, _} = Chain.last_cache_block_status() end - test "return error if cache is stale" do + test "return {:stale, _, _} if cache is stale" do insert(:block, consensus: true, timestamp: Timex.shift(DateTime.utc_now(), hours: -50)) - assert {:error, _, _} = Chain.last_cache_block_status() + assert {:stale, _, _} = Chain.last_cache_block_status() end end diff --git a/apps/explorer/test/explorer/config_helper_test.exs b/apps/explorer/test/explorer/config_helper_test.exs new file mode 100644 index 000000000000..08fe9728ca8e --- /dev/null +++ b/apps/explorer/test/explorer/config_helper_test.exs @@ -0,0 +1,57 @@ +defmodule ConfigHelperTest do + use ExUnit.Case + + setup do + current_env_vars = System.get_env() + clear_env_variables() + + on_exit(fn -> + System.put_env(current_env_vars) + end) + end + + describe "parse_urls_list/3" do + test "common case" do + System.put_env("ETHEREUM_JSONRPC_HTTP_URLS", "test") + assert ConfigHelper.parse_urls_list(:http) == ["test"] + end + + test "using defined default" do + System.put_env("ETHEREUM_JSONRPC_HTTP_URL", "test") + refute System.get_env("ETHEREUM_JSONRPC_ETH_CALL_URLS") + refute System.get_env("ETHEREUM_JSONRPC_ETH_CALL_URL") + assert ConfigHelper.parse_urls_list(:eth_call) == ["test"] + end + + test "using defined fallback default" do + System.put_env("ETHEREUM_JSONRPC_FALLBACK_HTTP_URL", "test") + refute System.get_env("ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS") + refute System.get_env("ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL") + + assert ConfigHelper.parse_urls_list(:fallback_eth_call) == ["test"] + end + + test "base http urls are used if fallback is not provided" do + System.put_env("ETHEREUM_JSONRPC_HTTP_URL", "test") + refute System.get_env("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS") + refute System.get_env("ETHEREUM_JSONRPC_FALLBACK_TRACE_URL") + + assert ConfigHelper.parse_urls_list(:fallback_trace) == ["test"] + end + end + + defp clear_env_variables do + System.delete_env("ETHEREUM_JSONRPC_HTTP_URLS") + System.delete_env("ETHEREUM_JSONRPC_HTTP_URL") + System.delete_env("ETHEREUM_JSONRPC_TRACE_URLS") + System.delete_env("ETHEREUM_JSONRPC_TRACE_URL") + System.delete_env("ETHEREUM_JSONRPC_ETH_CALL_URLS") + System.delete_env("ETHEREUM_JSONRPC_ETH_CALL_URL") + System.delete_env("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS") + System.delete_env("ETHEREUM_JSONRPC_FALLBACK_HTTP_URL") + System.delete_env("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS") + System.delete_env("ETHEREUM_JSONRPC_FALLBACK_TRACE_URL") + System.delete_env("ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS") + System.delete_env("ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL") + end +end diff --git a/apps/explorer/test/explorer/market/history/cataloger_test.exs b/apps/explorer/test/explorer/market/history/cataloger_test.exs index 040033b70248..717be6bbad9e 100644 --- 
a/apps/explorer/test/explorer/market/history/cataloger_test.exs +++ b/apps/explorer/test/explorer/market/history/cataloger_test.exs @@ -2,7 +2,7 @@ defmodule Explorer.Market.History.CatalogerTest do use Explorer.DataCase, async: false import Mox - import Ecto.Query, only: [order_by: 2] + import Ecto.Query, only: [limit: 2, order_by: 2] alias Explorer.Market.MarketHistory alias Explorer.Market.History.Cataloger @@ -222,7 +222,7 @@ defmodule Explorer.Market.History.CatalogerTest do %Explorer.Market.MarketHistory{ date: ~D[2018-04-02] } = second_entry - ] = MarketHistory |> order_by(asc: :date) |> Repo.all() + ] = MarketHistory |> order_by(asc: :date) |> limit(2) |> Repo.all() assert Decimal.eq?(first_entry.closing_price, Decimal.new(10)) assert Decimal.eq?(second_entry.closing_price, Decimal.new(20)) diff --git a/apps/explorer/test/explorer/migrator/sanitize_incorrect_weth_token_transfers_test.exs b/apps/explorer/test/explorer/migrator/sanitize_incorrect_weth_token_transfers_test.exs index b2f4be309b68..07bcb94f3cd5 100644 --- a/apps/explorer/test/explorer/migrator/sanitize_incorrect_weth_token_transfers_test.exs +++ b/apps/explorer/test/explorer/migrator/sanitize_incorrect_weth_token_transfers_test.exs @@ -115,7 +115,8 @@ defmodule Explorer.Migrator.SanitizeIncorrectWETHTokenTransfersTest do Application.put_env(:explorer, Explorer.Migrator.SanitizeIncorrectWETHTokenTransfers, batch_size: 1, - concurrency: 1 + concurrency: 1, + timeout: 0 ) SanitizeIncorrectWETHTokenTransfers.start_link([]) diff --git a/apps/indexer/README.md b/apps/indexer/README.md index 5a22018299a6..d466c9fd15e9 100644 --- a/apps/indexer/README.md +++ b/apps/indexer/README.md @@ -104,9 +104,9 @@ Additionally: These workers are created for fetching information, which previously wasn't fetched in existing fetchers, or was fetched incorrectly. After all deployed instances get all needed data, these fetchers should be deprecated and removed. -- `uncataloged_token_transfers`: extracts token transfers from logs, which previously weren't parsed due to unknown format +- `uncataloged_token_transfers`: extracts token transfers from logs, which weren't parsed due to an unknown format - `uncles_without_index`: adds previously unfetched `index` field for unfetched blocks in `block_second_degree_relations` -- `blocks_transactions_mismatch`: refetches each block once and revokes consensus to those whose transaction number mismatches with the number currently stored. This is meant to force the correction of a race condition that caused successfully fetched transactions to be overwritten by a following non-consensus block: [#1911](https://github.com/blockscout/blockscout/issues/1911). +- `blocks_transactions_mismatch`: refetches each block once and revokes consensus for those whose transaction number mismatches with the number currently stored. This is meant to force the correction of a race condition that caused successfully fetched transactions to be overwritten by a following non-consensus block: [#1911](https://github.com/blockscout/blockscout/issues/1911). 
## Memory Usage diff --git a/apps/indexer/config/dev/besu.exs b/apps/indexer/config/dev/besu.exs index 0efe225df124..97efb8820945 100644 --- a/apps/indexer/config/dev/besu.exs +++ b/apps/indexer/config/dev/besu.exs @@ -18,29 +18,12 @@ config :indexer, else: EthereumJSONRPC.IPC, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL", "http://localhost:8545"), - trace_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_TRACE_URLS", - "ETHEREUM_JSONRPC_TRACE_URL", - "http://localhost:8545" - ), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:8545" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:8545"), + trace_urls: ConfigHelper.parse_urls_list(:trace, "http://localhost:8545"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:8545"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/indexer/config/dev/erigon.exs b/apps/indexer/config/dev/erigon.exs index a819283d3ce9..0f526649cdf9 100644 --- a/apps/indexer/config/dev/erigon.exs +++ b/apps/indexer/config/dev/erigon.exs @@ -18,29 +18,12 @@ config :indexer, else: EthereumJSONRPC.IPC, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL", "http://localhost:8545"), - trace_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_TRACE_URLS", - "ETHEREUM_JSONRPC_TRACE_URL", - "http://localhost:8545" - ), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:8545" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:8545"), + trace_urls: ConfigHelper.parse_urls_list(:trace, "http://localhost:8545"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:8545"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/indexer/config/dev/filecoin.exs b/apps/indexer/config/dev/filecoin.exs index bed9eea600de..6701a698480f 100644 --- a/apps/indexer/config/dev/filecoin.exs +++ b/apps/indexer/config/dev/filecoin.exs @@ -17,33 
+17,12 @@ config :indexer, ), transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_HTTP_URLS", - "ETHEREUM_JSONRPC_HTTP_URL", - "http://localhost:1234/rpc/v1" - ), - trace_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_TRACE_URLS", - "ETHEREUM_JSONRPC_TRACE_URL", - "http://localhost:1234/rpc/v1" - ), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:1234/rpc/v1" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:1234/rpc/v1"), + trace_urls: ConfigHelper.parse_urls_list(:trace, "http://localhost:1234/rpc/v1"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:1234/rpc/v1"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, trace_block: :trace diff --git a/apps/indexer/config/dev/ganache.exs b/apps/indexer/config/dev/ganache.exs index d66ded540fd9..6360d8141875 100644 --- a/apps/indexer/config/dev/ganache.exs +++ b/apps/indexer/config/dev/ganache.exs @@ -17,21 +17,10 @@ config :indexer, ), transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL", "http://localhost:7545"), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:7545" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:7545"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:7545"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call ], diff --git a/apps/indexer/config/dev/geth.exs b/apps/indexer/config/dev/geth.exs index 82fde60069cf..3e0557898b02 100644 --- a/apps/indexer/config/dev/geth.exs +++ b/apps/indexer/config/dev/geth.exs @@ -17,29 +17,12 @@ config :indexer, ), transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL", "http://localhost:8545"), - trace_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_TRACE_URLS", - "ETHEREUM_JSONRPC_TRACE_URL", - "http://localhost:8545" - ), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:8545" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - 
ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:8545"), + trace_urls: ConfigHelper.parse_urls_list(:trace, "http://localhost:8545"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:8545"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, debug_traceTransaction: :trace, diff --git a/apps/indexer/config/dev/nethermind.exs b/apps/indexer/config/dev/nethermind.exs index 0f59e2c9658b..a4788dcc7d1f 100644 --- a/apps/indexer/config/dev/nethermind.exs +++ b/apps/indexer/config/dev/nethermind.exs @@ -18,29 +18,12 @@ config :indexer, else: EthereumJSONRPC.IPC, transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL", "http://localhost:8545"), - trace_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_TRACE_URLS", - "ETHEREUM_JSONRPC_TRACE_URL", - "http://localhost:8545" - ), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:8545" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:8545"), + trace_urls: ConfigHelper.parse_urls_list(:trace, "http://localhost:8545"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:8545"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/indexer/config/dev/rsk.exs b/apps/indexer/config/dev/rsk.exs index c4a0d8434815..012b0f6d0eea 100644 --- a/apps/indexer/config/dev/rsk.exs +++ b/apps/indexer/config/dev/rsk.exs @@ -19,29 +19,12 @@ config :indexer, ), transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL", "http://localhost:8545"), - trace_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_TRACE_URLS", - "ETHEREUM_JSONRPC_TRACE_URL", - "http://localhost:8545" - ), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:8545" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - 
"ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:8545"), + trace_urls: ConfigHelper.parse_urls_list(:trace, "http://localhost:8545"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:8545"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/indexer/config/prod/besu.exs b/apps/indexer/config/prod/besu.exs index b78e3c611cda..03c2c8ff4601 100644 --- a/apps/indexer/config/prod/besu.exs +++ b/apps/indexer/config/prod/besu.exs @@ -17,18 +17,12 @@ config :indexer, ), transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"), - trace_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_TRACE_URLS", "ETHEREUM_JSONRPC_TRACE_URL"), - eth_call_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http), + trace_urls: ConfigHelper.parse_urls_list(:trace), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/indexer/config/prod/erigon.exs b/apps/indexer/config/prod/erigon.exs index 0686e62d3ef8..02367bfc476d 100644 --- a/apps/indexer/config/prod/erigon.exs +++ b/apps/indexer/config/prod/erigon.exs @@ -17,18 +17,12 @@ config :indexer, ), transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"), - trace_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_TRACE_URLS", "ETHEREUM_JSONRPC_TRACE_URL"), - eth_call_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http), + trace_urls: ConfigHelper.parse_urls_list(:trace), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/indexer/config/prod/filecoin.exs 
b/apps/indexer/config/prod/filecoin.exs index 749c6a8fbbaa..b5102b1a3fa6 100644 --- a/apps/indexer/config/prod/filecoin.exs +++ b/apps/indexer/config/prod/filecoin.exs @@ -17,18 +17,12 @@ config :indexer, ), transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"), - trace_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_TRACE_URLS", "ETHEREUM_JSONRPC_TRACE_URL"), - eth_call_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http), + trace_urls: ConfigHelper.parse_urls_list(:trace), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, trace_block: :trace diff --git a/apps/indexer/config/prod/ganache.exs b/apps/indexer/config/prod/ganache.exs index d66ded540fd9..6360d8141875 100644 --- a/apps/indexer/config/prod/ganache.exs +++ b/apps/indexer/config/prod/ganache.exs @@ -17,21 +17,10 @@ config :indexer, ), transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL", "http://localhost:7545"), - eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_ETH_CALL_URL", - "http://localhost:7545" - ), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http, "http://localhost:7545"), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call, "http://localhost:7545"), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call ], diff --git a/apps/indexer/config/prod/geth.exs b/apps/indexer/config/prod/geth.exs index 22a2e84365f4..291683ef9eda 100644 --- a/apps/indexer/config/prod/geth.exs +++ b/apps/indexer/config/prod/geth.exs @@ -17,18 +17,12 @@ config :indexer, ), transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"), - trace_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_TRACE_URLS", "ETHEREUM_JSONRPC_TRACE_URL"), - eth_call_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - 
"ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http), + trace_urls: ConfigHelper.parse_urls_list(:trace), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, debug_traceTransaction: :trace, diff --git a/apps/indexer/config/prod/nethermind.exs b/apps/indexer/config/prod/nethermind.exs index 43f71bbae02a..d5b0ce79d9d4 100644 --- a/apps/indexer/config/prod/nethermind.exs +++ b/apps/indexer/config/prod/nethermind.exs @@ -17,18 +17,12 @@ config :indexer, ), transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"), - trace_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_TRACE_URLS", "ETHEREUM_JSONRPC_TRACE_URL"), - eth_call_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http), + trace_urls: ConfigHelper.parse_urls_list(:trace), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/indexer/config/prod/rsk.exs b/apps/indexer/config/prod/rsk.exs index b710d1347416..7d7bc9ff4b25 100644 --- a/apps/indexer/config/prod/rsk.exs +++ b/apps/indexer/config/prod/rsk.exs @@ -19,18 +19,12 @@ config :indexer, ), transport_options: [ http: EthereumJSONRPC.HTTP.HTTPoison, - urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"), - trace_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_TRACE_URLS", "ETHEREUM_JSONRPC_TRACE_URL"), - eth_call_urls: ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"), - fallback_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"), - fallback_trace_urls: - ConfigHelper.parse_urls_list("ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"), - fallback_eth_call_urls: - ConfigHelper.parse_urls_list( - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", - "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL" - ), + urls: ConfigHelper.parse_urls_list(:http), + trace_urls: ConfigHelper.parse_urls_list(:trace), + eth_call_urls: ConfigHelper.parse_urls_list(:eth_call), + fallback_urls: ConfigHelper.parse_urls_list(:fallback_http), + fallback_trace_urls: ConfigHelper.parse_urls_list(:fallback_trace), + fallback_eth_call_urls: ConfigHelper.parse_urls_list(:fallback_eth_call), method_to_url: [ eth_call: :eth_call, eth_getBalance: :trace, diff --git a/apps/indexer/lib/indexer/application.ex b/apps/indexer/lib/indexer/application.ex 
index 5851e73adb4d..2f5e4fe24427 100644 --- a/apps/indexer/lib/indexer/application.ex +++ b/apps/indexer/lib/indexer/application.ex @@ -61,7 +61,11 @@ defmodule Indexer.Application do name: Indexer.Application ] - Supervisor.start_link(children, opts) + if Application.get_env(:nft_media_handler, :standalone_media_worker?) do + Supervisor.start_link([], opts) + else + Supervisor.start_link(children, opts) + end end defp token_instance_fetcher_pool_size(fetcher, nil) do diff --git a/apps/indexer/lib/indexer/block/fetcher.ex b/apps/indexer/lib/indexer/block/fetcher.ex index afc5f4f676fa..84dd017893c9 100644 --- a/apps/indexer/lib/indexer/block/fetcher.ex +++ b/apps/indexer/lib/indexer/block/fetcher.ex @@ -17,6 +17,7 @@ defmodule Indexer.Block.Fetcher do alias Explorer.Chain.Cache.{Accounts, BlockNumber, Transactions, Uncles} alias Explorer.Chain.Filecoin.PendingAddressOperation, as: FilecoinPendingAddressOperation alias Explorer.Chain.{Address, Block, Hash, Import, Transaction, Wei} + alias Explorer.MicroserviceInterfaces.MultichainSearch alias Indexer.Block.Fetcher.Receipts alias Indexer.Fetcher.Arbitrum.MessagesToL2Matcher, as: ArbitrumMessagesToL2Matcher alias Indexer.Fetcher.Celo.EpochBlockOperations, as: CeloEpochBlockOperations @@ -281,6 +282,12 @@ defmodule Indexer.Block.Fetcher do update_uncles_cache(inserted[:block_second_degree_relations]) update_withdrawals_cache(inserted[:withdrawals]) + update_multichain_search_db(%{ + addresses: inserted[:addresses], + blocks: inserted[:blocks], + transactions: inserted[:transactions] + }) + async_match_arbitrum_messages_to_l2(arbitrum_transactions_for_further_handling) result @@ -411,6 +418,14 @@ defmodule Indexer.Block.Fetcher do :ok end + defp update_multichain_search_db(%{addresses: addresses, blocks: blocks, transactions: transactions}) do + MultichainSearch.batch_import(%{ + addresses: addresses || [], + blocks: blocks || [], + transactions: transactions || [] + }) + end + def import( %__MODULE__{broadcast: broadcast, callback_module: callback_module} = state, options @@ -492,9 +507,17 @@ defmodule Indexer.Block.Fetcher do block_number: block_number, hash: hash, created_contract_address_hash: %Hash{} = created_contract_address_hash, - created_contract_code_indexed_at: nil + created_contract_code_indexed_at: nil, + type: type } -> - [%{block_number: block_number, hash: hash, created_contract_address_hash: created_contract_address_hash}] + [ + %{ + block_number: block_number, + hash: hash, + created_contract_address_hash: created_contract_address_hash, + type: type + } + ] %Transaction{created_contract_address_hash: nil} -> [] diff --git a/apps/indexer/lib/indexer/block/realtime/fetcher.ex b/apps/indexer/lib/indexer/block/realtime/fetcher.ex index 59fef6de6256..b165eb584263 100644 --- a/apps/indexer/lib/indexer/block/realtime/fetcher.ex +++ b/apps/indexer/lib/indexer/block/realtime/fetcher.ex @@ -310,10 +310,12 @@ defmodule Indexer.Block.Realtime.Fetcher do case @chain_type do :optimism -> - # Removes all rows from `op_transaction_batches` and `op_withdrawals` tables - # previously written starting from the reorg block number + # Removes all rows from `op_transaction_batches`, `op_withdrawals`, + # and `op_eip1559_config_updates` tables previously written starting + # from the reorg block number defp remove_assets_by_number(reorg_block) do - # credo:disable-for-lines:2 Credo.Check.Design.AliasUsage + # credo:disable-for-lines:3 Credo.Check.Design.AliasUsage + Indexer.Fetcher.Optimism.EIP1559ConfigUpdate.handle_realtime_l2_reorg(reorg_block) 
Indexer.Fetcher.Optimism.TransactionBatch.handle_l2_reorg(reorg_block) Indexer.Fetcher.Optimism.Withdrawal.remove(reorg_block) end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/da/anytrust.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/da/anytrust.ex index 40459db704f5..b64441cf38ae 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/da/anytrust.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/da/anytrust.ex @@ -4,12 +4,15 @@ defmodule Indexer.Fetcher.Arbitrum.DA.Anytrust do within the Arbitrum rollup context. """ + alias EthereumJSONRPC.Arbitrum.Constants.Events, as: ArbitrumEvents + import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_error: 1, log_info: 1, log_debug: 1] import Explorer.Helper, only: [decode_data: 2] - alias Indexer.Fetcher.Arbitrum.Utils.{Db, Rpc} + alias Indexer.Fetcher.Arbitrum.Utils.Db.Settlement, as: Db alias Indexer.Fetcher.Arbitrum.Utils.Helper, as: ArbitrumHelper + alias Indexer.Fetcher.Arbitrum.Utils.Rpc alias Indexer.Helper, as: IndexerHelper alias Explorer.Chain.Arbitrum @@ -83,10 +86,6 @@ defmodule Indexer.Fetcher.Arbitrum.DA.Anytrust do :pubkeys => [signer()] } - # keccak256("SetValidKeyset(bytes32,bytes)") - @set_valid_keyset_event "0xabca9b7986bc22ad0160eb0cb88ae75411eacfba4052af0b457a9335ef655722" - @set_valid_keyset_event_unindexed_params [:bytes] - @doc """ Parses batch accompanying data to extract AnyTrust data availability information. @@ -149,31 +148,32 @@ defmodule Indexer.Fetcher.Arbitrum.DA.Anytrust do end @doc """ - Prepares AnyTrust data availability information for import. - - This function prepares a list of data structures for import into the database, - ensuring that AnyTrust DA information and related keysets are included. It - verifies if the keyset associated with the AnyTrust DA certificate is already - known or needs to be fetched from L1. + Transforms AnyTrust data availability information into database-ready records. - To avoid fetching the same keyset multiple times, the function uses a cache. + Creates database records for both the DA certificate and its association with a batch + number. Additionally checks if the certificate's keyset is already known or needs to + be fetched from L1. ## Parameters - - `source`: The initial list of data to be imported. - - `da_info`: The AnyTrust DA info struct containing details about the data blob. - - `l1_connection_config`: A map containing the address of the Sequencer Inbox contract - and configuration parameters for the JSON RPC connection. - - `cache`: A set of unique elements used to cache the checked keysets. + - A tuple containing: + - A list of existing DA records (`DaMultiPurposeRecord`) + - A list of existing batch-to-blob associations (`BatchToDaBlob`) + - `da_info`: The AnyTrust DA info struct containing the certificate data + - `l1_connection_config`: Configuration for L1 connection, including: + - `:sequencer_inbox_address`: Address of the Sequencer Inbox contract + - `:json_rpc_named_arguments`: JSON RPC connection parameters + - `cache`: A set of previously processed keyset hashes ## Returns - A tuple containing: - - An updated list of data structures ready for import, including the DA - certificate (`data_type` is `0`) and potentially a new keyset (`data_type` - is `1`) if required. - - The updated cache with the checked keysets. 
+ - A tuple of updated record lists: + - DA records list with the new certificate (`data_type: 0`) and possibly + a new keyset (`data_type: 1`) + - Batch-to-blob associations list with the new mapping + - Updated keyset cache """ @spec prepare_for_import( - list(), + {[Arbitrum.DaMultiPurposeRecord.to_import()], [Arbitrum.BatchToDaBlob.to_import()]}, __MODULE__.t(), %{ :sequencer_inbox_address => String.t(), @@ -181,8 +181,8 @@ defmodule Indexer.Fetcher.Arbitrum.DA.Anytrust do }, MapSet.t() ) :: - {[Arbitrum.DaMultiPurposeRecord.to_import()], MapSet.t()} - def prepare_for_import(source, %__MODULE__{} = da_info, l1_connection_config, cache) do + {{[Arbitrum.DaMultiPurposeRecord.to_import()], [Arbitrum.BatchToDaBlob.to_import()]}, MapSet.t()} + def prepare_for_import({da_records_acc, batch_to_blob_acc}, %__MODULE__{} = da_info, l1_connection_config, cache) do data = %{ keyset_hash: ArbitrumHelper.bytes_to_hex_str(da_info.keyset_hash), data_hash: ArbitrumHelper.bytes_to_hex_str(da_info.data_hash), @@ -191,20 +191,29 @@ defmodule Indexer.Fetcher.Arbitrum.DA.Anytrust do bls_signature: ArbitrumHelper.bytes_to_hex_str(da_info.bls_signature) } - res = [ - %{ - data_type: 0, - data_key: da_info.data_hash, - data: data, - batch_number: da_info.batch_number - } - ] + # Create `DaMultiPurposeRecord` record + da_record = %{ + data_type: 0, + data_key: da_info.data_hash, + data: data, + # This field must be removed as soon as migration to a separate table for Batch-to-DA-record associations is completed. + batch_number: nil + } + + # Create `BatchToDaBlob` record + batch_to_blob_record = %{ + batch_number: da_info.batch_number, + data_blob_id: da_info.data_hash + } {check_result, keyset_map, updated_cache} = check_if_new_keyset(da_info.keyset_hash, l1_connection_config, cache) - updated_res = + da_records = case check_result do :new_keyset -> + # If the keyset is new, add a new keyset record to the DA records list. + # As per the nature of `DaMultiPurposeRecord` it can contain not only DA + # certificates but also keysets. [ %{ data_type: 1, @@ -212,14 +221,14 @@ defmodule Indexer.Fetcher.Arbitrum.DA.Anytrust do data: keyset_map, batch_number: nil } - | res + | [da_record] ] _ -> - res + [da_record] end - {updated_res ++ source, updated_cache} + {{da_records ++ da_records_acc, [batch_to_blob_record | batch_to_blob_acc]}, updated_cache} end # Verifies the existence of an AnyTrust committee keyset in the database and fetches it from L1 if not found. @@ -323,7 +332,7 @@ defmodule Indexer.Fetcher.Arbitrum.DA.Anytrust do block_number, block_number, sequencer_inbox_address, - [@set_valid_keyset_event, ArbitrumHelper.bytes_to_hex_str(keyset_hash)], + [ArbitrumEvents.set_valid_keyset(), ArbitrumHelper.bytes_to_hex_str(keyset_hash)], json_rpc_named_arguments ) @@ -338,7 +347,7 @@ defmodule Indexer.Fetcher.Arbitrum.DA.Anytrust do end defp set_valid_keyset_event_parse(event) do - [keyset_data] = decode_data(event["data"], @set_valid_keyset_event_unindexed_params) + [keyset_data] = decode_data(event["data"], ArbitrumEvents.set_valid_keyset_unindexed_params()) keyset_data end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/da/celestia.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/da/celestia.ex index e20db6ecd979..49f2ec76e670 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/da/celestia.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/da/celestia.ex @@ -87,28 +87,45 @@ defmodule Indexer.Fetcher.Arbitrum.DA.Celestia do Prepares Celestia Blob data for import. 
## Parameters - - `source`: The initial list of data to be imported. + - A tuple containing: + - A list of DA records. + - A list of Batch-to-DA-record associations. - `da_info`: The Celestia blob descriptor struct containing details about the data blob. ## Returns - - An updated list of data structures ready for import, including the Celestia blob descriptor. + - A tuple containing: + - An updated list of `DaMultiPurposeRecord` structures ready for import in the DB. + - An updated list of `BatchToDaBlob` structures ready for import in the DB. """ - @spec prepare_for_import(list(), __MODULE__.t()) :: [Arbitrum.DaMultiPurposeRecord.to_import()] - def prepare_for_import(source, %__MODULE__{} = da_info) do + @spec prepare_for_import( + {[Arbitrum.DaMultiPurposeRecord.to_import()], [Arbitrum.BatchToDaBlob.to_import()]}, + __MODULE__.t() + ) :: + {[Arbitrum.DaMultiPurposeRecord.to_import()], [Arbitrum.BatchToDaBlob.to_import()]} + def prepare_for_import({da_records_acc, batch_to_blob_acc}, %__MODULE__{} = da_info) do data = %{ height: da_info.height, transaction_commitment: ArbitrumHelper.bytes_to_hex_str(da_info.transaction_commitment), raw: ArbitrumHelper.bytes_to_hex_str(da_info.raw) } - [ - %{ - data_type: 0, - data_key: calculate_celestia_data_key(da_info.height, da_info.transaction_commitment), - data: data, - batch_number: da_info.batch_number - } - | source - ] + data_key = calculate_celestia_data_key(da_info.height, da_info.transaction_commitment) + + # Create record for arbitrum_da_multi_purpose table with batch_number set to nil + da_record = %{ + data_type: 0, + data_key: data_key, + data: data, + # This field must be removed as soon as migration to a separate table for Batch-to-DA-record associations is completed. + batch_number: nil + } + + # Create record for arbitrum_batches_to_da_blobs table + batch_to_blob_record = %{ + batch_number: da_info.batch_number, + data_blob_id: data_key + } + + {[da_record | da_records_acc], [batch_to_blob_record | batch_to_blob_acc]} end end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/da/common.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/da/common.ex index 80acc1a7c2ad..83ca7c45483b 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/da/common.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/da/common.ex @@ -51,34 +51,40 @@ defmodule Indexer.Fetcher.Arbitrum.DA.Common do and configuration parameters for the JSON RPC connection. ## Returns - - A list of data structures ready for import, each containing: - - `:data_key`: A binary key identifying the data. - - `:data_type`: An integer indicating the type of data, which can be `0` - for data blob descriptors and `1` for Anytrust keyset descriptors. - - `:data`: A map containing the DA information. - - `:batch_number`: The batch number associated with the data, or `nil`. + - A tuple containing: + - A list of DA records (`DaMultiPurposeRecord`) ready for import, each containing: + - `:data_key`: A binary key identifying the data. + - `:data_type`: An integer indicating the type of data, which can be `0` + for data blob descriptors and `1` for Anytrust keyset descriptors. + - `:data`: A map containing the DA information. + - `:batch_number`: The batch number associated with the data, or `nil`. + - A list of batch-to-blob associations (`BatchToDaBlob`) ready for import. 
""" @spec prepare_for_import([Celestia.t() | Anytrust.t() | map()], %{ :sequencer_inbox_address => String.t(), :json_rpc_named_arguments => EthereumJSONRPC.json_rpc_named_arguments() - }) :: [Arbitrum.DaMultiPurposeRecord.to_import()] - def prepare_for_import([], _), do: [] + }) :: {[Arbitrum.DaMultiPurposeRecord.to_import()], [Arbitrum.BatchToDaBlob.to_import()]} + def prepare_for_import([], _), do: {[], []} def prepare_for_import(da_info, l1_connection_config) do da_info - |> Enum.reduce({[], MapSet.new()}, fn info, {acc, cache} -> + |> Enum.reduce({{[], []}, MapSet.new()}, fn info, {{da_records_acc, batch_to_blob_acc}, cache} -> case info do %Celestia{} -> - {Celestia.prepare_for_import(acc, info), cache} + {da_records, batch_to_blobs} = Celestia.prepare_for_import({da_records_acc, batch_to_blob_acc}, info) + {{da_records, batch_to_blobs}, cache} %Anytrust{} -> - Anytrust.prepare_for_import(acc, info, l1_connection_config, cache) + {{da_records, batch_to_blobs}, updated_cache} = + Anytrust.prepare_for_import({da_records_acc, batch_to_blob_acc}, info, l1_connection_config, cache) + + {{da_records, batch_to_blobs}, updated_cache} _ -> - {acc, cache} + {{da_records_acc, batch_to_blob_acc}, cache} end end) - |> Kernel.elem(0) + |> then(fn {{da_records, batch_to_blobs}, _cache} -> {da_records, batch_to_blobs} end) end @doc """ diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/messages_to_l2_matcher.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/messages_to_l2_matcher.ex index 8b855784b9ef..49cfa23c9ee4 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/messages_to_l2_matcher.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/messages_to_l2_matcher.ex @@ -29,7 +29,7 @@ defmodule Indexer.Fetcher.Arbitrum.MessagesToL2Matcher do alias Indexer.BufferedTask alias Indexer.Fetcher.Arbitrum.MessagesToL2Matcher.Supervisor, as: MessagesToL2MatcherSupervisor alias Indexer.Fetcher.Arbitrum.Messaging, as: MessagingUtils - alias Indexer.Fetcher.Arbitrum.Utils.Db + alias Indexer.Fetcher.Arbitrum.Utils.Db.Messages, as: DbMessages alias Indexer.Fetcher.Arbitrum.Utils.Helper, as: ArbitrumHelper @behaviour BufferedTask @@ -242,7 +242,7 @@ defmodule Indexer.Fetcher.Arbitrum.MessagesToL2Matcher do # - Values are original message IDs as 256-bit hexadecimal strings. 
@spec get_hashed_ids_for_uncompleted_messages() :: %{binary() => binary()} defp get_hashed_ids_for_uncompleted_messages do - Db.get_uncompleted_l1_to_l2_messages_ids() + DbMessages.get_uncompleted_l1_to_l2_messages_ids() |> Enum.reduce(%{}, fn id, acc -> Map.put( acc, diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/messaging.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/messaging.ex index 04f4268cf950..662a35784cce 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/messaging.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/messaging.ex @@ -9,28 +9,19 @@ defmodule Indexer.Fetcher.Arbitrum.Messaging do """ import EthereumJSONRPC, only: [quantity_to_integer: 1] - - import Explorer.Helper, only: [decode_data: 2] + alias EthereumJSONRPC.Arbitrum, as: ArbitrumRpc + alias EthereumJSONRPC.Arbitrum.Constants.Events, as: ArbitrumEvents import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_info: 1, log_debug: 1] alias Explorer.Chain alias Explorer.Chain.Arbitrum.Message - alias Indexer.Fetcher.Arbitrum.Utils.Db - + alias Indexer.Fetcher.Arbitrum.Utils.Db.Messages, as: DbMessages + alias Indexer.Fetcher.Arbitrum.Utils.Db.Settlement, as: DbSettlement require Logger @zero_hex_prefix "0x" <> String.duplicate("0", 56) - @l2_to_l1_event_unindexed_params [ - :address, - {:uint, 256}, - {:uint, 256}, - {:uint, 256}, - {:uint, 256}, - :bytes - ] - @typep min_transaction :: %{ :hash => binary(), :type => non_neg_integer(), @@ -127,7 +118,7 @@ defmodule Indexer.Fetcher.Arbitrum.Messaging do filtered_logs = logs |> Enum.filter(fn event -> - event.address_hash == arbsys_contract and event.first_topic == Db.l2_to_l1_event() + event.address_hash == arbsys_contract and event.first_topic == ArbitrumEvents.l2_to_l1() end) handle_filtered_l2_to_l1_messages(filtered_logs) @@ -193,31 +184,33 @@ defmodule Indexer.Fetcher.Arbitrum.Messaging do def handle_filtered_l2_to_l1_messages(filtered_logs, caller) when is_list(filtered_logs) do # Get values before the loop parsing the events to reduce number of DB requests - highest_committed_block = Db.highest_committed_block(-1) - highest_confirmed_block = Db.highest_confirmed_block(-1) + highest_committed_block = DbSettlement.highest_committed_block(-1) + highest_confirmed_block = DbSettlement.highest_confirmed_block(-1) messages_map = filtered_logs |> Enum.reduce(%{}, fn event, messages_acc -> log_debug("L2 to L1 message #{event.transaction_hash} found") - {message_id, caller, blocknum, timestamp} = l2_to_l1_event_parse(event) + fields = + event + |> ArbitrumRpc.l2_to_l1_event_parse() message = %{ direction: :from_l2, - message_id: message_id, - originator_address: caller, + message_id: fields.message_id, + originator_address: fields.caller, originating_transaction_hash: event.transaction_hash, - origination_timestamp: timestamp, - originating_transaction_block_number: blocknum, - status: status_l2_to_l1_message(blocknum, highest_committed_block, highest_confirmed_block) + origination_timestamp: Timex.from_unix(fields.timestamp), + originating_transaction_block_number: fields.arb_block_number, + status: status_l2_to_l1_message(fields.arb_block_number, highest_committed_block, highest_confirmed_block) } |> complete_to_params() Map.put( messages_acc, - message_id, + fields.message_id, message ) end) @@ -276,23 +269,6 @@ defmodule Indexer.Fetcher.Arbitrum.Messaging do end) end - # Parses an L2-to-L1 event, extracting relevant information from the event's data. 
- @spec l2_to_l1_event_parse(min_log()) :: {non_neg_integer(), binary(), non_neg_integer(), DateTime.t()} - defp l2_to_l1_event_parse(event) do - [ - caller, - arb_block_num, - _eth_block_num, - timestamp, - _callvalue, - _data - ] = decode_data(event.data, @l2_to_l1_event_unindexed_params) - - position = quantity_to_integer(event.fourth_topic) - - {position, caller, arb_block_num, Timex.from_unix(timestamp)} - end - # Determines the status of an L2-to-L1 message based on its block number and the highest # committed and confirmed block numbers. @spec status_l2_to_l1_message(non_neg_integer(), non_neg_integer(), non_neg_integer()) :: @@ -321,7 +297,7 @@ defmodule Indexer.Fetcher.Arbitrum.Messaging do defp find_and_update_executed_messages(messages) do messages |> Map.keys() - |> Db.l1_executions() + |> DbMessages.l1_executions() |> Enum.reduce(messages, fn execution, messages_acc -> message = messages_acc diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/rollup_messages_catchup.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/rollup_messages_catchup.ex index eb66752bf1f2..1cb4f721b7fe 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/rollup_messages_catchup.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/rollup_messages_catchup.ex @@ -61,7 +61,7 @@ defmodule Indexer.Fetcher.Arbitrum.RollupMessagesCatchup do import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_warning: 1] - alias Indexer.Fetcher.Arbitrum.Utils.Db + alias Indexer.Fetcher.Arbitrum.Utils.Db.Common, as: DbCommon alias Indexer.Fetcher.Arbitrum.Workers.HistoricalMessagesOnL2 require Logger @@ -153,7 +153,7 @@ defmodule Indexer.Fetcher.Arbitrum.RollupMessagesCatchup do end new_data = - case Db.closest_block_after_timestamp(time_of_start) do + case DbCommon.closest_block_after_timestamp(time_of_start) do {:ok, block} -> Process.send(self(), :init_worker, []) diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_batches_statuses.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_batches_statuses.ex index 8e42afd52242..f8a49f2e51b3 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_batches_statuses.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_batches_statuses.ex @@ -52,8 +52,11 @@ defmodule Indexer.Fetcher.Arbitrum.TrackingBatchesStatuses do import Indexer.Fetcher.Arbitrum.Utils.Helper, only: [increase_duration: 2] + alias EthereumJSONRPC.Arbitrum, as: ArbitrumRpc + alias Indexer.Fetcher.Arbitrum.Utils.Db.Messages, as: DbMessages + alias Indexer.Fetcher.Arbitrum.Utils.Db.Settlement, as: DbSettlement + alias Indexer.Fetcher.Arbitrum.Utils.Rpc alias Indexer.Helper, as: IndexerHelper - alias Indexer.Fetcher.Arbitrum.Utils.{Db, Rpc} require Logger @@ -160,7 +163,7 @@ defmodule Indexer.Fetcher.Arbitrum.TrackingBatchesStatuses do } = state ) do %{outbox: outbox_address, sequencer_inbox: sequencer_inbox_address} = - Rpc.get_contracts_for_rollup( + ArbitrumRpc.get_contracts_for_rollup( l1_rollup_address, :inbox_outbox, json_l1_rpc_named_arguments @@ -168,15 +171,15 @@ defmodule Indexer.Fetcher.Arbitrum.TrackingBatchesStatuses do l1_start_block = Rpc.get_l1_start_block(state.config.l1_start_block, json_l1_rpc_named_arguments) - new_batches_start_block = Db.l1_block_to_discover_latest_committed_batch(l1_start_block) - historical_batches_end_block = Db.l1_block_to_discover_earliest_committed_batch(l1_start_block - 1) + new_batches_start_block = DbSettlement.l1_block_to_discover_latest_committed_batch(l1_start_block) + historical_batches_end_block = 
DbSettlement.l1_block_to_discover_earliest_committed_batch(l1_start_block - 1) - new_confirmations_start_block = Db.l1_block_of_latest_confirmed_block(l1_start_block) + new_confirmations_start_block = DbSettlement.l1_block_of_latest_confirmed_block(l1_start_block) - new_executions_start_block = Db.l1_block_to_discover_latest_execution(l1_start_block) - historical_executions_end_block = Db.l1_block_to_discover_earliest_execution(l1_start_block - 1) + new_executions_start_block = DbMessages.l1_block_to_discover_latest_execution(l1_start_block) + historical_executions_end_block = DbMessages.l1_block_to_discover_earliest_execution(l1_start_block - 1) - {lowest_batch, missing_batches_end_batch} = Db.get_min_max_batch_numbers() + {lowest_batch, missing_batches_end_batch} = DbSettlement.get_min_max_batch_numbers() Process.send(self(), :check_new_batches, []) diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_messages_on_l1.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_messages_on_l1.ex index 59c43d48a528..a0bd16bc36b9 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_messages_on_l1.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/tracking_messages_on_l1.ex @@ -33,10 +33,13 @@ defmodule Indexer.Fetcher.Arbitrum.TrackingMessagesOnL1 do import Indexer.Fetcher.Arbitrum.Utils.Helper, only: [increase_duration: 2] + alias EthereumJSONRPC.Arbitrum, as: ArbitrumRpc + alias Indexer.Fetcher.Arbitrum.Workers.NewMessagesToL2 + alias Indexer.Fetcher.Arbitrum.Utils.Db.Messages, as: DbMessages + alias Indexer.Fetcher.Arbitrum.Utils.Rpc alias Indexer.Helper, as: IndexerHelper - alias Indexer.Fetcher.Arbitrum.Utils.{Db, Rpc} require Logger @@ -120,11 +123,15 @@ defmodule Indexer.Fetcher.Arbitrum.TrackingMessagesOnL1 do %{config: %{l1_rollup_address: _, json_l1_rpc_named_arguments: _, l1_start_block: _}, data: _} = state ) do %{bridge: bridge_address} = - Rpc.get_contracts_for_rollup(state.config.l1_rollup_address, :bridge, state.config.json_l1_rpc_named_arguments) + ArbitrumRpc.get_contracts_for_rollup( + state.config.l1_rollup_address, + :bridge, + state.config.json_l1_rpc_named_arguments + ) l1_start_block = Rpc.get_l1_start_block(state.config.l1_start_block, state.config.json_l1_rpc_named_arguments) - new_msg_to_l2_start_block = Db.l1_block_to_discover_latest_message_to_l2(l1_start_block) - historical_msg_to_l2_end_block = Db.l1_block_to_discover_earliest_message_to_l2(l1_start_block - 1) + new_msg_to_l2_start_block = DbMessages.l1_block_to_discover_latest_message_to_l2(l1_start_block) + historical_msg_to_l2_end_block = DbMessages.l1_block_to_discover_earliest_message_to_l2(l1_start_block - 1) Process.send(self(), :check_new_msgs_to_rollup, []) diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db.ex deleted file mode 100644 index 849e2f60c951..000000000000 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db.ex +++ /dev/null @@ -1,984 +0,0 @@ -defmodule Indexer.Fetcher.Arbitrum.Utils.Db do - @moduledoc """ - Common functions to simplify DB routines for Indexer.Fetcher.Arbitrum fetchers - """ - - import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_warning: 1] - - alias Explorer.Chain - alias Explorer.Chain.Arbitrum - alias Explorer.Chain.Arbitrum.Reader - alias Explorer.Chain.Block, as: FullBlock - alias Explorer.Chain.{Data, Hash} - - alias Explorer.Utility.MissingBlockRange - - require Logger - - # 32-byte signature of the event L2ToL1Tx(address caller, address indexed destination, uint256 
indexed hash, uint256 indexed position, uint256 arbBlockNum, uint256 ethBlockNum, uint256 timestamp, uint256 callvalue, bytes data) - @l2_to_l1_event "0x3e7aafa77dbf186b7fd488006beff893744caa3c4f6f299e8a709fa2087374fc" - - @doc """ - Indexes L1 transactions provided in the input map. For transactions that - are already in the database, existing indices are taken. For new transactions, - the next available indices are assigned. - - ## Parameters - - `new_l1_transactions`: A map of L1 transaction descriptions. The keys of the map are - transaction hashes. - - ## Returns - - `l1_transactions`: A map of L1 transaction descriptions. Each element is extended with - the key `:id`, representing the index of the L1 transaction in the - `arbitrum_lifecycle_l1_transactions` table. - """ - @spec get_indices_for_l1_transactions(%{ - binary() => %{ - :hash => binary(), - :block_number => FullBlock.block_number(), - :timestamp => DateTime.t(), - :status => :unfinalized | :finalized, - optional(:id) => non_neg_integer() - } - }) :: %{binary() => Arbitrum.LifecycleTransaction.to_import()} - # TODO: consider a way to remove duplicate with ZkSync.Utils.Db - def get_indices_for_l1_transactions(new_l1_transactions) - when is_map(new_l1_transactions) do - # Get indices for l1 transactions previously handled - l1_transactions = - new_l1_transactions - |> Map.keys() - |> Reader.lifecycle_transaction_ids() - |> Enum.reduce(new_l1_transactions, fn {hash, id}, transactions -> - {_, transactions} = - Map.get_and_update!(transactions, hash.bytes, fn l1_transaction -> - {l1_transaction, Map.put(l1_transaction, :id, id)} - end) - - transactions - end) - - # Get the next index for the first new transaction based - # on the indices existing in DB - l1_transaction_next_id = Reader.next_lifecycle_transaction_id() - - # Assign new indices for the transactions which are not in - # the l1 transactions table yet - {updated_l1_transactions, _} = - l1_transactions - |> Map.keys() - |> Enum.reduce( - {l1_transactions, l1_transaction_next_id}, - fn hash, {transactions, next_id} -> - transaction = transactions[hash] - id = Map.get(transaction, :id) - - if is_nil(id) do - {Map.put(transactions, hash, Map.put(transaction, :id, next_id)), next_id + 1} - else - {transactions, next_id} - end - end - ) - - updated_l1_transactions - end - - @doc """ - Reads a list of L1 transactions by their hashes from the - `arbitrum_lifecycle_l1_transactions` table and converts them to maps. - - ## Parameters - - `l1_transaction_hashes`: A list of hashes to retrieve L1 transactions for. - - ## Returns - - A list of maps representing the `Explorer.Chain.Arbitrum.LifecycleTransaction` - corresponding to the hashes from the input list. The output list is - compatible with the database import operation. - """ - @spec lifecycle_transactions([binary()]) :: [Arbitrum.LifecycleTransaction.to_import()] - def lifecycle_transactions(l1_transaction_hashes) do - l1_transaction_hashes - |> Reader.lifecycle_transactions() - |> Enum.map(&lifecycle_transaction_to_map/1) - end - - @doc """ - Calculates the next L1 block number to search for the latest committed batch. - - ## Parameters - - `value_if_nil`: The default value to return if no committed batch is found. - - ## Returns - - The next L1 block number after the latest committed batch or `value_if_nil` if no committed batches are found. 
- """ - @spec l1_block_to_discover_latest_committed_batch(FullBlock.block_number() | nil) :: FullBlock.block_number() | nil - def l1_block_to_discover_latest_committed_batch(value_if_nil) - when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do - case Reader.l1_block_of_latest_committed_batch() do - nil -> - log_warning("No committed batches found in DB") - value_if_nil - - value -> - value + 1 - end - end - - @doc """ - Calculates the L1 block number to start the search for committed batches. - - Returns the block number of the earliest L1 block containing a transaction - that commits a batch, as found in the database. If no committed batches are - found, it returns a default value. It's safe to use the returned block number - for subsequent searches, even if it corresponds to a block we've previously - processed. This is because multiple transactions committing different batches - can exist within the same block, and revisiting this block ensures no batches - are missed. - - The batch discovery process is expected to handle potential duplicates - correctly without creating redundant database entries. - - ## Parameters - - `value_if_nil`: The default value to return if no committed batch is found. - - ## Returns - - The L1 block number containing the earliest committed batch or `value_if_nil`. - """ - @spec l1_block_to_discover_earliest_committed_batch(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() - def l1_block_to_discover_earliest_committed_batch(value_if_nil) - when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do - case Reader.l1_block_of_earliest_committed_batch() do - nil -> - log_warning("No committed batches found in DB") - value_if_nil - - value -> - value - end - end - - @doc """ - Retrieves the block number of the highest rollup block that has been included in a batch. - - ## Parameters - - `value_if_nil`: The default value to return if no rollup batches are found. - - ## Returns - - The number of the highest rollup block included in a batch - or `value_if_nil` if no rollup batches are found. - """ - @spec highest_committed_block(nil | integer()) :: nil | FullBlock.block_number() - def highest_committed_block(value_if_nil) - when is_integer(value_if_nil) or is_nil(value_if_nil) do - case Reader.highest_committed_block() do - nil -> value_if_nil - value -> value - end - end - - @doc """ - Calculates the next L1 block number to search for the latest message sent to L2. - - ## Parameters - - `value_if_nil`: The default value to return if no L1-to-L2 messages have been discovered. - - ## Returns - - The L1 block number immediately following the latest discovered message to L2, - or `value_if_nil` if no messages to L2 have been found. - """ - @spec l1_block_to_discover_latest_message_to_l2(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() - def l1_block_to_discover_latest_message_to_l2(value_if_nil) - when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do - case Reader.l1_block_of_latest_discovered_message_to_l2() do - nil -> - log_warning("No messages to L2 found in DB") - value_if_nil - - value -> - value + 1 - end - end - - @doc """ - Calculates the next L1 block number to start the search for messages sent to L2 - that precede the earliest message already discovered. - - ## Parameters - - `value_if_nil`: The default value to return if no L1-to-L2 messages have been discovered. 
- - ## Returns - - The L1 block number immediately preceding the earliest discovered message to L2, - or `value_if_nil` if no messages to L2 have been found. - """ - @spec l1_block_to_discover_earliest_message_to_l2(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() - def l1_block_to_discover_earliest_message_to_l2(value_if_nil) - when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do - case Reader.l1_block_of_earliest_discovered_message_to_l2() do - nil -> - log_warning("No messages to L2 found in DB") - value_if_nil - - value -> - value - 1 - end - end - - @doc """ - Retrieves the L1 block number immediately following the block where the confirmation transaction - for the highest confirmed rollup block was included. - - ## Parameters - - `value_if_nil`: The default value to return if no confirmed rollup blocks are found. - - ## Returns - - The L1 block number immediately after the block containing the confirmation transaction of - the highest confirmed rollup block, or `value_if_nil` if no confirmed rollup blocks are present. - """ - @spec l1_block_of_latest_confirmed_block(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() - def l1_block_of_latest_confirmed_block(value_if_nil) - when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do - case Reader.l1_block_of_latest_confirmed_block() do - nil -> - log_warning("No confirmed blocks found in DB") - value_if_nil - - value -> - value + 1 - end - end - - @doc """ - Retrieves the block number of the highest rollup block for which a confirmation transaction - has been sent to L1. - - ## Parameters - - `value_if_nil`: The default value to return if no confirmed rollup blocks are found. - - ## Returns - - The block number of the highest confirmed rollup block, - or `value_if_nil` if no confirmed rollup blocks are found in the database. - """ - @spec highest_confirmed_block(nil | integer()) :: nil | FullBlock.block_number() - def highest_confirmed_block(value_if_nil) - when is_integer(value_if_nil) or is_nil(value_if_nil) do - case Reader.highest_confirmed_block() do - nil -> value_if_nil - value -> value - end - end - - @doc """ - Determines the next L1 block number to search for the latest execution of an L2-to-L1 message. - - ## Parameters - - `value_if_nil`: The default value to return if no execution transactions for L2-to-L1 messages - have been recorded. - - ## Returns - - The L1 block number following the block that contains the latest execution transaction - for an L2-to-L1 message, or `value_if_nil` if no such executions have been found. - """ - @spec l1_block_to_discover_latest_execution(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() - def l1_block_to_discover_latest_execution(value_if_nil) - when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do - case Reader.l1_block_of_latest_execution() do - nil -> - log_warning("No L1 executions found in DB") - value_if_nil - - value -> - value + 1 - end - end - - @doc """ - Determines the L1 block number just before the block that contains the earliest known - execution transaction for an L2-to-L1 message. - - ## Parameters - - `value_if_nil`: The default value to return if no execution transactions for - L2-to-L1 messages have been found. - - ## Returns - - The L1 block number preceding the earliest known execution transaction for - an L2-to-L1 message, or `value_if_nil` if no such executions are found in the database. 
- """ - @spec l1_block_to_discover_earliest_execution(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() - def l1_block_to_discover_earliest_execution(value_if_nil) - when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do - case Reader.l1_block_of_earliest_execution() do - nil -> - log_warning("No L1 executions found in DB") - value_if_nil - - value -> - value - 1 - end - end - - @doc """ - Retrieves full details of rollup blocks, including associated transactions, for each block number specified in the input list. - - ## Parameters - - `list_of_block_numbers`: A list of block numbers for which full block details are to be retrieved. - - ## Returns - - A list of `Explorer.Chain.Block` instances containing detailed information for each - block number in the input list. Returns an empty list if no blocks are found for the given numbers. - """ - @spec rollup_blocks([FullBlock.block_number()]) :: [FullBlock.t()] - def rollup_blocks(list_of_block_numbers), do: Reader.rollup_blocks(list_of_block_numbers) - - @doc """ - Retrieves unfinalized L1 transactions that are involved in changing the statuses - of rollup blocks or transactions. - - An L1 transaction is considered unfinalized if it has not yet reached a state - where it is permanently included in the blockchain, meaning it is still susceptible - to potential reorganization or change. Transactions are evaluated against - the finalized_block parameter to determine their finalized status. - - ## Parameters - - `finalized_block`: The block number up to which unfinalized transactions are to be retrieved. - - ## Returns - - A list of maps representing unfinalized L1 transactions and compatible with the - database import operation. - """ - @spec lifecycle_unfinalized_transactions(FullBlock.block_number()) :: [Arbitrum.LifecycleTransaction.to_import()] - def lifecycle_unfinalized_transactions(finalized_block) - when is_integer(finalized_block) and finalized_block >= 0 do - finalized_block - |> Reader.lifecycle_unfinalized_transactions() - |> Enum.map(&lifecycle_transaction_to_map/1) - end - - @doc """ - Retrieves the block number associated with a specific hash of a rollup block. - - ## Parameters - - `hash`: The hash of the rollup block whose number is to be retrieved. - - ## Returns - - The block number associated with the given rollup block hash. - """ - @spec rollup_block_hash_to_num(binary()) :: FullBlock.block_number() | nil - def rollup_block_hash_to_num(hash) when is_binary(hash) do - Reader.rollup_block_hash_to_num(hash) - end - - @doc """ - Retrieves the L1 batch that includes a specified rollup block number. - - ## Parameters - - `num`: The block number of the rollup block for which the containing - L1 batch is to be retrieved. - - ## Returns - - The `Explorer.Chain.Arbitrum.L1Batch` associated with the given rollup block number - if it exists and its commit transaction is loaded. 
- """ - @spec get_batch_by_rollup_block_number(FullBlock.block_number()) :: Arbitrum.L1Batch.t() | nil - def get_batch_by_rollup_block_number(num) - when is_integer(num) and num >= 0 do - case Reader.get_batch_by_rollup_block_number(num) do - nil -> - nil - - batch -> - case batch.commitment_transaction do - nil -> - raise "Incorrect state of the DB: commitment_transaction is not loaded for the batch with number #{num}" - - %Ecto.Association.NotLoaded{} -> - raise "Incorrect state of the DB: commitment_transaction is not loaded for the batch with number #{num}" - - _ -> - batch - end - end - end - - @doc """ - Retrieves a batch by its number. - - ## Parameters - - `number`: The number of a rollup batch. - - ## Returns - - An instance of `Explorer.Chain.Arbitrum.L1Batch`, or `nil` if no batch with - such a number is found. - """ - @spec get_batch_by_number(non_neg_integer()) :: Arbitrum.L1Batch.t() | nil - def get_batch_by_number(number) do - Reader.get_batch_by_number(number) - end - - @doc """ - Retrieves rollup blocks within a specified block range that have not yet been confirmed. - - ## Parameters - - `first_block`: The starting block number of the range to search for unconfirmed rollup blocks. - - `last_block`: The ending block number of the range. - - ## Returns - - A list of maps, each representing an unconfirmed rollup block within the specified range. - If no unconfirmed blocks are found within the range, an empty list is returned. - """ - @spec unconfirmed_rollup_blocks(FullBlock.block_number(), FullBlock.block_number()) :: [ - Arbitrum.BatchBlock.to_import() - ] - def unconfirmed_rollup_blocks(first_block, last_block) - when is_integer(first_block) and first_block >= 0 and - is_integer(last_block) and first_block <= last_block do - # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart - Reader.unconfirmed_rollup_blocks(first_block, last_block) - |> Enum.map(&rollup_block_to_map/1) - end - - @doc """ - Counts the number of confirmed rollup blocks in a specified batch. - - ## Parameters - - `batch_number`: The batch number for which the count of confirmed rollup blocks - is to be determined. - - ## Returns - - A number of rollup blocks confirmed in the specified batch. - """ - @spec count_confirmed_rollup_blocks_in_batch(non_neg_integer()) :: non_neg_integer() - def count_confirmed_rollup_blocks_in_batch(batch_number) - when is_integer(batch_number) and batch_number >= 0 do - Reader.count_confirmed_rollup_blocks_in_batch(batch_number) - end - - @doc """ - Retrieves a list of L2-to-L1 messages that have been initiated up to - a specified rollup block number. - - ## Parameters - - `block_number`: The block number up to which initiated L2-to-L1 messages - should be retrieved. - - ## Returns - - A list of maps, each representing an initiated L2-to-L1 message compatible with the - database import operation. If no initiated messages are found up to the specified - block number, an empty list is returned. - """ - @spec initiated_l2_to_l1_messages(FullBlock.block_number()) :: [Arbitrum.Message.to_import()] - def initiated_l2_to_l1_messages(block_number) - when is_integer(block_number) and block_number >= 0 do - # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart - Reader.l2_to_l1_messages(:initiated, block_number) - |> Enum.map(&message_to_map/1) - end - - @doc """ - Retrieves a list of L2-to-L1 'sent' messages that have been included up to - a specified rollup block number. 
- - A message is considered 'sent' when there is a batch including the transaction - that initiated the message, and this batch has been successfully delivered to L1. - - ## Parameters - - `block_number`: The block number up to which sent L2-to-L1 messages are to be retrieved. - - ## Returns - - A list of maps, each representing a sent L2-to-L1 message compatible with the - database import operation. If no messages with the 'sent' status are found by - the specified block number, an empty list is returned. - """ - @spec sent_l2_to_l1_messages(FullBlock.block_number()) :: [Arbitrum.Message.to_import()] - def sent_l2_to_l1_messages(block_number) - when is_integer(block_number) and block_number >= 0 do - # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart - Reader.l2_to_l1_messages(:sent, block_number) - |> Enum.map(&message_to_map/1) - end - - @doc """ - Retrieves a list of L2-to-L1 'confirmed' messages that have been included up to - a specified rollup block number. - - A message is considered 'confirmed' when its transaction was included in a rollup block, - and the confirmation of this block has been delivered to L1. - - ## Parameters - - `block_number`: The block number up to which confirmed L2-to-L1 messages are to be retrieved. - - ## Returns - - A list of maps, each representing a confirmed L2-to-L1 message compatible with the - database import operation. If no messages with the 'confirmed' status are found by - the specified block number, an empty list is returned. - """ - @spec confirmed_l2_to_l1_messages() :: [Arbitrum.Message.to_import()] - def confirmed_l2_to_l1_messages do - # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart - Reader.l2_to_l1_messages(:confirmed, nil) - |> Enum.map(&message_to_map/1) - end - - @doc """ - Checks if the numbers from the provided list correspond to the numbers of indexed batches. - - ## Parameters - - `batches_numbers`: The list of batch numbers. - - ## Returns - - A list of batch numbers that are indexed and match the provided list, or `[]` - if none of the batch numbers in the provided list exist in the database. The output list - may be smaller than the input list. - """ - @spec batches_exist([non_neg_integer()]) :: [non_neg_integer()] - def batches_exist(batches_numbers) when is_list(batches_numbers) do - Reader.batches_exist(batches_numbers) - end - - @doc """ - Reads a list of transactions executing L2-to-L1 messages by their IDs. - - ## Parameters - - `message_ids`: A list of IDs to retrieve executing transactions for. - - ## Returns - - A list of `Explorer.Chain.Arbitrum.L1Execution` corresponding to the message IDs from - the input list. The output list may be smaller than the input list if some IDs do not - correspond to any existing transactions. - """ - @spec l1_executions([non_neg_integer()]) :: [Arbitrum.L1Execution.t()] - def l1_executions(message_ids) when is_list(message_ids) do - Reader.l1_executions(message_ids) - end - - @doc """ - Identifies the range of L1 blocks to investigate for missing confirmations of rollup blocks. - - This function determines the L1 block numbers bounding the interval where gaps in rollup block - confirmations might exist. It uses the earliest and latest L1 block numbers associated with - unconfirmed rollup blocks to define this range. - - ## Parameters - - `right_pos_value_if_nil`: The default value to use for the upper bound of the range if no - confirmed blocks found. 
- - ## Returns - - A tuple containing two elements: the lower and upper bounds of L1 block numbers to check - for missing rollup block confirmations. If the necessary confirmation data is unavailable, - the first element will be `nil`, and the second will be `right_pos_value_if_nil`. - """ - @spec l1_blocks_to_expect_rollup_blocks_confirmation(nil | FullBlock.block_number()) :: - {nil | FullBlock.block_number(), nil | FullBlock.block_number()} - def l1_blocks_to_expect_rollup_blocks_confirmation(right_pos_value_if_nil) - when (is_integer(right_pos_value_if_nil) and right_pos_value_if_nil >= 0) or is_nil(right_pos_value_if_nil) do - case Reader.l1_blocks_of_confirmations_bounding_first_unconfirmed_rollup_blocks_gap() do - nil -> - log_warning("No L1 confirmations found in DB") - {nil, right_pos_value_if_nil} - - {nil, newer_confirmation_l1_block} -> - {nil, newer_confirmation_l1_block - 1} - - {older_confirmation_l1_block, newer_confirmation_l1_block} -> - {older_confirmation_l1_block + 1, newer_confirmation_l1_block - 1} - end - end - - @doc """ - Retrieves the transaction hashes as strings for missed L1-to-L2 messages within - a specified block range. - - The function identifies missed messages by checking transactions of specific - types that are supposed to contain L1-to-L2 messages and verifying if there are - corresponding entries in the messages table. A message is considered missed if - there is a transaction without a matching message record within the specified - block range. - - ## Parameters - - `start_block`: The starting block number of the range. - - `end_block`: The ending block number of the range. - - ## Returns - - A list of transaction hashes as strings for missed L1-to-L2 messages. - """ - @spec transactions_for_missed_messages_to_l2(non_neg_integer(), non_neg_integer()) :: [String.t()] - def transactions_for_missed_messages_to_l2(start_block, end_block) do - # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart - Reader.transactions_for_missed_messages_to_l2(start_block, end_block) - |> Enum.map(&Hash.to_string/1) - end - - @doc """ - Retrieves the logs for missed L2-to-L1 messages within a specified block range - and converts them to maps. - - The function identifies missed messages by checking logs for the specified - L2-to-L1 event and verifying if there are corresponding entries in the messages - table. A message is considered missed if there is a log entry without a - matching message record within the specified block range. - - ## Parameters - - `start_block`: The starting block number of the range. - - `end_block`: The ending block number of the range. - - ## Returns - - A list of maps representing the logs for missed L2-to-L1 messages. 
- """ - @spec logs_for_missed_messages_from_l2(non_neg_integer(), non_neg_integer()) :: [ - %{ - data: String.t(), - index: non_neg_integer(), - first_topic: String.t(), - second_topic: String.t(), - third_topic: String.t(), - fourth_topic: String.t(), - address_hash: String.t(), - transaction_hash: String.t(), - block_hash: String.t(), - block_number: FullBlock.block_number() - } - ] - def logs_for_missed_messages_from_l2(start_block, end_block) do - arbsys_contract = Application.get_env(:indexer, Indexer.Fetcher.Arbitrum.Messaging)[:arbsys_contract] - - # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart - Reader.logs_for_missed_messages_from_l2(start_block, end_block, arbsys_contract, @l2_to_l1_event) - |> Enum.map(&logs_to_map/1) - end - - @doc """ - Retrieves L1 block ranges that could be used to re-discover missing batches - within a specified range of batch numbers. - - This function identifies the L1 block ranges corresponding to missing L1 batches - within the given range of batch numbers. It first finds the missing batches, - then determines their neighboring ranges, and finally maps these ranges to the - corresponding L1 block numbers. - - ## Parameters - - `start_batch_number`: The starting batch number of the search range. - - `end_batch_number`: The ending batch number of the search range. - - `block_for_batch_0`: The L1 block number corresponding to the batch number 0. - - ## Returns - - A list of tuples, each containing a start and end L1 block number for the - ranges corresponding to the missing batches. - - ## Examples - - Example #1 - - Within the range from 1 to 10, the missing batch is 2. The L1 block for the - batch #1 is 10, and the L1 block for the batch #3 is 31. - - The output will be `[{10, 31}]`. - - Example #2 - - Within the range from 1 to 10, the missing batches are 2 and 6, and - - The L1 block for the batch #1 is 10. - - The L1 block for the batch #3 is 31. - - The L1 block for the batch #5 is 64. - - The L1 block for the batch #7 is 90. - - The output will be `[{10, 31}, {64, 90}]`. - - Example #3 - - Within the range from 1 to 10, the missing batches are 2 and 4, and - - The L1 block for the batch #1 is 10. - - The L1 block for the batch #3 is 31. - - The L1 block for the batch #5 is 64. - - The output will be `[{10, 31}, {32, 64}]`. - - Example #4 - - Within the range from 1 to 10, the missing batches are 2 and 4, and - - The L1 block for the batch #1 is 10. - - The L1 block for the batch #3 is 31. - - The L1 block for the batch #5 is 31. - - The output will be `[{10, 31}]`. - """ - @spec get_l1_block_ranges_for_missing_batches(non_neg_integer(), non_neg_integer(), FullBlock.block_number()) :: [ - {FullBlock.block_number(), FullBlock.block_number()} - ] - def get_l1_block_ranges_for_missing_batches(start_batch_number, end_batch_number, block_for_batch_0) - when is_integer(start_batch_number) and is_integer(end_batch_number) and end_batch_number >= start_batch_number do - # credo:disable-for-lines:4 Credo.Check.Refactor.PipeChainStart - neighbors_of_missing_batches = - Reader.find_missing_batches(start_batch_number, end_batch_number) - |> list_to_chunks() - |> chunks_to_neighbor_ranges() - - batches_gaps_to_block_ranges(neighbors_of_missing_batches, block_for_batch_0) - end - - # Splits a list into chunks of consecutive numbers, e.g., [1, 2, 3, 5, 6, 8] becomes [[1, 2, 3], [5, 6], [8]]. 
- @spec list_to_chunks([non_neg_integer()]) :: [[non_neg_integer()]] - defp list_to_chunks(list) do - chunk_fun = fn current, acc -> - case acc do - [] -> - {:cont, [current]} - - [last | _] = acc when current == last + 1 -> - {:cont, [current | acc]} - - acc -> - {:cont, Enum.reverse(acc), [current]} - end - end - - after_fun = fn acc -> - case acc do - # Special case to handle the situation when the initial list is empty - [] -> {:cont, []} - _ -> {:cont, Enum.reverse(acc), []} - end - end - - list - |> Enum.chunk_while([], chunk_fun, after_fun) - end - - # Converts chunks of elements into neighboring ranges, e.g., [[1, 2], [4]] becomes [{0, 3}, {3, 5}]. - @spec chunks_to_neighbor_ranges([[non_neg_integer()]]) :: [{non_neg_integer(), non_neg_integer()}] - defp chunks_to_neighbor_ranges([]), do: [] - - defp chunks_to_neighbor_ranges(list_of_chunks) do - list_of_chunks - |> Enum.map(fn current -> - case current do - [one_element] -> {one_element - 1, one_element + 1} - chunk -> {List.first(chunk) - 1, List.last(chunk) + 1} - end - end) - end - - # Converts batch number gaps to L1 block ranges for missing batches discovery. - # - # This function takes a list of neighboring batch number ranges representing gaps - # in the batch sequence and converts them to corresponding L1 block ranges. These - # L1 block ranges can be used to rediscover missing batches. - # - # ## Parameters - # - `neighbors_of_missing_batches`: A list of tuples, each containing the start - # and end batch numbers of a gap in the batch sequence. - # - `block_for_batch_0`: The L1 block number corresponding to batch number 0. - # - # ## Returns - # - A list of tuples, each containing the start and end L1 block numbers for - # ranges where missing batches should be rediscovered. - @spec batches_gaps_to_block_ranges([{non_neg_integer(), non_neg_integer()}], FullBlock.block_number()) :: - [{FullBlock.block_number(), FullBlock.block_number()}] - defp batches_gaps_to_block_ranges(neighbors_of_missing_batches, block_for_batch_0) - - defp batches_gaps_to_block_ranges([], _), do: [] - - defp batches_gaps_to_block_ranges(neighbors_of_missing_batches, block_for_batch_0) do - l1_blocks = - neighbors_of_missing_batches - |> Enum.reduce(MapSet.new(), fn {start_batch, end_batch}, acc -> - acc - |> MapSet.put(start_batch) - |> MapSet.put(end_batch) - end) - # To avoid error in getting L1 block for the batch 0 - |> MapSet.delete(0) - |> MapSet.to_list() - |> Reader.get_l1_blocks_of_batches_by_numbers() - # It is safe to add the block for the batch 0 even if the batch 1 is missing - |> Map.put(0, block_for_batch_0) - - neighbors_of_missing_batches - |> Enum.reduce({[], %{}}, fn {start_batch, end_batch}, {res, blocks_used} -> - range_start = l1_blocks[start_batch] - range_end = l1_blocks[end_batch] - # If the batch's block was already used as a block finishing one of the ranges - # then we should start another range from the next block to avoid discovering - # the same batches batches again. - case {Map.get(blocks_used, range_start, false), range_start == range_end} do - {true, true} -> - # Edge case when the range consists of a single block (several batches in - # the same block) which is going to be inspected up to this moment. 
- {res, blocks_used} - - {true, false} -> - {[{range_start + 1, range_end} | res], Map.put(blocks_used, range_end, true)} - - {false, _} -> - {[{range_start, range_end} | res], Map.put(blocks_used, range_end, true)} - end - end) - |> elem(0) - end - - @doc """ - Retrieves the minimum and maximum batch numbers of L1 batches. - - ## Returns - - A tuple containing the minimum and maximum batch numbers or `{nil, nil}` if no batches are found. - """ - @spec get_min_max_batch_numbers() :: {non_neg_integer(), non_neg_integer()} | {nil | nil} - def get_min_max_batch_numbers do - Reader.get_min_max_batch_numbers() - end - - @doc """ - Returns 32-byte signature of the event `L2ToL1Tx` - """ - @spec l2_to_l1_event() :: <<_::528>> - def l2_to_l1_event, do: @l2_to_l1_event - - @doc """ - Determines whether a given range of block numbers has been fully indexed without any missing blocks. - - ## Parameters - - `start_block`: The starting block number of the range to check for completeness in indexing. - - `end_block`: The ending block number of the range. - - ## Returns - - `true` if the entire range from `start_block` to `end_block` is indexed and contains no missing - blocks, indicating no intersection with missing block ranges; `false` otherwise. - """ - @spec indexed_blocks?(FullBlock.block_number(), FullBlock.block_number()) :: boolean() - def indexed_blocks?(start_block, end_block) - when is_integer(start_block) and start_block >= 0 and - is_integer(end_block) and start_block <= end_block do - is_nil(MissingBlockRange.intersects_with_range(start_block, end_block)) - end - - @doc """ - Retrieves the block number for the closest block immediately after a given timestamp. - - ## Parameters - - `timestamp`: The `DateTime` timestamp for which the closest subsequent block number is sought. - - ## Returns - - `{:ok, block_number}` where `block_number` is the number of the closest block that occurred - after the specified timestamp. - - `{:error, :not_found}` if no block is found after the specified timestamp. - """ - @spec closest_block_after_timestamp(DateTime.t()) :: {:error, :not_found} | {:ok, FullBlock.block_number()} - def closest_block_after_timestamp(timestamp) do - Chain.timestamp_to_block_number(timestamp, :after, false) - end - - @doc """ - Checks if an AnyTrust keyset exists in the database using the provided keyset hash. - - ## Parameters - - `keyset_hash`: The hash of the keyset to be checked. - - ## Returns - - `true` if the keyset exists, `false` otherwise. - """ - @spec anytrust_keyset_exists?(binary()) :: boolean() - def anytrust_keyset_exists?(keyset_hash) do - not Enum.empty?(Reader.get_anytrust_keyset(keyset_hash)) - end - - @doc """ - Retrieves Data Availability (DA) information for a specific Arbitrum batch number. - - This function queries the database for DA information stored in the - `DaMultiPurposeRecord`. It specifically looks for records where - the `data_type` is 0, which corresponds to batch-specific DA information. - - ## Parameters - - `batch_number`: The Arbitrum batch number. - - ## Returns - - A map containing the DA information for the specified batch number. This map - corresponds to the `data` field of the `DaMultiPurposeRecord`. - - An empty map (`%{}`) if no DA information is found for the given batch number. - """ - @spec get_da_info_by_batch_number(non_neg_integer()) :: map() - def get_da_info_by_batch_number(batch_number) do - Reader.get_da_info_by_batch_number(batch_number) - end - - @doc """ - Retrieves the list of uncompleted L2-to-L1 messages IDs. 
-
-  ## Returns
-  - A list of the IDs of uncompleted L2-to-L1 messages.
-  """
-  @spec get_uncompleted_l1_to_l2_messages_ids() :: [non_neg_integer()]
-  def get_uncompleted_l1_to_l2_messages_ids do
-    Reader.get_uncompleted_l1_to_l2_messages_ids()
-  end
-
-  @spec lifecycle_transaction_to_map(Arbitrum.LifecycleTransaction.t()) :: Arbitrum.LifecycleTransaction.to_import()
-  defp lifecycle_transaction_to_map(transaction) do
-    [:id, :hash, :block_number, :timestamp, :status]
-    |> db_record_to_map(transaction)
-  end
-
-  @spec rollup_block_to_map(Arbitrum.BatchBlock.t()) :: Arbitrum.BatchBlock.to_import()
-  defp rollup_block_to_map(block) do
-    [:batch_number, :block_number, :confirmation_id]
-    |> db_record_to_map(block)
-  end
-
-  @spec message_to_map(Arbitrum.Message.t()) :: Arbitrum.Message.to_import()
-  defp message_to_map(message) do
-    [
-      :direction,
-      :message_id,
-      :originator_address,
-      :originating_transaction_hash,
-      :origination_timestamp,
-      :originating_transaction_block_number,
-      :completion_transaction_hash,
-      :status
-    ]
-    |> db_record_to_map(message)
-  end
-
-  defp logs_to_map(log) do
-    [
-      :data,
-      :index,
-      :first_topic,
-      :second_topic,
-      :third_topic,
-      :fourth_topic,
-      :address_hash,
-      :transaction_hash,
-      :block_hash,
-      :block_number
-    ]
-    |> db_record_to_map(log, true)
-  end
-
-  # Converts an Arbitrum-related database record to a map with specified keys and optional encoding.
-  #
-  # This function is used to transform various Arbitrum-specific database records
-  # (such as LifecycleTransaction, BatchBlock, or Message) into maps containing
-  # only the specified keys. It's particularly useful for preparing data for
-  # import or further processing of Arbitrum blockchain data.
-  #
-  # Parameters:
-  # - `required_keys`: A list of atoms representing the keys to include in the
-  #   output map.
-  # - `record`: The database record or struct to be converted.
-  # - `encode`: Boolean flag to determine if Hash and Data types should be
-  #   encoded to strings (default: false). When true, Hash and Data are
-  #   converted to string representations; otherwise, their raw bytes are used.
-  #
-  # Returns:
-  # - A map containing only the required keys from the input record. Hash and
-  #   Data types are either encoded to strings or left as raw bytes based on
-  #   the `encode` parameter.
-  @spec db_record_to_map([atom()], map(), boolean()) :: map()
-  defp db_record_to_map(required_keys, record, encode \\ false) do
-    required_keys
-    |> Enum.reduce(%{}, fn key, record_as_map ->
-      raw_value = Map.get(record, key)
-
-      # credo:disable-for-lines:5 Credo.Check.Refactor.Nesting
-      value =
-        case raw_value do
-          %Hash{} -> if(encode, do: Hash.to_string(raw_value), else: raw_value.bytes)
-          %Data{} -> if(encode, do: Data.to_string(raw_value), else: raw_value.bytes)
-          _ -> raw_value
-        end
-
-      Map.put(record_as_map, key, value)
-    end)
-  end
-end
diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/README.md b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/README.md
new file mode 100644
index 000000000000..56ccaf8855db
--- /dev/null
+++ b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/README.md
@@ -0,0 +1,37 @@
+# Database Utility Modules
+
+This directory contains modules that provide structured database access and manipulation for Arbitrum-specific data in the Blockscout indexer.
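+
+For example (an illustrative sketch, not a prescribed API: the fallback block
+number below is hypothetical, while the module and function names come from the
+files introduced in this diff), an indexer task is expected to go through these
+wrappers instead of calling `Explorer.Chain.Arbitrum.Reader.*` directly:
+
+```elixir
+alias Indexer.Fetcher.Arbitrum.Utils.Db.{Messages, Settlement}
+
+# Next L1 block to scan for batch commitments; falls back to the provided
+# default when no committed batches are in the DB yet.
+start_block = Settlement.l1_block_to_discover_latest_committed_batch(1_000_000)
+
+# Wrappers return import-ready maps rather than raw Ecto structs.
+messages = Messages.confirmed_l2_to_l1_messages()
+```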
+
+## Module Overview
+
+- `common.ex` - Chain-agnostic database utility functions for block-related operations
+- `messages.ex` - Functions for querying and managing cross-chain message data
+- `parent_chain_transactions.ex` - Handles L1 transaction indexing and lifecycle management
+- `settlement.ex` - Manages batch commitment and state confirmation data
+- `tools.ex` - Internal helper functions for database record processing
+
+## Usage Guidelines
+
+1. Use logging judiciously to avoid overwhelming the logs with unnecessary information.
+
+2. Use Reader modules from the `Explorer.Chain.Arbitrum.Reader` namespace for raw database access if other modules under `Explorer.Chain` do not provide the functionality you need.
+
+3. Apply additional data transformation to maintain consistency with the structures used for data import.
+
+4. Implement proper error handling when database queries return `nil`.
+
+## Module Organization
+
+The database functionality is split across multiple modules rather than maintained in a single monolithic file for two primary reasons:
+
+### 1. Collaborative Development
+
+Splitting functionality across multiple files significantly reduces the likelihood of merge conflicts when multiple developers are working on different features simultaneously. Each module can be modified independently without affecting other parts of the codebase.
+
+### 2. LLM-Based Development Optimization
+
+The modular structure is specifically designed to work better with Large Language Model (LLM) based coding assistants:
+
+- **Output Token Efficiency**: While modern LLMs can handle large files in their input context, they still have limitations on output tokens. Smaller files make it easier for AI assistants to propose and explain changes within these limits.
+
+- **Focus Window Management**: Smaller, focused modules help maintain a clear context window when working with AI assistants, making it easier to discuss and modify specific functionality without the noise of unrelated code.
diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/common.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/common.ex
new file mode 100644
index 000000000000..46579fab84e0
--- /dev/null
+++ b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/common.ex
@@ -0,0 +1,66 @@
+defmodule Indexer.Fetcher.Arbitrum.Utils.Db.Common do
+  @moduledoc """
+  Provides chain-agnostic database utility functions for block-related operations.
+
+  This module contains general-purpose functions for querying data that are not
+  specific to Arbitrum and can be used across different blockchain implementations.
+  Functions in this module operate only on common database tables shared across all
+  chain types.
+
+  Note: Consider relocating these functions to a more general utility module if they
+  are needed by non-Arbitrum fetchers, as their current placement in the Arbitrum
+  namespace may be misleading.
+  """
+
+  alias Explorer.Chain
+  alias Explorer.Chain.Arbitrum.Reader.Indexer.General, as: ArbitrumReader
+  alias Explorer.Chain.Block, as: FullBlock
+  alias Explorer.Utility.MissingBlockRange
+
+  @doc """
+  Determines whether a given range of block numbers has been fully indexed without any missing blocks.
+
+  ## Parameters
+  - `start_block`: The starting block number of the range to check for completeness in indexing.
+  - `end_block`: The ending block number of the range.
+ + ## Returns + - `true` if the entire range from `start_block` to `end_block` is indexed and contains no missing + blocks, indicating no intersection with missing block ranges; `false` otherwise. + """ + @spec indexed_blocks?(FullBlock.block_number(), FullBlock.block_number()) :: boolean() + def indexed_blocks?(start_block, end_block) + when is_integer(start_block) and start_block >= 0 and + is_integer(end_block) and start_block <= end_block do + is_nil(MissingBlockRange.intersects_with_range(start_block, end_block)) + end + + @doc """ + Retrieves the block number for the closest block immediately after a given timestamp. + + ## Parameters + - `timestamp`: The `DateTime` timestamp for which the closest subsequent block number is sought. + + ## Returns + - `{:ok, block_number}` where `block_number` is the number of the closest block that occurred + after the specified timestamp. + - `{:error, :not_found}` if no block is found after the specified timestamp. + """ + @spec closest_block_after_timestamp(DateTime.t()) :: {:error, :not_found} | {:ok, FullBlock.block_number()} + def closest_block_after_timestamp(timestamp) do + Chain.timestamp_to_block_number(timestamp, :after, false) + end + + @doc """ + Retrieves full details of rollup blocks, including associated transactions, for each block number specified in the input list. + + ## Parameters + - `list_of_block_numbers`: A list of block numbers for which full block details are to be retrieved. + + ## Returns + - A list of `Explorer.Chain.Block` instances containing detailed information for each + block number in the input list. Returns an empty list if no blocks are found for the given numbers. + """ + @spec rollup_blocks([FullBlock.block_number()]) :: [FullBlock.t()] + def rollup_blocks(list_of_block_numbers), do: ArbitrumReader.rollup_blocks(list_of_block_numbers) +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/messages.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/messages.ex new file mode 100644 index 000000000000..e52d2738c473 --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/messages.ex @@ -0,0 +1,312 @@ +defmodule Indexer.Fetcher.Arbitrum.Utils.Db.Messages do + @moduledoc """ + Provides utility functions for querying Arbitrum cross-chain message data. + + This module serves as a wrapper around the database reader functions from + `Explorer.Chain.Arbitrum.Reader.Indexer.Messages`, providing additional data + transformation and error handling capabilities. + """ + + alias EthereumJSONRPC.Arbitrum.Constants.Events, as: ArbitrumEvents + + import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_warning: 1] + + alias Explorer.Chain.Arbitrum.Reader.Indexer.Messages, as: Reader + + alias Explorer.Chain.Arbitrum.{ + L1Execution, + Message + } + + alias Explorer.Chain.Block, as: FullBlock + alias Explorer.Chain.Hash + + alias Indexer.Fetcher.Arbitrum.Utils.Db.Tools, as: DbTools + + require Logger + + @no_messages_warning "No messages to L2 found in DB" + @no_executions_warning "No L1 executions found in DB" + + @doc """ + Calculates the next L1 block number to search for the latest message sent to L2. + + ## Parameters + - `value_if_nil`: The default value to return if no L1-to-L2 messages have been discovered. + + ## Returns + - The L1 block number immediately following the latest discovered message to L2, + or `value_if_nil` if no messages to L2 have been found. 
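+
+  ## Example
+
+  An illustrative sketch (the block numbers are hypothetical):
+
+      # The latest discovered message to L2 was seen in L1 block 1_000_000:
+      l1_block_to_discover_latest_message_to_l2(19_000_000)
+      #=> 1_000_001
+
+      # No messages in the DB: the provided default is returned unchanged
+      # (and a warning is logged).
+      l1_block_to_discover_latest_message_to_l2(19_000_000)
+      #=> 19_000_000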
+ """ + @spec l1_block_to_discover_latest_message_to_l2(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() + def l1_block_to_discover_latest_message_to_l2(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_latest_discovered_message_to_l2() do + nil -> + log_warning(@no_messages_warning) + value_if_nil + + value -> + value + 1 + end + end + + @doc """ + Calculates the next L1 block number to start the search for messages sent to L2 + that precede the earliest message already discovered. + + ## Parameters + - `value_if_nil`: The default value to return if no L1-to-L2 messages have been discovered. + + ## Returns + - The L1 block number immediately preceding the earliest discovered message to L2, + or `value_if_nil` if no messages to L2 have been found. + """ + @spec l1_block_to_discover_earliest_message_to_l2(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() + def l1_block_to_discover_earliest_message_to_l2(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_earliest_discovered_message_to_l2() do + nil -> + log_warning(@no_messages_warning) + value_if_nil + + value -> + value - 1 + end + end + + @doc """ + Determines the next L1 block number to search for the latest execution of an L2-to-L1 message. + + ## Parameters + - `value_if_nil`: The default value to return if no execution transactions for L2-to-L1 messages + have been recorded. + + ## Returns + - The L1 block number following the block that contains the latest execution transaction + for an L2-to-L1 message, or `value_if_nil` if no such executions have been found. + """ + @spec l1_block_to_discover_latest_execution(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() + def l1_block_to_discover_latest_execution(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_latest_execution() do + nil -> + log_warning(@no_executions_warning) + value_if_nil + + value -> + value + 1 + end + end + + @doc """ + Determines the L1 block number just before the block that contains the earliest known + execution transaction for an L2-to-L1 message. + + ## Parameters + - `value_if_nil`: The default value to return if no execution transactions for + L2-to-L1 messages have been found. + + ## Returns + - The L1 block number preceding the earliest known execution transaction for + an L2-to-L1 message, or `value_if_nil` if no such executions are found in the database. + """ + @spec l1_block_to_discover_earliest_execution(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() + def l1_block_to_discover_earliest_execution(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_earliest_execution() do + nil -> + log_warning(@no_executions_warning) + value_if_nil + + value -> + value - 1 + end + end + + @doc """ + Retrieves a list of L2-to-L1 messages that have been initiated up to + a specified rollup block number. + + ## Parameters + - `block_number`: The block number up to which initiated L2-to-L1 messages + should be retrieved. + + ## Returns + - A list of maps, each representing an initiated L2-to-L1 message compatible with the + database import operation. If no initiated messages are found up to the specified + block number, an empty list is returned. 
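+
+  ## Example
+
+  An illustrative sketch of the returned shape (the values are hypothetical;
+  the keys are those produced by the private `message_to_map/1` helper):
+
+      initiated_l2_to_l1_messages(1_000)
+      #=> [%{direction: :from_l2, message_id: 42,
+      #      originating_transaction_block_number: 987, status: :initiated, ...}]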
+ """ + @spec initiated_l2_to_l1_messages(FullBlock.block_number()) :: [Message.to_import()] + def initiated_l2_to_l1_messages(block_number) + when is_integer(block_number) and block_number >= 0 do + # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart + Reader.l2_to_l1_messages(:initiated, block_number) + |> Enum.map(&message_to_map/1) + end + + @doc """ + Retrieves a list of L2-to-L1 'sent' messages that have been included up to + a specified rollup block number. + + A message is considered 'sent' when there is a batch including the transaction + that initiated the message, and this batch has been successfully delivered to L1. + + ## Parameters + - `block_number`: The block number up to which sent L2-to-L1 messages are to be retrieved. + + ## Returns + - A list of maps, each representing a sent L2-to-L1 message compatible with the + database import operation. If no messages with the 'sent' status are found by + the specified block number, an empty list is returned. + """ + @spec sent_l2_to_l1_messages(FullBlock.block_number()) :: [Message.to_import()] + def sent_l2_to_l1_messages(block_number) + when is_integer(block_number) and block_number >= 0 do + # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart + Reader.l2_to_l1_messages(:sent, block_number) + |> Enum.map(&message_to_map/1) + end + + @doc """ + Retrieves a list of L2-to-L1 'confirmed' messages that have been included up to + a specified rollup block number. + + A message is considered 'confirmed' when its transaction was included in a rollup block, + and the confirmation of this block has been delivered to L1. + + ## Parameters + - `block_number`: The block number up to which confirmed L2-to-L1 messages are to be retrieved. + + ## Returns + - A list of maps, each representing a confirmed L2-to-L1 message compatible with the + database import operation. If no messages with the 'confirmed' status are found by + the specified block number, an empty list is returned. + """ + @spec confirmed_l2_to_l1_messages() :: [Message.to_import()] + def confirmed_l2_to_l1_messages do + # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart + Reader.l2_to_l1_messages(:confirmed, nil) + |> Enum.map(&message_to_map/1) + end + + @doc """ + Reads a list of transactions executing L2-to-L1 messages by their IDs. + + ## Parameters + - `message_ids`: A list of IDs to retrieve executing transactions for. + + ## Returns + - A list of `Explorer.Chain.Arbitrum.L1Execution` corresponding to the message IDs from + the input list. The output list may be smaller than the input list if some IDs do not + correspond to any existing transactions. + """ + @spec l1_executions([non_neg_integer()]) :: [L1Execution.t()] + def l1_executions(message_ids) when is_list(message_ids) do + Reader.l1_executions(message_ids) + end + + @doc """ + Retrieves the transaction hashes as strings for missed L1-to-L2 messages within + a specified block range. + + The function identifies missed messages by checking transactions of specific + types that are supposed to contain L1-to-L2 messages and verifying if there are + corresponding entries in the messages table. A message is considered missed if + there is a transaction without a matching message record within the specified + block range. + + ## Parameters + - `start_block`: The starting block number of the range. + - `end_block`: The ending block number of the range. + + ## Returns + - A list of transaction hashes as strings for missed L1-to-L2 messages. 
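+
+  ## Example
+
+  An illustrative sketch (the hash is hypothetical); the hashes are already
+  rendered as `0x`-prefixed strings via `Hash.to_string/1`:
+
+      transactions_for_missed_messages_to_l2(100, 200)
+      #=> ["0x6d9a...f3c1"]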
+  """
+  @spec transactions_for_missed_messages_to_l2(non_neg_integer(), non_neg_integer()) :: [String.t()]
+  def transactions_for_missed_messages_to_l2(start_block, end_block) do
+    # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart
+    Reader.transactions_for_missed_messages_to_l2(start_block, end_block)
+    |> Enum.map(&Hash.to_string/1)
+  end
+
+  @doc """
+  Retrieves the logs for missed L2-to-L1 messages within a specified block range
+  and converts them to maps.
+
+  The function identifies missed messages by checking logs for the specified
+  L2-to-L1 event and verifying if there are corresponding entries in the messages
+  table. A message is considered missed if there is a log entry without a
+  matching message record within the specified block range.
+
+  ## Parameters
+  - `start_block`: The starting block number of the range.
+  - `end_block`: The ending block number of the range.
+
+  ## Returns
+  - A list of maps representing the logs for missed L2-to-L1 messages.
+  """
+  @spec logs_for_missed_messages_from_l2(non_neg_integer(), non_neg_integer()) :: [
+          %{
+            data: String.t(),
+            index: non_neg_integer(),
+            first_topic: String.t(),
+            second_topic: String.t(),
+            third_topic: String.t(),
+            fourth_topic: String.t(),
+            address_hash: String.t(),
+            transaction_hash: String.t(),
+            block_hash: String.t(),
+            block_number: FullBlock.block_number()
+          }
+        ]
+  def logs_for_missed_messages_from_l2(start_block, end_block) do
+    arbsys_contract = Application.get_env(:indexer, Indexer.Fetcher.Arbitrum.Messaging)[:arbsys_contract]
+
+    # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart
+    Reader.logs_for_missed_messages_from_l2(start_block, end_block, arbsys_contract, ArbitrumEvents.l2_to_l1())
+    |> Enum.map(&logs_to_map/1)
+  end
+
+  @doc """
+  Retrieves the list of IDs of uncompleted L1-to-L2 messages.
+
+  ## Returns
+  - A list of the IDs of uncompleted L1-to-L2 messages.
+  """
+  @spec get_uncompleted_l1_to_l2_messages_ids() :: [non_neg_integer()]
+  def get_uncompleted_l1_to_l2_messages_ids do
+    Reader.get_uncompleted_l1_to_l2_messages_ids()
+  end
+
+  @spec message_to_map(Message.t()) :: Message.to_import()
+  defp message_to_map(message) do
+    [
+      :direction,
+      :message_id,
+      :originator_address,
+      :originating_transaction_hash,
+      :origination_timestamp,
+      :originating_transaction_block_number,
+      :completion_transaction_hash,
+      :status
+    ]
+    |> DbTools.db_record_to_map(message)
+  end
+
+  defp logs_to_map(log) do
+    [
+      :data,
+      :index,
+      :first_topic,
+      :second_topic,
+      :third_topic,
+      :fourth_topic,
+      :address_hash,
+      :transaction_hash,
+      :block_hash,
+      :block_number
+    ]
+    |> DbTools.db_record_to_map(log, true)
+  end
+end
diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/parent_chain_transactions.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/parent_chain_transactions.ex
new file mode 100644
index 000000000000..744b8dfcf902
--- /dev/null
+++ b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/parent_chain_transactions.ex
@@ -0,0 +1,139 @@
+defmodule Indexer.Fetcher.Arbitrum.Utils.Db.ParentChainTransactions do
+  @moduledoc """
+  Manages database operations for Arbitrum L1 (parent chain) lifecycle transactions.
+ + This module handles indexing and retrieval of L1 transactions that affect the Arbitrum + rollup state, including: + * Batch commitment transactions from the sequencer + * State root confirmation transactions post fraud-proof window + * User-initiated cross-chain message transactions + + Provides functionality to: + * Index new L1 transactions with sequential IDs + * Retrieve transaction data by hash + * Convert database records to import-compatible format + * Track transaction finalization status + """ + + alias Explorer.Chain.Arbitrum.LifecycleTransaction + alias Explorer.Chain.Arbitrum.Reader.Indexer.ParentChainTransactions, as: Reader + alias Explorer.Chain.Block, as: FullBlock + alias Indexer.Fetcher.Arbitrum.Utils.Db.Tools, as: DbTools + + require Logger + + @doc """ + Indexes L1 transactions provided in the input map. For transactions that + are already in the database, existing indices are taken. For new transactions, + the next available indices are assigned. + + ## Parameters + - `new_l1_transactions`: A map of L1 transaction descriptions. The keys of the map are + transaction hashes. + + ## Returns + - `l1_transactions`: A map of L1 transaction descriptions. Each element is extended with + the key `:id`, representing the index of the L1 transaction in the + `arbitrum_lifecycle_l1_transactions` table. + """ + @spec get_indices_for_l1_transactions(%{ + binary() => %{ + :hash => binary(), + :block_number => FullBlock.block_number(), + :timestamp => DateTime.t(), + :status => :unfinalized | :finalized, + optional(:id) => non_neg_integer() + } + }) :: %{binary() => LifecycleTransaction.to_import()} + # TODO: consider a way to remove duplicate with ZkSync.Utils.Db + def get_indices_for_l1_transactions(new_l1_transactions) + when is_map(new_l1_transactions) do + # Get indices for l1 transactions previously handled + l1_transactions = + new_l1_transactions + |> Map.keys() + |> Reader.lifecycle_transaction_ids() + |> Enum.reduce(new_l1_transactions, fn {hash, id}, transactions -> + {_, transactions} = + Map.get_and_update!(transactions, hash.bytes, fn l1_transaction -> + {l1_transaction, Map.put(l1_transaction, :id, id)} + end) + + transactions + end) + + # Get the next index for the first new transaction based + # on the indices existing in DB + l1_transaction_next_id = Reader.next_lifecycle_transaction_id() + + # Assign new indices for the transactions which are not in + # the l1 transactions table yet + {updated_l1_transactions, _} = + l1_transactions + |> Map.keys() + |> Enum.reduce( + {l1_transactions, l1_transaction_next_id}, + fn hash, {transactions, next_id} -> + transaction = transactions[hash] + id = Map.get(transaction, :id) + + if is_nil(id) do + {Map.put(transactions, hash, Map.put(transaction, :id, next_id)), next_id + 1} + else + {transactions, next_id} + end + end + ) + + updated_l1_transactions + end + + @doc """ + Reads a list of L1 transactions by their hashes from the + `arbitrum_lifecycle_l1_transactions` table and converts them to maps. + + ## Parameters + - `l1_transaction_hashes`: A list of hashes to retrieve L1 transactions for. + + ## Returns + - A list of maps representing the `Explorer.Chain.Arbitrum.LifecycleTransaction` + corresponding to the hashes from the input list. The output list is + compatible with the database import operation. 
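+
+  ## Example
+
+  An illustrative sketch of the returned shape (`transaction_hash` and all field
+  values are hypothetical; the keys are those produced by the private
+  `lifecycle_transaction_to_map/1`, with the hash kept as raw bytes since no
+  string encoding is requested):
+
+      lifecycle_transactions([transaction_hash])
+      #=> [%{id: 7, hash: <<_raw_bytes>>, block_number: 1_000_000,
+      #      timestamp: ~U[2024-01-01 00:00:00Z], status: :finalized}]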
+ """ + @spec lifecycle_transactions([binary()]) :: [LifecycleTransaction.to_import()] + def lifecycle_transactions(l1_transaction_hashes) do + l1_transaction_hashes + |> Reader.lifecycle_transactions() + |> Enum.map(&lifecycle_transaction_to_map/1) + end + + @doc """ + Retrieves unfinalized L1 transactions that are involved in changing the statuses + of rollup blocks or transactions. + + An L1 transaction is considered unfinalized if it has not yet reached a state + where it is permanently included in the blockchain, meaning it is still susceptible + to potential reorganization or change. Transactions are evaluated against + the finalized_block parameter to determine their finalized status. + + ## Parameters + - `finalized_block`: The block number up to which unfinalized transactions are to be retrieved. + + ## Returns + - A list of maps representing unfinalized L1 transactions and compatible with the + database import operation. + """ + @spec lifecycle_unfinalized_transactions(FullBlock.block_number()) :: [LifecycleTransaction.to_import()] + def lifecycle_unfinalized_transactions(finalized_block) + when is_integer(finalized_block) and finalized_block >= 0 do + finalized_block + |> Reader.lifecycle_unfinalized_transactions() + |> Enum.map(&lifecycle_transaction_to_map/1) + end + + @spec lifecycle_transaction_to_map(LifecycleTransaction.t()) :: LifecycleTransaction.to_import() + defp lifecycle_transaction_to_map(transaction) do + [:id, :hash, :block_number, :timestamp, :status] + |> DbTools.db_record_to_map(transaction) + end +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/settlement.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/settlement.ex new file mode 100644 index 000000000000..045a2578aaa0 --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/settlement.ex @@ -0,0 +1,490 @@ +defmodule Indexer.Fetcher.Arbitrum.Utils.Db.Settlement do + @moduledoc """ + Provides utility functions for querying Arbitrum rollup settlement data. + + This module serves as a wrapper around the database reader functions from + `Explorer.Chain.Arbitrum.Reader.Indexer.Settlement`, providing additional data + transformation and error handling capabilities for: + + * L1 batches - Sequential groups of L2 blocks committed to L1 + * Batch blocks - Individual L2 blocks included in L1 batches + * Block confirmations - L1 transactions confirming L2 block states + * Data availability records - Additional batch-related data (e.g., AnyTrust keysets) + """ + + @no_committed_batches_warning "No committed batches found in DB" + + import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_warning: 1] + + alias Explorer.Chain.Arbitrum + alias Explorer.Chain.Arbitrum.Reader.Indexer.Settlement, as: Reader + alias Explorer.Chain.Block, as: FullBlock + + alias Indexer.Fetcher.Arbitrum.Utils.Db.Tools, as: DbTools + + require Logger + + @doc """ + Calculates the next L1 block number to search for the latest committed batch. + + ## Parameters + - `value_if_nil`: The default value to return if no committed batch is found. + + ## Returns + - The next L1 block number after the latest committed batch or `value_if_nil` if no committed batches are found. 
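+
+  ## Example
+
+  An illustrative sketch (the block numbers are hypothetical):
+
+      # The latest committed batch was found in L1 block 2_000_000:
+      l1_block_to_discover_latest_committed_batch(1_000_000)
+      #=> 2_000_001
+
+      # No committed batches in the DB: the default is returned
+      # (and a warning is logged).
+      l1_block_to_discover_latest_committed_batch(1_000_000)
+      #=> 1_000_000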
+ """ + @spec l1_block_to_discover_latest_committed_batch(FullBlock.block_number() | nil) :: FullBlock.block_number() | nil + def l1_block_to_discover_latest_committed_batch(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_latest_committed_batch() do + nil -> + log_warning(@no_committed_batches_warning) + value_if_nil + + value -> + value + 1 + end + end + + @doc """ + Calculates the L1 block number to start the search for committed batches. + + Returns the block number of the earliest L1 block containing a transaction + that commits a batch, as found in the database. If no committed batches are + found, it returns a default value. It's safe to use the returned block number + for subsequent searches, even if it corresponds to a block we've previously + processed. This is because multiple transactions committing different batches + can exist within the same block, and revisiting this block ensures no batches + are missed. + + The batch discovery process is expected to handle potential duplicates + correctly without creating redundant database entries. + + ## Parameters + - `value_if_nil`: The default value to return if no committed batch is found. + + ## Returns + - The L1 block number containing the earliest committed batch or `value_if_nil`. + """ + @spec l1_block_to_discover_earliest_committed_batch(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() + def l1_block_to_discover_earliest_committed_batch(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_earliest_committed_batch() do + nil -> + log_warning(@no_committed_batches_warning) + value_if_nil + + value -> + value + end + end + + @doc """ + Retrieves the block number of the highest rollup block that has been included in a batch. + + ## Parameters + - `value_if_nil`: The default value to return if no rollup batches are found. + + ## Returns + - The number of the highest rollup block included in a batch + or `value_if_nil` if no rollup batches are found. + """ + @spec highest_committed_block(nil | integer()) :: nil | FullBlock.block_number() + def highest_committed_block(value_if_nil) + when is_integer(value_if_nil) or is_nil(value_if_nil) do + case Reader.highest_committed_block() do + nil -> value_if_nil + value -> value + end + end + + @doc """ + Retrieves the L1 block number immediately following the block where the confirmation transaction + for the highest confirmed rollup block was included. + + ## Parameters + - `value_if_nil`: The default value to return if no confirmed rollup blocks are found. + + ## Returns + - The L1 block number immediately after the block containing the confirmation transaction of + the highest confirmed rollup block, or `value_if_nil` if no confirmed rollup blocks are present. + """ + @spec l1_block_of_latest_confirmed_block(nil | FullBlock.block_number()) :: nil | FullBlock.block_number() + def l1_block_of_latest_confirmed_block(value_if_nil) + when (is_integer(value_if_nil) and value_if_nil >= 0) or is_nil(value_if_nil) do + case Reader.l1_block_of_latest_confirmed_block() do + nil -> + log_warning("No confirmed blocks found in DB") + value_if_nil + + value -> + value + 1 + end + end + + @doc """ + Retrieves the block number of the highest rollup block for which a confirmation transaction + has been sent to L1. + + ## Parameters + - `value_if_nil`: The default value to return if no confirmed rollup blocks are found. 
+ + ## Returns + - The block number of the highest confirmed rollup block, + or `value_if_nil` if no confirmed rollup blocks are found in the database. + """ + @spec highest_confirmed_block(nil | integer()) :: nil | FullBlock.block_number() + def highest_confirmed_block(value_if_nil) + when is_integer(value_if_nil) or is_nil(value_if_nil) do + case Reader.highest_confirmed_block() do + nil -> value_if_nil + value -> value + end + end + + @doc """ + Retrieves the block number associated with a specific hash of a rollup block. + + ## Parameters + - `hash`: The hash of the rollup block whose number is to be retrieved. + + ## Returns + - The block number associated with the given rollup block hash. + """ + @spec rollup_block_hash_to_num(binary()) :: FullBlock.block_number() | nil + def rollup_block_hash_to_num(hash) when is_binary(hash) do + Reader.rollup_block_hash_to_num(hash) + end + + @doc """ + Retrieves the L1 batch that includes a specified rollup block number. + + ## Parameters + - `num`: The block number of the rollup block for which the containing + L1 batch is to be retrieved. + + ## Returns + - The `Explorer.Chain.Arbitrum.L1Batch` associated with the given rollup block number + if it exists and its commit transaction is loaded. + """ + @spec get_batch_by_rollup_block_number(FullBlock.block_number()) :: Arbitrum.L1Batch.t() | nil + def get_batch_by_rollup_block_number(num) + when is_integer(num) and num >= 0 do + case Reader.get_batch_by_rollup_block_number(num) do + nil -> + nil + + batch -> + case batch.commitment_transaction do + nil -> + raise commitment_transaction_not_loaded_error(num) + + %Ecto.Association.NotLoaded{} -> + raise commitment_transaction_not_loaded_error(num) + + _ -> + batch + end + end + end + + # Constructs an error message for when a commitment transaction is not loaded + @spec commitment_transaction_not_loaded_error(FullBlock.block_number()) :: String.t() + defp commitment_transaction_not_loaded_error(batch_number) do + "Incorrect state of the DB: commitment_transaction is not loaded for the batch with number #{batch_number}" + end + + @doc """ + Retrieves a batch by its number. + + ## Parameters + - `number`: The number of a rollup batch. + + ## Returns + - An instance of `Explorer.Chain.Arbitrum.L1Batch`, or `nil` if no batch with + such a number is found. + """ + @spec get_batch_by_number(non_neg_integer()) :: Arbitrum.L1Batch.t() | nil + def get_batch_by_number(number) do + Reader.get_batch_by_number(number) + end + + @doc """ + Retrieves rollup blocks within a specified block range that have not yet been confirmed. + + ## Parameters + - `first_block`: The starting block number of the range to search for unconfirmed rollup blocks. + - `last_block`: The ending block number of the range. + + ## Returns + - A list of maps, each representing an unconfirmed rollup block within the specified range. + If no unconfirmed blocks are found within the range, an empty list is returned. + """ + @spec unconfirmed_rollup_blocks(FullBlock.block_number(), FullBlock.block_number()) :: [ + Arbitrum.BatchBlock.to_import() + ] + def unconfirmed_rollup_blocks(first_block, last_block) + when is_integer(first_block) and first_block >= 0 and + is_integer(last_block) and first_block <= last_block do + # credo:disable-for-lines:2 Credo.Check.Refactor.PipeChainStart + Reader.unconfirmed_rollup_blocks(first_block, last_block) + |> Enum.map(&rollup_block_to_map/1) + end + + @doc """ + Counts the number of confirmed rollup blocks in a specified batch. 
+
+  ## Parameters
+  - `batch_number`: The batch number for which the count of confirmed rollup blocks
+    is to be determined.
+
+  ## Returns
+  - The number of rollup blocks confirmed in the specified batch.
+  """
+  @spec count_confirmed_rollup_blocks_in_batch(non_neg_integer()) :: non_neg_integer()
+  def count_confirmed_rollup_blocks_in_batch(batch_number)
+      when is_integer(batch_number) and batch_number >= 0 do
+    Reader.count_confirmed_rollup_blocks_in_batch(batch_number)
+  end
+
+  @doc """
+  Checks if the numbers from the provided list correspond to the numbers of indexed batches.
+
+  ## Parameters
+  - `batches_numbers`: The list of batch numbers.
+
+  ## Returns
+  - A list of batch numbers that are indexed and match the provided list, or `[]`
+    if none of the batch numbers in the provided list exist in the database. The output list
+    may be smaller than the input list.
+  """
+  @spec batches_exist([non_neg_integer()]) :: [non_neg_integer()]
+  def batches_exist(batches_numbers) when is_list(batches_numbers) do
+    Reader.batches_exist(batches_numbers)
+  end
+
+  @doc """
+  Identifies the range of L1 blocks to investigate for missing confirmations of rollup blocks.
+
+  This function determines the L1 block numbers bounding the interval where gaps in rollup block
+  confirmations might exist. It uses the earliest and latest L1 block numbers associated with
+  unconfirmed rollup blocks to define this range.
+
+  ## Parameters
+  - `right_pos_value_if_nil`: The default value to use for the upper bound of the range if no
+    confirmed blocks are found.
+
+  ## Returns
+  - A tuple containing two elements: the lower and upper bounds of L1 block numbers to check
+    for missing rollup block confirmations. If the necessary confirmation data is unavailable,
+    the first element will be `nil`, and the second will be `right_pos_value_if_nil`.
+  """
+  @spec l1_blocks_to_expect_rollup_blocks_confirmation(nil | FullBlock.block_number()) ::
+          {nil | FullBlock.block_number(), nil | FullBlock.block_number()}
+  def l1_blocks_to_expect_rollup_blocks_confirmation(right_pos_value_if_nil)
+      when (is_integer(right_pos_value_if_nil) and right_pos_value_if_nil >= 0) or is_nil(right_pos_value_if_nil) do
+    case Reader.l1_blocks_of_confirmations_bounding_first_unconfirmed_rollup_blocks_gap() do
+      nil ->
+        log_warning("No L1 confirmations found in DB")
+        {nil, right_pos_value_if_nil}
+
+      {nil, newer_confirmation_l1_block} ->
+        {nil, newer_confirmation_l1_block - 1}
+
+      {older_confirmation_l1_block, newer_confirmation_l1_block} ->
+        {older_confirmation_l1_block + 1, newer_confirmation_l1_block - 1}
+    end
+  end
+
+  @doc """
+  Retrieves L1 block ranges that could be used to re-discover missing batches
+  within a specified range of batch numbers.
+
+  This function identifies the L1 block ranges corresponding to missing L1 batches
+  within the given range of batch numbers. It first finds the missing batches,
+  then determines their neighboring ranges, and finally maps these ranges to the
+  corresponding L1 block numbers.
+
+  ## Parameters
+  - `start_batch_number`: The starting batch number of the search range.
+  - `end_batch_number`: The ending batch number of the search range.
+  - `block_for_batch_0`: The L1 block number corresponding to batch number 0.
+
+  ## Returns
+  - A list of tuples, each containing a start and end L1 block number for the
+    ranges corresponding to the missing batches.
+
+  ## Examples
+
+    Example #1
+    - Within the range from 1 to 10, the missing batch is 2.
The L1 block for the + batch #1 is 10, and the L1 block for the batch #3 is 31. + - The output will be `[{10, 31}]`. + + Example #2 + - Within the range from 1 to 10, the missing batches are 2 and 6, and + - The L1 block for the batch #1 is 10. + - The L1 block for the batch #3 is 31. + - The L1 block for the batch #5 is 64. + - The L1 block for the batch #7 is 90. + - The output will be `[{10, 31}, {64, 90}]`. + + Example #3 + - Within the range from 1 to 10, the missing batches are 2 and 4, and + - The L1 block for the batch #1 is 10. + - The L1 block for the batch #3 is 31. + - The L1 block for the batch #5 is 64. + - The output will be `[{10, 31}, {32, 64}]`. + + Example #4 + - Within the range from 1 to 10, the missing batches are 2 and 4, and + - The L1 block for the batch #1 is 10. + - The L1 block for the batch #3 is 31. + - The L1 block for the batch #5 is 31. + - The output will be `[{10, 31}]`. + """ + @spec get_l1_block_ranges_for_missing_batches(non_neg_integer(), non_neg_integer(), FullBlock.block_number()) :: [ + {FullBlock.block_number(), FullBlock.block_number()} + ] + def get_l1_block_ranges_for_missing_batches(start_batch_number, end_batch_number, block_for_batch_0) + when is_integer(start_batch_number) and is_integer(end_batch_number) and end_batch_number >= start_batch_number do + # credo:disable-for-lines:4 Credo.Check.Refactor.PipeChainStart + neighbors_of_missing_batches = + Reader.find_missing_batches(start_batch_number, end_batch_number) + |> list_to_chunks() + |> chunks_to_neighbor_ranges() + + batches_gaps_to_block_ranges(neighbors_of_missing_batches, block_for_batch_0) + end + + # Splits a list into chunks of consecutive numbers, e.g., [1, 2, 3, 5, 6, 8] becomes [[1, 2, 3], [5, 6], [8]]. + @spec list_to_chunks([non_neg_integer()]) :: [[non_neg_integer()]] + defp list_to_chunks(list) do + chunk_fun = fn current, acc -> + case acc do + [] -> + {:cont, [current]} + + [last | _] = acc when current == last + 1 -> + {:cont, [current | acc]} + + acc -> + {:cont, Enum.reverse(acc), [current]} + end + end + + after_fun = fn acc -> + case acc do + # Special case to handle the situation when the initial list is empty + [] -> {:cont, []} + _ -> {:cont, Enum.reverse(acc), []} + end + end + + list + |> Enum.chunk_while([], chunk_fun, after_fun) + end + + # Converts chunks of elements into neighboring ranges, e.g., [[1, 2], [4]] becomes [{0, 3}, {3, 5}]. + @spec chunks_to_neighbor_ranges([[non_neg_integer()]]) :: [{non_neg_integer(), non_neg_integer()}] + defp chunks_to_neighbor_ranges([]), do: [] + + defp chunks_to_neighbor_ranges(list_of_chunks) do + list_of_chunks + |> Enum.map(fn current -> + case current do + [one_element] -> {one_element - 1, one_element + 1} + chunk -> {List.first(chunk) - 1, List.last(chunk) + 1} + end + end) + end + + # Converts batch number gaps to L1 block ranges for missing batches discovery. + # + # This function takes a list of neighboring batch number ranges representing gaps + # in the batch sequence and converts them to corresponding L1 block ranges. These + # L1 block ranges can be used to rediscover missing batches. + # + # ## Parameters + # - `neighbors_of_missing_batches`: A list of tuples, each containing the start + # and end batch numbers of a gap in the batch sequence. + # - `block_for_batch_0`: The L1 block number corresponding to batch number 0. + # + # ## Returns + # - A list of tuples, each containing the start and end L1 block numbers for + # ranges where missing batches should be rediscovered. 
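+  #
+  # ## Example
+  # A minimal illustration with hypothetical values: for the single gap `{1, 3}`
+  # (batch 2 is missing), and assuming `Reader.get_l1_blocks_of_batches_by_numbers/1`
+  # returns `%{1 => 10, 3 => 31}`, the call
+  #
+  #   batches_gaps_to_block_ranges([{1, 3}], 100)
+  #
+  # produces `[{10, 31}]`; the `block_for_batch_0` value (100) is unused here
+  # since batch 0 does not bound any gap.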
+  @spec batches_gaps_to_block_ranges([{non_neg_integer(), non_neg_integer()}], FullBlock.block_number()) ::
+          [{FullBlock.block_number(), FullBlock.block_number()}]
+  defp batches_gaps_to_block_ranges(neighbors_of_missing_batches, block_for_batch_0)
+
+  defp batches_gaps_to_block_ranges([], _), do: []
+
+  defp batches_gaps_to_block_ranges(neighbors_of_missing_batches, block_for_batch_0) do
+    l1_blocks =
+      neighbors_of_missing_batches
+      |> Enum.reduce(MapSet.new(), fn {start_batch, end_batch}, acc ->
+        acc
+        |> MapSet.put(start_batch)
+        |> MapSet.put(end_batch)
+      end)
+      # To avoid error in getting L1 block for the batch 0
+      |> MapSet.delete(0)
+      |> MapSet.to_list()
+      |> Reader.get_l1_blocks_of_batches_by_numbers()
+      # It is safe to add the block for the batch 0 even if the batch 1 is missing
+      |> Map.put(0, block_for_batch_0)
+
+    neighbors_of_missing_batches
+    |> Enum.reduce({[], %{}}, fn {start_batch, end_batch}, {res, blocks_used} ->
+      range_start = l1_blocks[start_batch]
+      range_end = l1_blocks[end_batch]
+      # If the batch's block was already used as a block finishing one of the ranges,
+      # then we should start another range from the next block to avoid discovering
+      # the same batches again.
+      case {Map.get(blocks_used, range_start, false), range_start == range_end} do
+        {true, true} ->
+          # Edge case when the range consists of a single block (several batches in
+          # the same block) which has already been inspected by this moment.
+          {res, blocks_used}
+
+        {true, false} ->
+          {[{range_start + 1, range_end} | res], Map.put(blocks_used, range_end, true)}
+
+        {false, _} ->
+          {[{range_start, range_end} | res], Map.put(blocks_used, range_end, true)}
+      end
+    end)
+    |> elem(0)
+  end
+
+  @doc """
+    Retrieves the minimum and maximum batch numbers of L1 batches.
+
+    ## Returns
+    - A tuple containing the minimum and maximum batch numbers or `{nil, nil}` if no batches are found.
+  """
+  @spec get_min_max_batch_numbers() :: {non_neg_integer(), non_neg_integer()} | {nil, nil}
+  def get_min_max_batch_numbers do
+    Reader.get_min_max_batch_numbers()
+  end
+
+  @doc """
+    Checks if an AnyTrust keyset exists in the database using the provided keyset hash.
+
+    ## Parameters
+    - `keyset_hash`: The hash of the keyset to be checked.
+
+    ## Returns
+    - `true` if the keyset exists, `false` otherwise.
+  """
+  @spec anytrust_keyset_exists?(binary()) :: boolean()
+  def anytrust_keyset_exists?(keyset_hash) do
+    not Enum.empty?(Reader.get_anytrust_keyset(keyset_hash))
+  end
+
+  @spec rollup_block_to_map(Arbitrum.BatchBlock.t()) :: Arbitrum.BatchBlock.to_import()
+  defp rollup_block_to_map(block) do
+    [:batch_number, :block_number, :confirmation_id]
+    |> DbTools.db_record_to_map(block)
+  end
+end
diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/tools.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/tools.ex
new file mode 100644
index 000000000000..e3eb7cf4c587
--- /dev/null
+++ b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/db/tools.ex
@@ -0,0 +1,49 @@
+defmodule Indexer.Fetcher.Arbitrum.Utils.Db.Tools do
+  @moduledoc """
+    Internal database utility functions for Arbitrum-related data processing.
+
+    This module is designed to be used exclusively within the
+    `Indexer.Fetcher.Arbitrum.Utils.Db` namespace.
+  """
+
+  alias Explorer.Chain.{Data, Hash}
+
+  @doc """
+    Converts an Arbitrum-related database record to a map with specified keys and optional encoding.
+ + This function is used to transform various Arbitrum-specific database records + (such as LifecycleTransaction, BatchBlock, or Message) into maps containing + only the specified keys. It's particularly useful for preparing data for + import or further processing of Arbitrum blockchain data. + + Parameters: + - `required_keys`: A list of atoms representing the keys to include in the + output map. + - `record`: The database record or struct to be converted. + - `encode`: Boolean flag to determine if Hash and Data types should be + encoded to strings (default: false). When true, Hash and Data are + converted to string representations; otherwise, their raw bytes are used. + + Returns: + - A map containing only the required keys from the input record. Hash and + Data types are either encoded to strings or left as raw bytes based on + the `encode` parameter. + """ + @spec db_record_to_map([atom()], map(), boolean()) :: map() + def db_record_to_map(required_keys, record, encode \\ false) do + required_keys + |> Enum.reduce(%{}, fn key, record_as_map -> + raw_value = Map.get(record, key) + + # credo:disable-for-lines:5 Credo.Check.Refactor.Nesting + value = + case raw_value do + %Hash{} -> if(encode, do: Hash.to_string(raw_value), else: raw_value.bytes) + %Data{} -> if(encode, do: Data.to_string(raw_value), else: raw_value.bytes) + _ -> raw_value + end + + Map.put(record_as_map, key, value) + end) + end +end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/rpc.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/rpc.ex index b831022bbcf2..1859a47f1c7d 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/utils/rpc.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/utils/rpc.ex @@ -6,6 +6,8 @@ defmodule Indexer.Fetcher.Arbitrum.Utils.Rpc do import EthereumJSONRPC, only: [json_rpc: 2, quantity_to_integer: 1, timestamp_to_datetime: 1] + alias EthereumJSONRPC.Arbitrum.Constants.Contracts, as: ArbitrumContracts + alias EthereumJSONRPC.Transport alias Indexer.Helper, as: IndexerHelper @@ -14,90 +16,6 @@ defmodule Indexer.Fetcher.Arbitrum.Utils.Rpc do @default_binary_search_threshold 1000 - # outbox() - @selector_outbox "ce11e6ab" - # sequencerInbox() - @selector_sequencer_inbox "ee35f327" - # bridge() - @selector_bridge "e78cea92" - @rollup_contract_abi [ - %{ - "inputs" => [], - "name" => "outbox", - "outputs" => [ - %{ - "internalType" => "address", - "name" => "", - "type" => "address" - } - ], - "stateMutability" => "view", - "type" => "function" - }, - %{ - "inputs" => [], - "name" => "sequencerInbox", - "outputs" => [ - %{ - "internalType" => "address", - "name" => "", - "type" => "address" - } - ], - "stateMutability" => "view", - "type" => "function" - }, - %{ - "inputs" => [], - "name" => "bridge", - "outputs" => [ - %{ - "internalType" => "address", - "name" => "", - "type" => "address" - } - ], - "stateMutability" => "view", - "type" => "function" - } - ] - - # getKeysetCreationBlock(bytes32 ksHash) - @selector_get_keyset_creation_block "258f0495" - @selector_sequencer_inbox_contract_abi [ - %{ - "inputs" => [%{"internalType" => "bytes32", "name" => "ksHash", "type" => "bytes32"}], - "name" => "getKeysetCreationBlock", - "outputs" => [%{"internalType" => "uint256", "name" => "", "type" => "uint256"}], - "stateMutability" => "view", - "type" => "function" - } - ] - - # findBatchContainingBlock(uint64 blockNum) - @selector_find_batch_containing_block "81f1adaf" - @node_interface_contract_abi [ - %{ - "inputs" => [ - %{ - "internalType" => "uint64", - "name" => "blockNum", - 
"type" => "uint64" - } - ], - "name" => "findBatchContainingBlock", - "outputs" => [ - %{ - "internalType" => "uint64", - "name" => "batch", - "type" => "uint64" - } - ], - "stateMutability" => "view", - "type" => "function" - } - ] - @doc """ Constructs a JSON RPC request to retrieve a transaction by its hash. @@ -116,45 +34,6 @@ defmodule Indexer.Fetcher.Arbitrum.Utils.Rpc do EthereumJSONRPC.request(%{id: id, method: "eth_getTransactionByHash", params: [transaction_hash]}) end - @doc """ - Retrieves specific contract addresses associated with Arbitrum rollup contract. - - This function fetches the addresses of the bridge, sequencer inbox, and outbox - contracts related to the specified Arbitrum rollup address. It invokes one of - the contract methods `bridge()`, `sequencerInbox()`, or `outbox()` based on - the `contracts_set` parameter to obtain the required information. - - ## Parameters - - `rollup_address`: The address of the Arbitrum rollup contract from which - information is being retrieved. - - `contracts_set`: A symbol indicating the set of contracts to retrieve (`:bridge` - for the bridge contract, `:inbox_outbox` for the sequencer - inbox and outbox contracts). - - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. - - ## Returns - - A map with keys corresponding to the contract types (`:bridge`, `:sequencer_inbox`, - `:outbox`) and values representing the contract addresses. - """ - @spec get_contracts_for_rollup( - EthereumJSONRPC.address(), - :bridge | :inbox_outbox, - EthereumJSONRPC.json_rpc_named_arguments() - ) :: %{(:bridge | :sequencer_inbox | :outbox) => binary()} - def get_contracts_for_rollup(rollup_address, contracts_set, json_rpc_named_arguments) - - def get_contracts_for_rollup(rollup_address, :bridge, json_rpc_named_arguments) do - call_simple_getters_in_rollup_contract(rollup_address, [@selector_bridge], json_rpc_named_arguments) - end - - def get_contracts_for_rollup(rollup_address, :inbox_outbox, json_rpc_named_arguments) do - call_simple_getters_in_rollup_contract( - rollup_address, - [@selector_sequencer_inbox, @selector_outbox], - json_rpc_named_arguments - ) - end - @doc """ Retrieves the block number associated with a specific keyset from the Sequencer Inbox contract. @@ -177,45 +56,13 @@ defmodule Indexer.Fetcher.Arbitrum.Utils.Rpc do def get_block_number_for_keyset(sequencer_inbox_address, keyset_hash, json_rpc_named_arguments) do read_contract_and_handle_result_as_integer( sequencer_inbox_address, - @selector_get_keyset_creation_block, + ArbitrumContracts.get_keyset_creation_block_selector(), [keyset_hash], - @selector_sequencer_inbox_contract_abi, + ArbitrumContracts.sequencer_inbox_contract_abi(), json_rpc_named_arguments ) end - # Calls getter functions on a rollup contract and collects their return values. - # - # This function is designed to interact with a rollup contract and invoke specified getter methods. - # It creates a list of requests for each method ID, executes these requests with retries as needed, - # and then maps the results to the corresponding method IDs. - # - # ## Parameters - # - `rollup_address`: The address of the rollup contract to interact with. - # - `method_ids`: A list of method identifiers representing the getter functions to be called. - # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. 
- # - # ## Returns - # - A map where each key is a method identifier converted to an atom, and each value is the - # response from calling the respective method on the contract. - defp call_simple_getters_in_rollup_contract(rollup_address, method_ids, json_rpc_named_arguments) do - method_ids - |> Enum.map(fn method_id -> - %{ - contract_address: rollup_address, - method_id: method_id, - args: [] - } - end) - |> IndexerHelper.read_contracts_with_retries(@rollup_contract_abi, json_rpc_named_arguments, @rpc_resend_attempts) - # Extracts the list of responses from the tuple returned by read_contracts_with_retries. - |> Kernel.elem(0) - |> Enum.zip(method_ids) - |> Enum.reduce(%{}, fn {{:ok, [response]}, method_id}, retval -> - Map.put(retval, atomized_key(method_id), response) - end) - end - @doc """ Executes a batch of RPC calls and returns a list of response bodies. @@ -710,9 +557,9 @@ defmodule Indexer.Fetcher.Arbitrum.Utils.Rpc do batch_number = read_contract_and_handle_result_as_integer( node_interface_address, - @selector_find_batch_containing_block, + ArbitrumContracts.find_batch_containing_block_selector(), [block_number], - @node_interface_contract_abi, + ArbitrumContracts.node_interface_contract_abi(), json_rpc_named_arguments ) @@ -726,7 +573,7 @@ defmodule Indexer.Fetcher.Arbitrum.Utils.Rpc do [term()], [map()], EthereumJSONRPC.json_rpc_named_arguments() - ) :: non_neg_integer() + ) :: non_neg_integer() | boolean() defp read_contract_and_handle_result_as_integer( contract_address, method_selector, @@ -786,8 +633,4 @@ defmodule Indexer.Fetcher.Arbitrum.Utils.Rpc do def get_resend_attempts do @rpc_resend_attempts end - - defp atomized_key(@selector_outbox), do: :outbox - defp atomized_key(@selector_sequencer_inbox), do: :sequencer_inbox - defp atomized_key(@selector_bridge), do: :bridge end diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/historical_messages_on_l2.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/historical_messages_on_l2.ex index f3b3386f9e93..2803b7477b29 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/historical_messages_on_l2.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/historical_messages_on_l2.ex @@ -20,7 +20,9 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.HistoricalMessagesOnL2 do alias Indexer.Fetcher.Arbitrum.MessagesToL2Matcher, as: ArbitrumMessagesToL2Matcher alias Indexer.Fetcher.Arbitrum.Messaging - alias Indexer.Fetcher.Arbitrum.Utils.{Db, Rpc} + alias Indexer.Fetcher.Arbitrum.Utils.Db.Common, as: DbCommon + alias Indexer.Fetcher.Arbitrum.Utils.Db.Messages, as: DbMessages + alias Indexer.Fetcher.Arbitrum.Utils.Rpc require Logger @@ -88,7 +90,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.HistoricalMessagesOnL2 do when is_integer(end_block) do start_block = max(rollup_first_block, end_block - missed_messages_blocks_depth + 1) - if Db.indexed_blocks?(start_block, end_block) do + if DbCommon.indexed_blocks?(start_block, end_block) do do_discover_historical_messages_from_l2(start_block, end_block) else log_warning( @@ -119,7 +121,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.HistoricalMessagesOnL2 do defp do_discover_historical_messages_from_l2(start_block, end_block) do log_info("Block range for discovery historical messages from L2: #{start_block}..#{end_block}") - logs = Db.logs_for_missed_messages_from_l2(start_block, end_block) + logs = DbMessages.logs_for_missed_messages_from_l2(start_block, end_block) unless logs == [] do messages = @@ -203,7 +205,7 @@ defmodule 
Indexer.Fetcher.Arbitrum.Workers.HistoricalMessagesOnL2 do # Although indexing blocks is not necessary to determine the completion of L1-to-L2 messages, # for database consistency, it is preferable to delay marking these messages as completed. - if Db.indexed_blocks?(start_block, end_block) do + if DbCommon.indexed_blocks?(start_block, end_block) do do_discover_historical_messages_to_l2(start_block, end_block, config) else log_warning( @@ -259,7 +261,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.HistoricalMessagesOnL2 do ) do log_info("Block range for discovery historical messages to L2: #{start_block}..#{end_block}") - transactions = Db.transactions_for_missed_messages_to_l2(start_block, end_block) + transactions = DbMessages.transactions_for_missed_messages_to_l2(start_block, end_block) transactions_length = length(transactions) if transactions_length > 0 do diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/l1_finalization.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/l1_finalization.ex index 3230664d6a76..d7b4acf09233 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/l1_finalization.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/l1_finalization.ex @@ -10,8 +10,9 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.L1Finalization do import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_info: 1] + alias Indexer.Fetcher.Arbitrum.Utils.Db.ParentChainTransactions, as: Db + alias Indexer.Fetcher.Arbitrum.Utils.Rpc alias Indexer.Helper, as: IndexerHelper - alias Indexer.Fetcher.Arbitrum.Utils.{Db, Rpc} alias Explorer.Chain diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_batches.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_batches.ex index 42eb3367df0d..3bfd5b5d2dfb 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_batches.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_batches.ex @@ -22,9 +22,11 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do for the necessary information not available in the logs. """ - alias ABI.{FunctionSelector, TypeDecoder} + alias ABI.TypeDecoder import EthereumJSONRPC, only: [quantity_to_integer: 1] + alias EthereumJSONRPC.Arbitrum.Constants.Contracts, as: ArbitrumContracts + alias EthereumJSONRPC.Arbitrum.Constants.Events, as: ArbitrumEvents import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_info: 1, log_debug: 1] @@ -32,8 +34,12 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do alias Indexer.Fetcher.Arbitrum.DA.Common, as: DataAvailabilityInfo alias Indexer.Fetcher.Arbitrum.DA.{Anytrust, Celestia} - alias Indexer.Fetcher.Arbitrum.Utils.{Db, Logging, Rpc} + alias Indexer.Fetcher.Arbitrum.Utils.Db.Common, as: DbCommon + alias Indexer.Fetcher.Arbitrum.Utils.Db.Messages, as: DbMessages + alias Indexer.Fetcher.Arbitrum.Utils.Db.ParentChainTransactions, as: DbParentChainTransactions + alias Indexer.Fetcher.Arbitrum.Utils.Db.Settlement, as: DbSettlement alias Indexer.Fetcher.Arbitrum.Utils.Helper, as: ArbitrumHelper + alias Indexer.Fetcher.Arbitrum.Utils.{Logging, Rpc} alias Indexer.Helper, as: IndexerHelper alias Explorer.Chain @@ -42,9 +48,6 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do require Logger - # keccak256("SequencerBatchDelivered(uint256,bytes32,bytes32,bytes32,uint256,(uint64,uint64,uint64,uint64),uint8)") - @event_sequencer_batch_delivered "0x7394f4a19a13c7b92b5bb71033245305946ef78452f7b4986ac1390b5df4ebd7" - @doc """ Discovers and imports new batches of rollup transactions within the current L1 block range. 
@@ -330,7 +333,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do log_info("Batch range for missing batches inspection: #{start_batch}..#{end_batch}") l1_block_ranges_for_missing_batches = - Db.get_l1_block_ranges_for_missing_batches(start_batch, end_batch, l1_rollup_init_block - 1) + DbSettlement.get_l1_block_ranges_for_missing_batches(start_batch, end_batch, l1_rollup_init_block - 1) unless l1_block_ranges_for_missing_batches == [] do discover_missing_batches( @@ -607,7 +610,8 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do logs |> Enum.chunk_every(new_batches_limit) |> Enum.each(fn chunked_logs -> - {batches, lifecycle_transactions, rollup_blocks, rollup_transactions, committed_transactions, da_records} = + {batches, lifecycle_transactions, rollup_blocks, rollup_transactions, committed_transactions, da_records, + batch_to_data_blobs} = handle_batches_from_logs( chunked_logs, messages_to_blocks_shift, @@ -625,6 +629,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do arbitrum_batch_transactions: %{params: rollup_transactions}, arbitrum_messages: %{params: committed_transactions}, arbitrum_da_multi_purpose_records: %{params: da_records}, + arbitrum_batches_to_da_blobs: %{params: batch_to_data_blobs}, timeout: :infinity }) @@ -659,7 +664,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do start_block, end_block, sequencer_inbox_address, - [@event_sequencer_batch_delivered], + [ArbitrumEvents.sequencer_batch_delivered()], json_rpc_named_arguments ) @@ -694,8 +699,9 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do # # ## Returns # - A tuple containing lists of batches, lifecycle transactions, rollup blocks, - # rollup transactions, committed messages (with the status `:sent`), and records - # with DA-related information if applicable, all ready for database import. + # rollup transactions, committed messages (with the status `:sent`), records + # with DA-related information if applicable, and batch-to-DA-blob associations, + # all ready for database import. 
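+  #
+  # Note: for an empty list of logs the function short-circuits to the empty
+  # seven-element tuple `{[], [], [], [], [], [], []}` (see the `[]` clause below).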
@spec handle_batches_from_logs( [%{String.t() => any()}], non_neg_integer(), @@ -717,7 +723,8 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do [Arbitrum.BatchBlock.to_import()], [Arbitrum.BatchTransaction.to_import()], [Arbitrum.Message.to_import()], - [Arbitrum.DaMultiPurposeRecord.to_import()] + [Arbitrum.DaMultiPurposeRecord.to_import()], + [Arbitrum.BatchToDaBlob.to_import()] } defp handle_batches_from_logs( logs, @@ -728,7 +735,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do rollup_rpc_config ) - defp handle_batches_from_logs([], _, _, _, _, _), do: {[], [], [], [], [], []} + defp handle_batches_from_logs([], _, _, _, _, _), do: {[], [], [], [], [], [], []} defp handle_batches_from_logs( logs, @@ -744,7 +751,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do existing_batches = logs |> parse_logs_to_get_batch_numbers() - |> Db.batches_exist() + |> DbSettlement.batches_exist() {batches, transactions_requests, blocks_requests, existing_commitment_transactions} = parse_logs_for_new_batches(logs, existing_batches) @@ -775,7 +782,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do lifecycle_transactions = lifecycle_transactions_wo_indices - |> Db.get_indices_for_l1_transactions() + |> DbParentChainTransactions.get_indices_for_l1_transactions() transaction_counts_per_batch = batches_to_rollup_transactions_amounts(rollup_transactions_to_import) @@ -798,7 +805,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do ] end) - da_records = + {da_records, batch_to_data_blobs} = DataAvailabilityInfo.prepare_for_import(da_info, %{ sequencer_inbox_address: sequencer_inbox_address, json_rpc_named_arguments: l1_rpc_config.json_rpc_named_arguments @@ -817,7 +824,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do end {batches_list_to_import, Map.values(lifecycle_transactions), Map.values(blocks_to_import), - rollup_transactions_to_import, committed_messages, da_records} + rollup_transactions_to_import, committed_messages, da_records, batch_to_data_blobs} end # Extracts batch numbers from logs of SequencerBatchDelivered events. 
@@ -1150,17 +1157,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do [sequence_number, data, _after_delayed_messages_read, _gas_refunder, prev_message_count, new_message_count] = TypeDecoder.decode( Base.decode16!(encoded_params, case: :lower), - %FunctionSelector{ - function: "addSequencerL2BatchFromOrigin", - types: [ - {:uint, 256}, - :bytes, - {:uint, 256}, - :address, - {:uint, 256}, - {:uint, 256} - ] - } + ArbitrumContracts.add_sequencer_l2_batch_from_origin_8f111f3c_selector_with_abi() ) {sequence_number, prev_message_count, new_message_count, data} @@ -1170,16 +1167,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do [sequence_number, _after_delayed_messages_read, _gas_refunder, prev_message_count, new_message_count] = TypeDecoder.decode( Base.decode16!(encoded_params, case: :lower), - %FunctionSelector{ - function: "addSequencerL2BatchFromBlobs", - types: [ - {:uint, 256}, - {:uint, 256}, - :address, - {:uint, 256}, - {:uint, 256} - ] - } + ArbitrumContracts.add_sequencer_l2_batch_from_blobs_selector_with_abi() ) {sequence_number, prev_message_count, new_message_count, nil} @@ -1189,15 +1177,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do [sequence_number, data, _after_delayed_messages_read, _gas_refunder] = TypeDecoder.decode( Base.decode16!(encoded_params, case: :lower), - %FunctionSelector{ - function: "addSequencerL2BatchFromOrigin", - types: [ - {:uint, 256}, - :bytes, - {:uint, 256}, - :address - ] - } + ArbitrumContracts.add_sequencer_l2_batch_from_origin_6f12b0c9_selector_with_abi() ) {sequence_number, nil, nil, data} @@ -1272,7 +1252,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do defp get_expected_highest_block_and_step(batch_number) do # since the default direction for the block range exploration is chosen to be from the highest to lowest # the step is calculated to be positive - case Db.get_batch_by_number(batch_number) do + case DbSettlement.get_batch_by_number(batch_number) do nil -> {nil, nil} @@ -1286,7 +1266,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do defp get_expected_lowest_block_and_step(batch_number) do # since the default direction for the block range exploration is chosen to be from the highest to lowest # the step is calculated to be negative - case Db.get_batch_by_number(batch_number) do + case DbSettlement.get_batch_by_number(batch_number) do nil -> {nil, nil} @@ -1337,7 +1317,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do defp update_lifecycle_transactions_for_new_blocks(existing_commitment_transactions, block_to_ts) do existing_commitment_transactions |> Map.keys() - |> Db.lifecycle_transactions() + |> DbParentChainTransactions.lifecycle_transactions() |> Enum.reduce(%{}, fn transaction, transactions -> block_number = existing_commitment_transactions[transaction.hash] ts = block_to_ts[block_number] @@ -1470,7 +1450,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do # and batch number, ready for database import. defp get_rollup_blocks_and_transactions_from_db(rollup_blocks_numbers, blocks_to_batches) do rollup_blocks_numbers - |> Db.rollup_blocks() + |> DbCommon.rollup_blocks() |> Enum.reduce({%{}, []}, fn block, {blocks_map, transactions_list} -> batch_num = blocks_to_batches[block.number] @@ -1576,8 +1556,8 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do # - A tuple containing: # - A map of rollup blocks associated with the batch numbers, ready for # database import. 
- # - A list of transactions, each associated with its respective rollup block - # and batch number, ready for database import. + # - A list of transactions, each associated with its respective rollup + # block and batch number, ready for database import. # - The updated counter of processed chunks (usually ignored). @spec recover_rollup_blocks_and_transactions_from_rpc( [non_neg_integer()], @@ -1711,7 +1691,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewBatches do @spec get_committed_l2_to_l1_messages(non_neg_integer()) :: [Arbitrum.Message.to_import()] defp get_committed_l2_to_l1_messages(block_number) do block_number - |> Db.initiated_l2_to_l1_messages() + |> DbMessages.initiated_l2_to_l1_messages() |> Enum.map(fn transaction -> Map.put(transaction, :status, :sent) end) diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_confirmations.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_confirmations.ex index d91c6259bd56..5144e79b3443 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_confirmations.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_confirmations.ex @@ -55,14 +55,18 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do """ import EthereumJSONRPC, only: [quantity_to_integer: 1] + alias EthereumJSONRPC.Arbitrum.Constants.Events, as: ArbitrumEvents import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_warning: 1, log_info: 1, log_debug: 1] alias EthereumJSONRPC.Block.ByNumber, as: BlockByNumber alias Indexer.Helper, as: IndexerHelper - alias Indexer.Fetcher.Arbitrum.Utils.{Db, Rpc} + alias Indexer.Fetcher.Arbitrum.Utils.Db.Messages, as: DbMessages + alias Indexer.Fetcher.Arbitrum.Utils.Db.ParentChainTransactions, as: DbParentChainTransactions + alias Indexer.Fetcher.Arbitrum.Utils.Db.Settlement, as: DbSettlement alias Indexer.Fetcher.Arbitrum.Utils.Helper, as: ArbitrumHelper + alias Indexer.Fetcher.Arbitrum.Utils.Rpc alias Explorer.Chain alias Explorer.Chain.Arbitrum @@ -79,9 +83,6 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do @logs_per_report 10 @zero_counters %{pairs_counter: 1, capped_logs_counter: 0, report?: false} - # keccak256("SendRootUpdated(bytes32,bytes32)") - @send_root_updated_event "0xb4df3847300f076a369cd76d2314b470a1194d9e8a6bb97f1860aee88a5f6748" - @doc """ Discovers and processes new confirmations of rollup blocks within a calculated block range. @@ -354,7 +355,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do # to the rollup genesis and the L1 block _value_. # {lower, higher} - there are no confirmations between L1 block _lower_ # and the L1 block _higher_. 
- Db.l1_blocks_to_expect_rollup_blocks_confirmation(nil) + DbSettlement.l1_blocks_to_expect_rollup_blocks_confirmation(nil) _ -> {expected_confirmation_start_block, expected_confirmation_end_block} @@ -682,7 +683,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do existing_lifecycle_transactions = transaction_hashes |> Map.values() - |> Db.lifecycle_transactions() + |> DbParentChainTransactions.lifecycle_transactions() |> Enum.reduce(%{}, fn transaction, acc -> Map.put(acc, transaction.hash, transaction) end) @@ -764,7 +765,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do rollup_blocks_to_l1_transactions |> Map.keys() |> Enum.reduce(%{}, fn block_hash, transformed -> - rollup_block_num = Db.rollup_block_hash_to_num(block_hash) + rollup_block_num = DbSettlement.rollup_block_hash_to_num(block_hash) # nil is applicable for the case when the block is not indexed yet by # the block fetcher, it makes sense to skip this block so far @@ -943,7 +944,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do # because the method can be called for guessed block number rather than received from # the batch description or from blocks list received after a batch handling. In this case # the confirmation must be postponed until the corresponding batch is handled. - batch = Db.get_batch_by_rollup_block_number(rollup_block_num) + batch = DbSettlement.get_batch_by_rollup_block_number(rollup_block_num) if batch != nil do log_info( @@ -967,13 +968,13 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do non_neg_integer() ) :: {:ok, [Arbitrum.BatchBlock.to_import()]} | {:error, []} defp discover_rollup_blocks__get_unconfirmed_rollup_blocks(batch, rollup_block_num) do - unconfirmed_rollup_blocks = Db.unconfirmed_rollup_blocks(batch.start_block, rollup_block_num) + unconfirmed_rollup_blocks = DbSettlement.unconfirmed_rollup_blocks(batch.start_block, rollup_block_num) if Enum.empty?(unconfirmed_rollup_blocks) do # Blocks are not found only in case when all blocks in the batch confirmed # or in case when Chain.Block for block in the batch are not received yet - if Db.count_confirmed_rollup_blocks_in_batch(batch.number) == batch.end_block - batch.start_block + 1 do + if DbSettlement.count_confirmed_rollup_blocks_in_batch(batch.number) == batch.end_block - batch.start_block + 1 do log_info("No unconfirmed blocks in the batch #{batch.number}") {:ok, []} else @@ -1230,7 +1231,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do log_debug("Examining the transaction #{event["transactionHash"]}") rollup_block_hash = send_root_updated_event_parse(event) - rollup_block_num = Db.rollup_block_hash_to_num(rollup_block_hash) + rollup_block_num = DbSettlement.rollup_block_hash_to_num(rollup_block_hash) case rollup_block_num do nil -> @@ -1307,7 +1308,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do start_block, end_block, outbox_address, - [@send_root_updated_event], + [ArbitrumEvents.send_root_updated()], json_rpc_named_arguments ) @@ -1503,7 +1504,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do lifecycle_transactions = basic_lifecycle_transactions |> ArbitrumHelper.extend_lifecycle_transactions_with_ts_and_status(l1_blocks_to_ts, track_finalization?) 
- |> Db.get_indices_for_l1_transactions() + |> DbParentChainTransactions.get_indices_for_l1_transactions() {updated_rollup_blocks, highest_confirmed_block_number} = confirmed_rollup_blocks @@ -1565,7 +1566,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewConfirmations do defp get_confirmed_l2_to_l1_messages(block_number) do block_number - |> Db.sent_l2_to_l1_messages() + |> DbMessages.sent_l2_to_l1_messages() |> Enum.map(fn transaction -> Map.put(transaction, :status, :confirmed) end) diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_l1_executions.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_l1_executions.ex index ba572c060e4f..43c20c34cb39 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_l1_executions.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_l1_executions.ex @@ -17,6 +17,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewL1Executions do """ import EthereumJSONRPC, only: [quantity_to_integer: 1] + alias EthereumJSONRPC.Arbitrum.Constants.Events, as: ArbitrumEvents import Indexer.Fetcher.Arbitrum.Utils.Logging, only: [log_info: 1, log_debug: 1] @@ -24,8 +25,10 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewL1Executions do import Explorer.Helper, only: [decode_data: 2] + alias Indexer.Fetcher.Arbitrum.Utils.Db.Messages, as: DbMessages + alias Indexer.Fetcher.Arbitrum.Utils.Db.ParentChainTransactions, as: DbParentChainTransactions alias Indexer.Fetcher.Arbitrum.Utils.Helper, as: ArbitrumHelper - alias Indexer.Fetcher.Arbitrum.Utils.{Db, Rpc} + alias Indexer.Fetcher.Arbitrum.Utils.Rpc alias Indexer.Helper, as: IndexerHelper alias Explorer.Chain @@ -33,10 +36,6 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewL1Executions do require Logger - # keccak256("OutBoxTransactionExecuted(address,address,uint256,uint256)") - @outbox_transaction_executed_event "0x20af7f3bbfe38132b8900ae295cd9c8d1914be7052d061a511f3f728dab18964" - @outbox_transaction_executed_unindexed_params [{:uint, 256}] - @doc """ Discovers and processes new executions of L2-to-L1 messages within the current L1 block range. @@ -238,7 +237,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewL1Executions do start_block, end_block, outbox_address, - [@outbox_transaction_executed_event], + [ArbitrumEvents.outbox_transaction_executed()], json_rpc_named_arguments ) @@ -298,7 +297,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewL1Executions do lifecycle_transactions = basic_lifecycle_transactions |> ArbitrumHelper.extend_lifecycle_transactions_with_ts_and_status(blocks_to_ts, track_finalization?) 
- |> Db.get_indices_for_l1_transactions() + |> DbParentChainTransactions.get_indices_for_l1_transactions() executions = basics_executions @@ -376,7 +375,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewL1Executions do # Parses `OutBoxTransactionExecuted` event data to extract the transaction index parameter defp outbox_transaction_executed_event_parse(event) do - [transaction_index] = decode_data(event["data"], @outbox_transaction_executed_unindexed_params) + [transaction_index] = decode_data(event["data"], ArbitrumEvents.outbox_transaction_executed_unindexed_params()) transaction_index end @@ -396,7 +395,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewL1Executions do # will check all discovered historical messages to be marked as executed it is not # needed to handle :initiated and :sent of historical messages here, only for # new messages discovered and changed their status from `:sent` to `:confirmed` - confirmed_messages = Db.confirmed_l2_to_l1_messages() + confirmed_messages = DbMessages.confirmed_l2_to_l1_messages() if Enum.empty?(confirmed_messages) do [] @@ -411,7 +410,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewL1Executions do messages_map |> Map.keys() - |> Db.l1_executions() + |> DbMessages.l1_executions() |> Enum.map(fn execution -> messages_map |> Map.get(execution.message_id) diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_messages_to_l2.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_messages_to_l2.ex index ad586ab4f0ee..a2a1c04524ed 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_messages_to_l2.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/new_messages_to_l2.ex @@ -15,6 +15,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewMessagesToL2 do """ import EthereumJSONRPC, only: [quantity_to_integer: 1] + alias EthereumJSONRPC.Arbitrum.Constants.Events, as: ArbitrumEvents import Explorer.Helper, only: [decode_data: 2] @@ -31,17 +32,6 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewMessagesToL2 do @types_of_l1_messages_forwarded_to_l2 [3, 7, 9, 12] - # keccak256("MessageDelivered(uint256,bytes32,address,uint8,address,bytes32,uint256,uint64)") - @message_delivered_event "0x5e3c1311ea442664e8b1611bfabef659120ea7a0a2cfc0667700bebc69cbffe1" - @message_delivered_event_unindexed_params [ - :address, - {:uint, 8}, - :address, - {:bytes, 32}, - {:uint, 256}, - {:uint, 64} - ] - @doc """ Discovers new L1-to-L2 messages initiated on L1 within a configured block range and processes them for database import. 
@@ -242,7 +232,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewMessagesToL2 do start_block, end_block, bridge_address, - [@message_delivered_event], + [ArbitrumEvents.message_delivered()], json_rpc_named_arguments ) @@ -358,7 +348,7 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.NewMessagesToL2 do _message_data_hash, _base_fee_l1, timestamp - ] = decode_data(event["data"], @message_delivered_event_unindexed_params) + ] = decode_data(event["data"], ArbitrumEvents.message_delivered_unindexed_params()) message_index = quantity_to_integer(Enum.at(event["topics"], 1)) diff --git a/apps/indexer/lib/indexer/fetcher/contract_code.ex b/apps/indexer/lib/indexer/fetcher/contract_code.ex index 49778f375e26..2a9c0dea8433 100644 --- a/apps/indexer/lib/indexer/fetcher/contract_code.ex +++ b/apps/indexer/lib/indexer/fetcher/contract_code.ex @@ -11,12 +11,34 @@ defmodule Indexer.Fetcher.ContractCode do import EthereumJSONRPC, only: [integer_to_quantity: 1] alias Explorer.Chain - alias Explorer.Chain.{Block, Hash} + alias Explorer.Chain.{Address, Block, Hash} alias Explorer.Chain.Cache.{Accounts, BlockNumber} + alias Explorer.Chain.Zilliqa.Helper, as: ZilliqaHelper alias Indexer.{BufferedTask, Tracer} alias Indexer.Fetcher.CoinBalance.Helper, as: CoinBalanceHelper + alias Indexer.Fetcher.Zilliqa.ScillaSmartContracts, as: ZilliqaScillaSmartContractsFetcher alias Indexer.Transform.Addresses + @transaction_fields ~w(block_number created_contract_address_hash hash type)a + @failed_to_import "failed to import created_contract_code for transactions: " + + @typedoc """ + Represents a list of entries, where each entry is a map containing transaction + fields required for fetching contract codes. + + - `:block_number` - The block number of the transaction. + - `:created_contract_address_hash` - The hash of the created contract + address. + - `:hash` - The hash of the transaction. + - `:type` - The type of the transaction. 
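+
+  An illustrative entry (hypothetical values; hash structs elided):
+
+      %{
+        block_number: 12_345,
+        created_contract_address_hash: created_contract_address_hash,
+        hash: transaction_hash,
+        type: 2
+      }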
+ """ + @type entry :: %{ + required(:block_number) => Block.block_number(), + required(:created_contract_address_hash) => Hash.Full.t(), + required(:hash) => Hash.Full.t(), + required(:type) => integer() + } + @behaviour BufferedTask @max_batch_size 10 @@ -29,12 +51,14 @@ defmodule Indexer.Fetcher.ContractCode do metadata: [fetcher: :code] ] - @spec async_fetch([%{required(:block_number) => Block.block_number(), required(:hash) => Hash.Full.t()}], boolean()) :: - :ok + @spec async_fetch([entry()], boolean(), integer()) :: :ok def async_fetch(transactions_fields, realtime?, timeout \\ 5000) when is_list(transactions_fields) do - entries = Enum.map(transactions_fields, &entry/1) - - BufferedTask.buffer(__MODULE__, entries, realtime?, timeout) + BufferedTask.buffer( + __MODULE__, + transactions_fields |> Enum.uniq(), + realtime?, + timeout + ) end @doc false @@ -59,11 +83,10 @@ defmodule Indexer.Fetcher.ContractCode do def init(initial, reducer, _) do {:ok, final} = Chain.stream_transactions_with_unfetched_created_contract_codes( - [:block_number, :created_contract_address_hash, :hash], + @transaction_fields, initial, fn transaction_fields, acc -> transaction_fields - |> entry() |> reducer.(acc) end, true @@ -72,22 +95,21 @@ defmodule Indexer.Fetcher.ContractCode do final end - defp entry(%{ - block_number: block_number, - created_contract_address_hash: %Hash{bytes: created_contract_bytes}, - hash: %Hash{bytes: bytes} - }) - when is_integer(block_number) do - {block_number, created_contract_bytes, bytes} - end + @doc """ + Processes a batch of entries to fetch and handle contract code for created + contracts. This function is executed as part of the `BufferedTask` behavior. - defp params({block_number, created_contract_address_hash_bytes, _transaction_hash_bytes}) - when is_integer(block_number) do - {:ok, created_contract_address_hash} = Hash.Address.cast(created_contract_address_hash_bytes) + ## Parameters - %{block_quantity: integer_to_quantity(block_number), address: to_string(created_contract_address_hash)} - end + - `entries`: A list of entries to process. + - `json_rpc_named_arguments`: A list of options for JSON-RPC communication. + ## Returns + + - `:ok`: Indicates successful processing of the contract codes. + - `{:retry, any()}`: Returns the entries for retry if an error occurs during + the fetch operation. + """ @impl BufferedTask @decorate trace( name: "fetch", @@ -95,12 +117,22 @@ defmodule Indexer.Fetcher.ContractCode do service: :indexer, tracer: Tracer ) + @spec run([entry()], [ + {:throttle_timeout, non_neg_integer()} + | {:transport, atom()} + | {:transport_options, any()} + | {:variant, atom()} + ]) :: :ok | {:retry, any()} def run(entries, json_rpc_named_arguments) do Logger.debug("fetching created_contract_code for transactions") entries - |> Enum.uniq() - |> Enum.map(¶ms/1) + |> Enum.map( + &%{ + block_quantity: integer_to_quantity(&1.block_number), + address: to_string(&1.created_contract_address_hash) + } + ) |> EthereumJSONRPC.fetch_codes(json_rpc_named_arguments) |> case do {:ok, create_address_codes} -> @@ -117,9 +149,23 @@ defmodule Indexer.Fetcher.ContractCode do end end + # Imports addresses and their balances for a set of transactions specified + # through `entries`, updating balances of existing addresses. Triggers + # insertion of Scilla smart contracts for Zilliqa. 
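+  #
+  # Returns `:ok` on success, or `{:retry, entries}` so that the `BufferedTask`
+  # machinery re-queues the same entries when the balance fetch or import fails.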
+  @spec import_with_balances([Address.t()], [entry()], [
+          {:throttle_timeout, non_neg_integer()}
+          | {:transport, atom()}
+          | {:transport_options, any()}
+          | {:variant, atom()}
+        ]) :: :ok | {:retry, [entry()]}
   defp import_with_balances(addresses_params, entries, json_rpc_named_arguments) do
     entries
-    |> coin_balances_request_params()
+    |> Enum.map(
+      &%{
+        block_quantity: integer_to_quantity(&1.block_number),
+        hash_data: to_string(&1.created_contract_address_hash)
+      }
+    )
     |> EthereumJSONRPC.fetch_balances(json_rpc_named_arguments, BlockNumber.get_max())
     |> case do
       {:ok, fetched_balances} ->
@@ -131,21 +177,32 @@
           addresses: %{params: merged_addresses_params},
           timeout: :infinity
         }) do
-          {:ok, imported} ->
-            Accounts.drop(imported[:addresses])
+          {:ok, %{addresses: addresses}} ->
+            Accounts.drop(addresses)
+            zilliqa_verify_scilla_contracts(entries, addresses)

             :ok

           {:error, step, reason, _changes_so_far} ->
             Logger.error(
               fn ->
                 [
-                  "failed to import created_contract_code for transactions: ",
+                  @failed_to_import,
                   inspect(reason)
                 ]
               end,
               step: step
             )

+            {:retry, entries}
+
+          {:error, reason} ->
+            Logger.error(fn ->
+              [
+                @failed_to_import,
+                inspect(reason)
+              ]
+            end)
+
             {:retry, entries}
         end
@@ -158,11 +215,18 @@
     end
   end

-  defp coin_balances_request_params(entries) do
-    Enum.map(entries, fn {block_number, created_contract_address_hash_bytes, _transaction_hash_bytes} ->
-      {:ok, created_contract_address_hash} = Hash.Address.cast(created_contract_address_hash_bytes)
-
-      %{block_quantity: integer_to_quantity(block_number), hash_data: to_string(created_contract_address_hash)}
-    end)
+  # Filters and verifies Scilla smart contracts for Zilliqa. Contracts are
+  # identified from transaction attributes and matched with provided addresses,
+  # then processed asynchronously in the separate fetcher.
+  @spec zilliqa_verify_scilla_contracts([entry()], [Address.t()]) :: :ok
+  defp zilliqa_verify_scilla_contracts(entries, addresses) do
+    zilliqa_contract_address_hashes =
+      entries
+      |> Enum.filter(&ZilliqaHelper.scilla_transaction?(&1.type))
+      |> MapSet.new(& &1.created_contract_address_hash)
+
+    addresses
+    |> Enum.filter(&MapSet.member?(zilliqa_contract_address_hashes, &1.hash))
+    |> ZilliqaScillaSmartContractsFetcher.async_fetch(true)
+  end
 end
diff --git a/apps/indexer/lib/indexer/fetcher/empty_blocks_sanitizer.ex b/apps/indexer/lib/indexer/fetcher/empty_blocks_sanitizer.ex
index f7e4bc84c65f..a6a79bce1294 100644
--- a/apps/indexer/lib/indexer/fetcher/empty_blocks_sanitizer.ex
+++ b/apps/indexer/lib/indexer/fetcher/empty_blocks_sanitizer.ex
@@ -1,7 +1,7 @@
 defmodule Indexer.Fetcher.EmptyBlocksSanitizer do
   @moduledoc """
   Periodically checks empty blocks starting from the head of the chain, detects for which blocks transactions should be refetched
-  and lose consensus for block in order to refetch transactions.
+  and sets refetch_needed=true for those blocks in order to refetch their transactions.
""" use GenServer @@ -10,13 +10,16 @@ defmodule Indexer.Fetcher.EmptyBlocksSanitizer do require Logger import Ecto.Query, only: [from: 2, subquery: 1, where: 3] - import EthereumJSONRPC, only: [integer_to_quantity: 1, json_rpc: 2, request: 1] + import EthereumJSONRPC, only: [id_to_params: 1, integer_to_quantity: 1, json_rpc: 2, quantity_to_integer: 1] - alias Ecto.Changeset - alias Explorer.{Chain, Repo} - alias Explorer.Chain.{Block, PendingBlockOperation, Transaction} + alias EthereumJSONRPC.Block.ByNumber + alias EthereumJSONRPC.Blocks + alias Explorer.Repo + alias Explorer.Chain.{Block, Hash, PendingBlockOperation, Transaction} alias Explorer.Chain.Cache.BlockNumber + @update_timeout 60_000 + @interval :timer.seconds(10) defstruct interval: @interval, @@ -80,53 +83,119 @@ defmodule Indexer.Fetcher.EmptyBlocksSanitizer do set: [is_empty: false, updated_at: Timex.now()] ) - unprocessed_empty_blocks_from_db = unprocessed_empty_blocks_query_list(limit()) - - unprocessed_empty_blocks_from_db - |> Enum.with_index() - |> Enum.each(fn {{block_number, block_hash}, ind} -> - with {:ok, %{"transactions" => transactions}} <- - %{id: ind, method: "eth_getBlockByNumber", params: [integer_to_quantity(block_number), false]} - |> request() - |> json_rpc(json_rpc_named_arguments) do - transactions_count = - transactions - |> Enum.count() - - if transactions_count > 0 do - Logger.info( - "Block with number #{block_number} and hash #{to_string(block_hash)} is full of transactions. We should set consensus = false for it in order to refetch.", + unprocessed_empty_blocks_list = unprocessed_empty_blocks_list_query(limit()) + + unless Enum.empty?(unprocessed_empty_blocks_list) do + blocks_response = + unprocessed_empty_blocks_list + |> Enum.map(fn {block_number, _} -> %{number: integer_to_quantity(block_number)} end) + |> id_to_params() + |> Blocks.requests(&ByNumber.request(&1, false, false)) + |> json_rpc(json_rpc_named_arguments) + + case blocks_response do + {:ok, result} -> + {non_empty_blocks, empty_blocks} = classify_blocks_from_result(result) + process_non_empty_blocks(non_empty_blocks) + process_empty_blocks(empty_blocks) + + Logger.info("Batch of empty blocks is sanitized", fetcher: :empty_blocks_to_refetch ) - Block.set_refetch_needed(block_number) - else - Logger.debug( - "Block with number #{block_number} and hash #{to_string(block_hash)} is empty. 
We should set is_empty=true for it.",
+        {:error, reason} ->
+          Logger.error(
+            "Failed to fetch blocks batch: #{inspect(reason)}",
             fetcher: :empty_blocks_to_refetch
           )
+      end
+    end
   end

-        set_is_empty_for_block(block_hash, true)
-      end
     end)
+  end
+
+  defp classify_blocks_from_result(result) do
+    result
+    |> Enum.reduce({[], []}, fn %{id: _id, result: block}, {non_empty_blocks, empty_blocks} ->
+      if Enum.empty?(block["transactions"]) do
+        {non_empty_blocks, [block_fields(block) | empty_blocks]}
+      else
+        {[block_fields(block) | non_empty_blocks], empty_blocks}
+      end
+    end)
+  end
+
+  defp block_fields(block) do
+    %{
+      number: quantity_to_integer(block["number"]),
+      hash: block["hash"],
+      transactions_count: Enum.count(block["transactions"])
+    }
+  end
+
+  defp process_non_empty_blocks([]),
+    do:
+      Logger.debug(
+        "No non-empty blocks found",
+        fetcher: :empty_blocks_to_refetch
+      )
+
+  defp process_non_empty_blocks(non_empty_blocks) do
+    log_message_base =
+      Enum.reduce(non_empty_blocks, "Blocks \n", fn block, acc ->
+        acc <>
+          " with number #{block.number} and hash #{to_string(block.hash)} contains #{inspect(block.transactions_count)} transactions \n"
+      end)
+
+    log_message =
+      log_message_base <>
+        ", but those blocks are empty in Blockscout DB. Setting refetch_needed = true for them to re-fetch."
-    Logger.info("Batch of empty blocks is sanitized",
+    Logger.info(
+      log_message,
       fetcher: :empty_blocks_to_refetch
     )
+
+    Block.set_refetch_needed(non_empty_blocks |> Enum.map(& &1.number))
   end

-  defp set_is_empty_for_block(block_hash, is_empty) do
-    block = Chain.fetch_block_by_hash(block_hash)
+  defp process_empty_blocks([]),
+    do:
+      Logger.debug(
+        "No empty blocks found",
+        fetcher: :empty_blocks_to_refetch
+      )
+
+  defp process_empty_blocks(empty_blocks) do
+    log_message =
+      "Blocks with numbers #{inspect(empty_blocks |> Enum.map(& &1.number))} are empty. We're setting is_empty=true for them."
+ + Logger.debug( + log_message, + fetcher: :empty_blocks_to_refetch + ) + + mark_blocks_as_empty(empty_blocks |> Enum.map(& &1.hash)) + end - block_with_is_empty = - block - |> Changeset.change(%{is_empty: is_empty}) + @spec mark_blocks_as_empty([Hash.Full.t()]) :: + {non_neg_integer(), nil | [term()]} | {:error, %{exception: Postgrex.Error.t()}} + defp mark_blocks_as_empty(block_hashes) do + query = + from( + block in Block, + where: block.hash in ^block_hashes, + # Enforce Block ShareLocks order (see docs: sharelocks.md) + order_by: [asc: block.hash], + lock: "FOR NO KEY UPDATE" + ) - Repo.update(block_with_is_empty) + Repo.update_all( + from(b in Block, join: s in subquery(query), on: b.hash == s.hash, select: b.number), + [set: [is_empty: true, updated_at: Timex.now()]], + timeout: @update_timeout + ) PendingBlockOperation - |> where([po], po.block_hash == ^block_hash) + |> where([po], po.block_hash in ^block_hashes) |> Repo.delete_all() rescue postgrex_error in Postgrex.Error -> @@ -141,6 +210,7 @@ defmodule Indexer.Fetcher.EmptyBlocksSanitizer do where: is_nil(block.is_empty), where: block.number <= ^safe_block_number, where: block.consensus == true, + where: block.refetch_needed == false, order_by: [asc: block.hash], limit: ^limit ) @@ -158,7 +228,7 @@ defmodule Indexer.Fetcher.EmptyBlocksSanitizer do ) end - defp unprocessed_empty_blocks_query_list(limit) do + defp unprocessed_empty_blocks_list_query(limit) do blocks_query = consensus_blocks_with_nil_is_empty_query(limit) query = @@ -167,6 +237,7 @@ defmodule Indexer.Fetcher.EmptyBlocksSanitizer do on: q.number == transaction.block_number, where: is_nil(transaction.block_number), select: {q.number, q.hash}, + distinct: q.number, order_by: [asc: q.hash] ) diff --git a/apps/indexer/lib/indexer/fetcher/on_demand/token_balance.ex b/apps/indexer/lib/indexer/fetcher/on_demand/token_balance.ex index e082020e02cf..f202dddafb94 100644 --- a/apps/indexer/lib/indexer/fetcher/on_demand/token_balance.ex +++ b/apps/indexer/lib/indexer/fetcher/on_demand/token_balance.ex @@ -195,9 +195,16 @@ defmodule Indexer.Fetcher.OnDemand.TokenBalance do end defp prepare_updated_balance({{:error, error}, ctb}, block_number) do + error_message = + if ctb.token_id do + "Error on updating current token #{to_string(ctb.token_contract_address_hash)} balance for address #{to_string(ctb.address_hash)} and token id #{to_string(ctb.token_id)} at block number #{block_number}: " + else + "Error on updating current token #{to_string(ctb.token_contract_address_hash)} balance for address #{to_string(ctb.address_hash)} at block number #{block_number}: " + end + Logger.warning(fn -> [ - "Error on updating current token #{to_string(ctb.token_contract_address_hash)} balance for address #{to_string(ctb.address_hash)} at block number #{block_number}: ", + error_message, inspect(error) ] end) diff --git a/apps/indexer/lib/indexer/fetcher/on_demand/token_instance_metadata_refetch.ex b/apps/indexer/lib/indexer/fetcher/on_demand/token_instance_metadata_refetch.ex index 6d1dd42bc8d7..02127588ba80 100644 --- a/apps/indexer/lib/indexer/fetcher/on_demand/token_instance_metadata_refetch.ex +++ b/apps/indexer/lib/indexer/fetcher/on_demand/token_instance_metadata_refetch.ex @@ -15,6 +15,7 @@ defmodule Indexer.Fetcher.OnDemand.TokenInstanceMetadataRefetch do alias Explorer.Token.MetadataRetriever alias Explorer.Utility.TokenInstanceMetadataRefetchAttempt alias Indexer.Fetcher.TokenInstance.Helper, as: TokenInstanceHelper + alias Indexer.NFTMediaHandler.Queue @max_delay :timer.hours(168) @@ 
-77,6 +78,8 @@ defmodule Indexer.Fetcher.OnDemand.TokenInstanceMetadataRefetch do %{fetched_token_instance_metadata: [to_string(token_instance.token_contract_address_hash), token_id, metadata]}, :on_demand ) + + Queue.process_new_instance({:ok, %TokenInstance{token_instance | metadata: metadata}}) else {:empty_result, true} -> :ok diff --git a/apps/indexer/lib/indexer/fetcher/optimism.ex b/apps/indexer/lib/indexer/fetcher/optimism.ex index 9deaa81fd95e..6945d3736cf9 100644 --- a/apps/indexer/lib/indexer/fetcher/optimism.ex +++ b/apps/indexer/lib/indexer/fetcher/optimism.ex @@ -10,14 +10,12 @@ defmodule Indexer.Fetcher.Optimism do import EthereumJSONRPC, only: [ - fetch_block_number_by_tag_op_version: 2, json_rpc: 2, integer_to_quantity: 1, quantity_to_integer: 1, request: 1 ] - alias EthereumJSONRPC.Block.ByNumber alias EthereumJSONRPC.Contract alias Explorer.Repo alias Indexer.Fetcher.RollupL1ReorgMonitor @@ -62,9 +60,17 @@ defmodule Indexer.Fetcher.Optimism do first_block = max(last_safe_block - @block_check_interval_range_size, 1) with {:ok, first_block_timestamp} <- - get_block_timestamp_by_number(first_block, json_rpc_named_arguments, Helper.infinite_retries_number()), + Helper.get_block_timestamp_by_number_or_tag( + first_block, + json_rpc_named_arguments, + Helper.infinite_retries_number() + ), {:ok, last_safe_block_timestamp} <- - get_block_timestamp_by_number(last_safe_block, json_rpc_named_arguments, Helper.infinite_retries_number()) do + Helper.get_block_timestamp_by_number_or_tag( + last_safe_block, + json_rpc_named_arguments, + Helper.infinite_retries_number() + ) do block_check_interval = ceil((last_safe_block_timestamp - first_block_timestamp) / (last_safe_block - first_block) * 1000 / 2) @@ -76,55 +82,6 @@ defmodule Indexer.Fetcher.Optimism do end end - @doc """ - Fetches block number by its tag (e.g. `latest` or `safe`) using RPC request. - Performs a specified number of retries (up to) if the first attempt returns error. - """ - @spec get_block_number_by_tag(binary(), list(), non_neg_integer()) :: {:ok, non_neg_integer()} | {:error, atom()} - def get_block_number_by_tag(tag, json_rpc_named_arguments, retries \\ @finite_retries_number) do - error_message = &"Cannot fetch #{tag} block number. Error: #{inspect(&1)}" - - Helper.repeated_call( - &fetch_block_number_by_tag_op_version/2, - [tag, json_rpc_named_arguments], - error_message, - retries - ) - end - - defp get_block_timestamp_by_number_inner(number, json_rpc_named_arguments) do - result = - %{id: 0, number: number} - |> ByNumber.request(false) - |> json_rpc(json_rpc_named_arguments) - - with {:ok, block} <- result, - false <- is_nil(block), - timestamp <- Map.get(block, "timestamp"), - false <- is_nil(timestamp) do - {:ok, quantity_to_integer(timestamp)} - else - {:error, message} -> - {:error, message} - - true -> - {:error, "RPC returned nil."} - end - end - - @doc """ - Fetches block timestamp by its number using RPC request. - Performs a specified number of retries (up to) if the first attempt returns error. - """ - @spec get_block_timestamp_by_number(non_neg_integer(), list(), non_neg_integer()) :: - {:ok, non_neg_integer()} | {:error, any()} - def get_block_timestamp_by_number(number, json_rpc_named_arguments, retries \\ @finite_retries_number) do - func = &get_block_timestamp_by_number_inner/2 - args = [number, json_rpc_named_arguments] - error_message = &"Cannot fetch block ##{number} or its timestamp. 
Error: #{inspect(&1)}" - Helper.repeated_call(func, args, error_message, retries) - end - @doc """ Fetches logs emitted by the specified contract (address) within the specified block range and the first topic from the RPC node. @@ -222,6 +179,9 @@ defmodule Indexer.Fetcher.Optimism do Indexer.Fetcher.Optimism.WithdrawalEvent, Indexer.Fetcher.Optimism.OutputRoot ] do + # two seconds pause needed to avoid exceeding Supervisor restart intensity when DB issues + :timer.sleep(2000) + {contract_name, table_name, start_block_note} = case caller do Indexer.Fetcher.Optimism.Deposit -> diff --git a/apps/indexer/lib/indexer/fetcher/optimism/deposit.ex b/apps/indexer/lib/indexer/fetcher/optimism/deposit.ex index 5adb4a0e0b0d..6501cb5c89e7 100644 --- a/apps/indexer/lib/indexer/fetcher/optimism/deposit.ex +++ b/apps/indexer/lib/indexer/fetcher/optimism/deposit.ex @@ -10,7 +10,7 @@ defmodule Indexer.Fetcher.Optimism.Deposit do import Ecto.Query - import EthereumJSONRPC, only: [quantity_to_integer: 1] + import EthereumJSONRPC, only: [id_to_params: 1, quantity_to_integer: 1] import Explorer.Helper, only: [decode_data: 2] alias EthereumJSONRPC.Block.ByNumber @@ -130,7 +130,7 @@ defmodule Indexer.Fetcher.Optimism.Deposit do new_start_block = last_written_block + 1 {:ok, new_end_block} = - Optimism.get_block_number_by_tag("latest", json_rpc_named_arguments, Helper.infinite_retries_number()) + Helper.get_block_number_by_tag("latest", json_rpc_named_arguments, Helper.infinite_retries_number()) delay = if new_end_block == last_written_block do @@ -303,8 +303,7 @@ defmodule Indexer.Fetcher.Optimism.Deposit do Map.put(acc, event["blockNumber"], 0) end) |> Stream.map(fn {block_number, _} -> %{number: block_number} end) - |> Stream.with_index() - |> Enum.into(%{}, fn {params, id} -> {id, params} end) + |> id_to_params() |> Blocks.requests(&ByNumber.request(&1, false, false)) error_message = &"Cannot fetch blocks with batch request. Error: #{inspect(&1)}. Request: #{inspect(request)}" diff --git a/apps/indexer/lib/indexer/fetcher/optimism/dispute_game.ex b/apps/indexer/lib/indexer/fetcher/optimism/dispute_game.ex index 1f6488859821..f8390fe21403 100644 --- a/apps/indexer/lib/indexer/fetcher/optimism/dispute_game.ex +++ b/apps/indexer/lib/indexer/fetcher/optimism/dispute_game.ex @@ -52,6 +52,9 @@ defmodule Indexer.Fetcher.Optimism.DisputeGame do def handle_continue(:ok, _state) do Logger.metadata(fetcher: @fetcher_name) + # two seconds pause needed to avoid exceeding Supervisor restart intensity when DB issues + :timer.sleep(2000) + env = Application.get_all_env(:indexer)[Optimism] system_config = env[:optimism_l1_system_config] rpc = env[:optimism_l1_rpc] diff --git a/apps/indexer/lib/indexer/fetcher/optimism/eip1559_config_update.ex b/apps/indexer/lib/indexer/fetcher/optimism/eip1559_config_update.ex new file mode 100644 index 000000000000..4c09728625b4 --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/optimism/eip1559_config_update.ex @@ -0,0 +1,688 @@ +defmodule Indexer.Fetcher.Optimism.EIP1559ConfigUpdate do + @moduledoc """ + Fills op_eip1559_config_updates DB table. + + The table stores points when EIP-1559 denominator and multiplier were changed, + and the updated values of these parameters. The point is the L2 block number + and its hash. The block hash is needed to detect a possible past reorg when starting + this fetcher. If the past reorg is detected, the module tries to start from + the previous block and so on until a consensus block is found. 
+
+  The parameter values are taken from the `extraData` field of each block. They
+  are stored in every block starting from the Holocene upgrade block. Each block
+  carries the parameter values that apply to subsequent blocks (until they are
+  changed again). The format of the `extraData` field is described at
+  https://specs.optimism.io/protocol/holocene/exec-engine.html#dynamic-eip-1559-parameters
+
+  The Holocene activation block is defined by the INDEXER_OPTIMISM_L2_HOLOCENE_TIMESTAMP env
+  variable, which sets the block timestamp. If this env variable is not defined, the module
+  does not run; in that case the EIP_1559_BASE_FEE_MAX_CHANGE_DENOMINATOR and
+  EIP_1559_ELASTICITY_MULTIPLIER env variables are used as fallback static values. The timestamp
+  can be set to `0`, meaning Holocene is activated from the genesis block.
+  """
+
+  use GenServer
+  use Indexer.Fetcher
+
+  require Logger
+
+  import EthereumJSONRPC, only: [fetch_blocks_by_numbers: 3, json_rpc: 2, quantity_to_integer: 1]
+
+  alias EthereumJSONRPC.Block.ByHash
+  alias EthereumJSONRPC.Blocks
+  alias Explorer.Chain
+  alias Explorer.Chain.Events.Subscriber
+  alias Explorer.Chain.Optimism.EIP1559ConfigUpdate
+  alias Explorer.Chain.RollupReorgMonitorQueue
+  alias Indexer.Helper
+
+  @fetcher_name :optimism_eip1559_config_updates
+  @latest_block_check_interval_seconds 60
+  @empty_hash "0x0000000000000000000000000000000000000000000000000000000000000000"
+
+  def child_spec(start_link_arguments) do
+    spec = %{
+      id: __MODULE__,
+      start: {__MODULE__, :start_link, start_link_arguments},
+      restart: :transient,
+      type: :worker
+    }
+
+    Supervisor.child_spec(spec, [])
+  end
+
+  def start_link(args, gen_server_options \\ []) do
+    GenServer.start_link(__MODULE__, args, Keyword.put_new(gen_server_options, :name, __MODULE__))
+  end
+
+  @impl GenServer
+  def init(args) do
+    json_rpc_named_arguments = args[:json_rpc_named_arguments]
+    {:ok, %{}, {:continue, json_rpc_named_arguments}}
+  end
+
+  # Initialization function which is used instead of `init` to avoid the Supervisor's stop in case of any critical
+  # issues during initialization. It checks the value of the INDEXER_OPTIMISM_L2_HOLOCENE_TIMESTAMP env variable,
+  # waits for the Holocene block (if the module starts before Holocene activation), defines the block range which
+  # must be scanned to handle `extraData` fields, and retrieves the dynamic EIP-1559 parameters (denominator and
+  # multiplier) for each block. The changed parameter values are then written to the `op_eip1559_config_updates`
+  # database table.
+  #
+  # The block range is split into chunks whose max size is defined by the
+  # INDEXER_OPTIMISM_L2_HOLOCENE_BLOCKS_CHUNK_SIZE env variable.
+  #
+  # If Holocene is not activated yet, the function waits for the Holocene block first.
+  #
+  # When the initialization succeeds, the `:continue` message is sent to the GenServer to start the catchup loop
+  # retrieving and saving historical parameter updates.
+  #
+  # ## Parameters
+  # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection to the L2 RPC node.
+  # - `_state`: Initial state of the fetcher (empty map when starting).
+  #
+  # ## Returns
+  # - `{:noreply, state}` when the initialization is successful and the fetching can start. The `state` contains
+  #   the parameters needed for fetching.
+  # - `{:stop, :normal, %{}}` in case of an error or when INDEXER_OPTIMISM_L2_HOLOCENE_TIMESTAMP is not defined.
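The `{:continue, ...}` hand-off documented above is a standard OTP technique for keeping `init/1` fast. A minimal self-contained sketch of the shape, with invented module and function names:

```elixir
defmodule Example.DeferredInitFetcher do
  use GenServer

  def start_link(args), do: GenServer.start_link(__MODULE__, args)

  @impl GenServer
  def init(args) do
    # Return immediately so the Supervisor is not blocked by slow RPC calls;
    # the heavy lifting is deferred to handle_continue/2.
    {:ok, %{}, {:continue, args[:json_rpc_named_arguments]}}
  end

  @impl GenServer
  def handle_continue(json_rpc_named_arguments, _state) do
    case heavy_setup(json_rpc_named_arguments) do
      {:ok, state} ->
        {:noreply, state}

      {:error, _reason} ->
        # Stopping with :normal keeps the Supervisor from restarting the
        # process in a tight loop and exceeding its restart intensity.
        {:stop, :normal, %{}}
    end
  end

  defp heavy_setup(_args), do: {:ok, %{mode: :catchup}}
end
```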
+ @impl GenServer + @spec handle_continue(EthereumJSONRPC.json_rpc_named_arguments(), map()) :: + {:noreply, map()} | {:stop, :normal, map()} + def handle_continue(json_rpc_named_arguments, _state) do + Logger.metadata(fetcher: @fetcher_name) + + # two seconds pause needed to avoid exceeding Supervisor restart intensity when DB issues + :timer.sleep(2000) + + env = Application.get_all_env(:indexer)[__MODULE__] + optimism_env = Application.get_all_env(:indexer)[Indexer.Fetcher.Optimism] + timestamp = env[:holocene_timestamp_l2] + + with false <- is_nil(timestamp), + wait_for_holocene(timestamp, json_rpc_named_arguments), + Subscriber.to(:blocks, :realtime), + {:ok, latest_block_number} = + Helper.get_block_number_by_tag("latest", json_rpc_named_arguments, Helper.infinite_retries_number()), + l2_block_number = + block_number_by_timestamp(timestamp, optimism_env[:block_duration], json_rpc_named_arguments), + EIP1559ConfigUpdate.remove_invalid_updates(l2_block_number, latest_block_number), + {:ok, last_l2_block_number} <- get_last_l2_block_number(json_rpc_named_arguments) do + Logger.debug("l2_block_number = #{l2_block_number}") + Logger.debug("last_l2_block_number = #{last_l2_block_number}") + + Process.send(self(), :continue, []) + + {:noreply, + %{ + start_block: max(l2_block_number, last_l2_block_number), + end_block: latest_block_number, + chunk_size: env[:chunk_size], + timestamp: timestamp, + mode: :catchup, + realtime_range: nil, + json_rpc_named_arguments: json_rpc_named_arguments + }} + else + true -> + # Holocene timestamp is not defined, so we don't start this module + {:stop, :normal, %{}} + + {:error, error_data} -> + Logger.error("Cannot get last L2 block from RPC by its hash due to RPC error: #{inspect(error_data)}") + {:stop, :normal, %{}} + end + end + + # Performs the main handling loop for the specified block range. The block range is split into chunks. + # Max size of a chunk is defined by INDEXER_OPTIMISM_L2_HOLOCENE_BLOCKS_CHUNK_SIZE env variable. + # + # If there are reorg blocks in the block range, the reorgs are handled. In a normal situation, + # the `:handle_realtime` message is sent to GenServer to get the new block range collected from the + # realtime block fetcher. + # + # ## Parameters + # - `:continue`: The GenServer message. + # - `state`: The current state of the fetcher containing block range, max chunk size, etc. + # + # ## Returns + # - `{:noreply, state}` tuple where `state` is the new state of the fetcher containing the updated block range. 
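Before the implementation below, here is a stripped-down model of the same chunk-and-halt control flow. All numbers are invented, the per-chunk work is elided, and we pretend the reorg queue returned block 205 after the first chunk:

```elixir
start_block = 100
end_block = 500
chunk_size = 100
reorg_block = 205

start_block..end_block
|> Enum.chunk_every(chunk_size)
|> Enum.reduce_while({nil, nil}, fn chunk, _acc ->
  chunk_end = List.last(chunk)
  # ... handle_updates(chunk, ...) would run here ...

  cond do
    # no reorg, or reorg beyond the handled range: keep going
    is_nil(reorg_block) or reorg_block > end_block -> {:cont, {nil, nil}}
    # reorg before the range start: nothing left to redo
    reorg_block < start_block -> {:halt, {nil, nil}}
    # reorg inside the range: restart just after the handled chunk, up to the reorg block
    true -> {:halt, {min(chunk_end + 1, reorg_block), reorg_block}}
  end
end)
#=> {200, 205}: the main loop is restarted for blocks 200..205
```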
+ @impl GenServer + def handle_info( + :continue, + %{ + start_block: start_block, + end_block: end_block, + chunk_size: chunk_size, + mode: mode, + json_rpc_named_arguments: json_rpc_named_arguments + } = state + ) do + {new_start_block, new_end_block} = + start_block..end_block + |> Enum.chunk_every(chunk_size) + |> Enum.reduce_while({nil, nil}, fn block_numbers, _acc -> + chunk_start = List.first(block_numbers) + chunk_end = List.last(block_numbers) + + Helper.log_blocks_chunk_handling(chunk_start, chunk_end, start_block, end_block, nil, :L2) + + updates_count = handle_updates(block_numbers, json_rpc_named_arguments) + + Helper.log_blocks_chunk_handling( + chunk_start, + chunk_end, + start_block, + end_block, + "#{updates_count} update(s).", + :L2 + ) + + reorg_block_number = handle_reorgs_queue() + + cond do + is_nil(reorg_block_number) or reorg_block_number > end_block -> + {:cont, {nil, nil}} + + reorg_block_number < start_block -> + {:halt, {nil, nil}} + + true -> + new_start_block = min(chunk_end + 1, reorg_block_number) + new_end_block = reorg_block_number + {:halt, {new_start_block, new_end_block}} + end + end) + + if is_nil(new_start_block) or is_nil(new_end_block) do + Logger.info("The fetcher loop for the range #{inspect(start_block..end_block)} finished.") + + if mode == :catchup do + Logger.info("Switching to realtime mode...") + end + + Process.send(self(), :handle_realtime, []) + {:noreply, state} + else + Process.send(self(), :continue, []) + {:noreply, %{state | start_block: new_start_block, end_block: new_end_block}} + end + end + + # Gets the updated block range, sends it to the main handling loop, and resets the range + # to collect the next range from the realtime block fetcher. If the range is not defined yet, + # the function waits for 3 seconds and repeats the range checking. + # + # ## Parameters + # - `:handle_realtime`: The GenServer message. + # - `state`: The current state of the fetcher. + # + # ## Returns + # - `{:noreply, state}` tuple where `state` is the new state of the fetcher containing the updated block range + # used by the main loop. + @impl GenServer + def handle_info(:handle_realtime, state) do + case Map.get(state, :realtime_range) do + nil -> + Process.send_after(self(), :handle_realtime, 3_000) + {:noreply, state} + + start_block..end_block//_ -> + Process.send(self(), :continue, []) + {:noreply, %{state | start_block: start_block, end_block: end_block, mode: :realtime, realtime_range: nil}} + end + end + + # Catches new block from the realtime block fetcher to form the next block range to handle by the main loop. + # + # ## Parameters + # - `{:chain_event, :blocks, :realtime, blocks}`: The GenServer message containing the list of blocks + # taken by the realtime block fetcher. + # - `state`: The current fetcher state containing the end block of the current main loop range. + # + # ## Returns + # - `{:noreply, state}` tuple where `state` is the new state of the fetcher containing the updated block range + # used by the `:handle_realtime` handler. 
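The range-merging this handler performs is easiest to see with concrete numbers (all invented): blocks at or below `end_block` are ignored, and the surviving numbers widen the previously accumulated range.

```elixir
end_block = 114
prev_range = 115..116
incoming = [%{number: 118}, %{number: 113}, %{number: 117}]

{new_min, new_max} =
  incoming
  |> Enum.filter(&(&1.number > end_block))
  |> Enum.map(& &1.number)
  |> Enum.min_max(fn -> {nil, nil} end)
#=> {117, 118}

prev_min..prev_max//_ = prev_range
Range.new(min(prev_min, new_min), max(prev_max, new_max))
#=> 115..118
```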
+ @impl GenServer + def handle_info({:chain_event, :blocks, :realtime, blocks}, %{end_block: end_block} = state) do + {new_min, new_max} = + blocks + |> Enum.filter(fn block -> + block.number > end_block + end) + |> Enum.map(fn block -> block.number end) + |> Enum.min_max(fn -> {nil, nil} end) + + new_realtime_range = + if !is_nil(new_min) and !is_nil(new_max) do + case Map.get(state, :realtime_range) do + nil -> Range.new(new_min, new_max) + prev_min..prev_max//_ -> Range.new(min(prev_min, new_min), max(prev_max, new_max)) + end + end + + {:noreply, %{state | realtime_range: new_realtime_range}} + end + + @impl GenServer + def handle_info({ref, _result}, state) do + Process.demonitor(ref, [:flush]) + {:noreply, state} + end + + @doc """ + Catches L2 reorg block from the realtime block fetcher and keeps it in a queue + to handle that later by the main loop. + + ## Parameters + - `reorg_block`: The number of reorg block. + + ## Returns + - nothing. + """ + @spec handle_realtime_l2_reorg(non_neg_integer()) :: any() + def handle_realtime_l2_reorg(reorg_block) do + Logger.warning("L2 reorg was detected at block #{reorg_block}.") + RollupReorgMonitorQueue.reorg_block_push(reorg_block, __MODULE__) + end + + # Removes all rows from the `op_eip1559_config_updates` table which have `l2_block_number` greater or equal to the reorg block number. + # Also, resets the last handled L2 block number in the `last_fetched_counters` database table. + # + # ## Parameters + # - `reorg_block_number`: The L2 reorg block number. + # + # ## Returns + # - nothing. + @spec handle_reorg(non_neg_integer()) :: any() + defp handle_reorg(reorg_block_number) do + deleted_count = EIP1559ConfigUpdate.remove_invalid_updates(0, reorg_block_number - 1) + + Logger.warning( + "As L2 reorg was detected, all rows with l2_block_number >= #{reorg_block_number} were removed from the op_eip1559_config_updates table. Number of removed rows: #{deleted_count}." + ) + + EIP1559ConfigUpdate.set_last_l2_block_hash(@empty_hash) + end + + # Reads reorg block numbers queue, pops the block numbers from that, + # and handles each reorg block from the queue. + # + # ## Returns + # - The earliest reorg block number. + # - `nil` if the queue is empty. + @spec handle_reorgs_queue() :: non_neg_integer() | nil + defp handle_reorgs_queue do + Enum.reduce_while(Stream.iterate(0, &(&1 + 1)), nil, fn _i, acc -> + reorg_block_number = RollupReorgMonitorQueue.reorg_block_pop(__MODULE__) + + if is_nil(reorg_block_number) do + {:halt, acc} + else + handle_reorg(reorg_block_number) + {:cont, min(reorg_block_number, acc)} + end + end) + end + + # Retrieves updated config parameters from the specified blocks and saves them to the database. + # The parameters are read from the `extraData` field which format is as follows: + # 1-byte version ++ 4-byte denominator ++ 4-byte elasticity + # + # The last handled block is kept in the `last_fetched_counters` table to start from that after + # instance restart. + # + # ## Parameters + # - `block_numbers`: The list of block numbers for which we need to check and update config parameters. + # Note that the size of this list cannot be larger than max batch request size on RPC node. + # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + # + # ## Returns + # - The number of inserted rows into the `op_eip1559_config_updates` database table. 
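The 9-byte payload described above decodes with a plain binary pattern match. A worked example with invented parameter values (version 0, denominator 250, elasticity 6):

```elixir
extra_data = Base.decode16!("00000000FA00000006")

<<version::size(8), denominator::size(32), elasticity::size(32), _rest::binary>> = extra_data

{version, denominator, elasticity}
#=> {0, 250, 6}
```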
+  @spec handle_updates([non_neg_integer()], EthereumJSONRPC.json_rpc_named_arguments()) :: non_neg_integer()
+  defp handle_updates(block_numbers, json_rpc_named_arguments) do
+    case fetch_blocks_by_numbers(block_numbers, json_rpc_named_arguments, false) do
+      {:ok, %Blocks{blocks_params: blocks_params, errors: []}} ->
+        block_numbers_filtered =
+          block_numbers
+          |> Enum.filter(fn block_number ->
+            Enum.any?(blocks_params, fn b ->
+              !is_nil(b) and b.number == block_number
+            end)
+          end)
+
+        last_block_number = List.last(block_numbers_filtered)
+
+        Enum.reduce(block_numbers_filtered, 0, fn block_number, acc ->
+          # credo:disable-for-next-line Credo.Check.Refactor.Nesting
+          block = Enum.find(blocks_params, %{extra_data: "0x"}, fn b -> b.number == block_number end)
+
+          extra_data =
+            block.extra_data
+            |> String.trim_leading("0x")
+            |> Base.decode16!(case: :mixed)
+
+          return =
+            with {:valid_format, true} <- {:valid_format, byte_size(extra_data) >= 9},
+                 <<version::size(8), denominator::size(32), elasticity::size(32), _rest::binary>> = extra_data,
+                 {:valid_version, _version, true} <- {:valid_version, version, version == 0},
+                 prev_config = EIP1559ConfigUpdate.actual_config_for_block(block.number),
+                 new_config = {denominator, elasticity},
+                 {:updated_config, true} <- {:updated_config, prev_config != new_config} do
+              update_config(block.number, block.hash, denominator, elasticity)
+
+              Logger.info(
+                "Config was updated at block #{block.number}. Previous one: #{inspect(prev_config)}. New one: #{inspect(new_config)}."
+              )
+
+              acc + 1
+            else
+              {:valid_format, false} ->
+                Logger.warning("extraData of the block ##{block_number} has invalid format. Ignoring it.")
+                acc
+
+              {:valid_version, version, false} ->
+                Logger.warning("extraData of the block ##{block_number} has invalid version #{version}. Ignoring it.")
+                acc
+
+              {:updated_config, false} ->
+                acc
+            end
+
+          # credo:disable-for-next-line Credo.Check.Refactor.Nesting
+          if block.number == last_block_number do
+            EIP1559ConfigUpdate.set_last_l2_block_hash(block.hash)
+          end
+
+          return
+        end)
+
+      {_, message_or_errors} ->
+        message =
+          case message_or_errors do
+            %Blocks{errors: errors} -> errors
+            msg -> msg
+          end
+
+        chunk_start = List.first(block_numbers)
+        chunk_end = List.last(block_numbers)
+
+        Logger.error(
+          "Cannot fetch blocks #{inspect(chunk_start..chunk_end)}. Error(s): #{inspect(message)} Retrying..."
+        )
+
+        :timer.sleep(3000)
+        handle_updates(block_numbers, json_rpc_named_arguments)
+    end
+  end
+
+  # Inserts a new row into the `op_eip1559_config_updates` database table.
+  #
+  # ## Parameters
+  # - `l2_block_number`: L2 block number of the config update.
+  # - `l2_block_hash`: L2 block hash of the config update.
+  # - `base_fee_max_change_denominator`: A new value for EIP-1559 denominator.
+  # - `elasticity_multiplier`: A new value for EIP-1559 multiplier.
+  @spec update_config(non_neg_integer(), binary(), non_neg_integer(), non_neg_integer()) :: no_return()
+  defp update_config(l2_block_number, l2_block_hash, base_fee_max_change_denominator, elasticity_multiplier) do
+    updates = [
+      %{
+        l2_block_number: l2_block_number,
+        l2_block_hash: l2_block_hash,
+        base_fee_max_change_denominator: base_fee_max_change_denominator,
+        elasticity_multiplier: elasticity_multiplier
+      }
+    ]
+
+    {:ok, _} =
+      Chain.import(%{
+        optimism_eip1559_config_updates: %{params: updates},
+        timeout: :infinity
+      })
+  end
+
+  # Determines a block number by its timestamp. The function firstly tries to get the nearest block
+  # number to the specified timestamp using the database. If the block is not found, the RPC is used.
+  #
+  # ## Parameters
+  # - `timestamp`: The timestamp for which the block number is being determined.
+  # - `block_duration`: The average block duration, in seconds. Used for the RPC approach.
+  # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. Used for the RPC approach.
+  #
+  # ## Returns
+  # - The block number corresponding to the given timestamp.
+  @spec block_number_by_timestamp(non_neg_integer(), non_neg_integer(), EthereumJSONRPC.json_rpc_named_arguments()) ::
+          non_neg_integer()
+  defp block_number_by_timestamp(timestamp, block_duration, json_rpc_named_arguments)
+
+  defp block_number_by_timestamp(0, _block_duration, _json_rpc_named_arguments), do: 0
+
+  defp block_number_by_timestamp(timestamp, block_duration, json_rpc_named_arguments) do
+    {:ok, timestamp_dt} = DateTime.from_unix(timestamp)
+
+    Logger.info("Trying to detect Holocene block number by its timestamp using indexed L2 blocks...")
+
+    block_number = EIP1559ConfigUpdate.nearest_block_number_to_timestamp(timestamp_dt)
+
+    if is_nil(block_number) do
+      Logger.info(
+        "Cannot detect Holocene block number using indexed L2 blocks. Trying to calculate the number using RPC requests..."
+      )
+
+      block_number_by_timestamp_from_rpc(timestamp, block_duration, json_rpc_named_arguments)
+    else
+      Logger.info("Holocene block number is detected using indexed L2 blocks. The block number is #{block_number}")
+      block_number
+    end
+  end
+
+  # Fetches block data by its hash using an RPC request.
+  #
+  # ## Parameters
+  # - `hash`: The block hash.
+  # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection.
+  #
+  # ## Returns
+  # - `{:ok, block}` tuple in case of success.
+  # - `{:error, message}` tuple in case of failure.
+  @spec get_block_by_hash(binary(), EthereumJSONRPC.json_rpc_named_arguments()) :: {:ok, any()} | {:error, any()}
+  defp get_block_by_hash(hash, json_rpc_named_arguments) do
+    req = ByHash.request(%{id: 0, hash: hash}, false)
+
+    error_message = &"eth_getBlockByHash failed. Error: #{inspect(&1)}"
+
+    Helper.repeated_call(&json_rpc/2, [req, json_rpc_named_arguments], error_message, Helper.infinite_retries_number())
+  end
+
+  # Gets the last known L2 block number from the `op_eip1559_config_updates` database table.
+  # When the block number is found, the function checks that the block is still actual (to catch
+  # past reorgs). If the block has lost consensus, the corresponding row is removed from the table
+  # and the preceding block is checked instead, and so on until a row with a non-reorged block
+  # is found.
+  #
+  # ## Parameters
+  # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection.
+  #
+  # ## Returns
+  # - `{:ok, number}` tuple with the block number of the last actual row. The number can be `0` if there are no rows.
+  # - `{:error, message}` tuple in case of RPC error.
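The first-stage hash check described above amounts to a small decision table. An illustrative sketch (the `resume` closure is invented; the real code also converts the returned quantity with `quantity_to_integer/1`):

```elixir
resume = fn
  # eth_getBlockByHash returned nil: the stored block was reorged away,
  # so fall back to the last row of the op_eip1559_config_updates table
  {:ok, nil} -> :fallback_to_db
  # the hash is still canonical: resume from its block number
  {:ok, %{"number" => number}} -> {:ok, number}
  # transient RPC failure: fall back as well
  {:error, _reason} -> :fallback_to_db
end

resume.({:ok, nil})                    #=> :fallback_to_db
resume.({:ok, %{"number" => "0x2a"}})  #=> {:ok, "0x2a"}
```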
+ @spec get_last_l2_block_number(EthereumJSONRPC.json_rpc_named_arguments()) :: + {:ok, non_neg_integer()} | {:error, any()} + defp get_last_l2_block_number(json_rpc_named_arguments) do + last_l2_block_hash = EIP1559ConfigUpdate.last_l2_block_hash() + + last_l2_block_number = + if last_l2_block_hash != @empty_hash do + case get_block_by_hash(last_l2_block_hash, json_rpc_named_arguments) do + {:ok, nil} -> + # it seems there was a reorg, so we need to reset the block hash in the counter + # and then use the below approach taking the block hash from `op_eip1559_config_updates` table + EIP1559ConfigUpdate.set_last_l2_block_hash(@empty_hash) + nil + + {:ok, last_l2_block} -> + # the block hash is actual, so use the block number + last_l2_block + |> Map.get("number") + |> quantity_to_integer() + + {:error, _} -> + # something went wrong, so use the below approach + nil + end + end + + if is_nil(last_l2_block_number) do + {last_l2_block_number, last_l2_block_hash} = EIP1559ConfigUpdate.get_last_item() + + with {:empty_hash, false} <- {:empty_hash, is_nil(last_l2_block_hash)}, + {:ok, last_l2_block} <- get_block_by_hash(last_l2_block_hash, json_rpc_named_arguments), + {:empty_block, false} <- {:empty_block, is_nil(last_l2_block)} do + {:ok, last_l2_block_number} + else + {:empty_hash, true} -> + {:ok, 0} + + {:error, _} = error -> + error + + {:empty_block, true} -> + Logger.error( + "Cannot find the last L2 block from RPC by its hash (#{last_l2_block_hash}). Probably, there was a reorg on L2 chain. Trying to check preceding block..." + ) + + EIP1559ConfigUpdate.remove_invalid_updates(0, last_l2_block_number - 1) + + get_last_l2_block_number(json_rpc_named_arguments) + end + else + {:ok, last_l2_block_number} + end + end + + # Determines a block number by its timestamp using RPC. The function uses the average block + # duration and the latest block timestamp to calculate the required block number + # by the specified timestamp. + # + # If the found block was created later or earlier than the given timestamp + # (that can happen if the average block timestamp is not constant), the function + # additionally clarifies the block duration using the next block's timestamp + # and tries to calculate the block number again. + # + # ## Parameters + # - `timestamp`: The timestamp for which the block number is being determined. + # - `block_duration`: The average block duration, seconds. + # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + # - `ref_block_number`: The reference block number for the calculation. If nil, the latest block is used. + # - `ref_block_timestamp`: The timestamp of the reference block number. If nil, the latest block timestamp is used. + # + # ## Returns + # - The block number corresponding to the given timestamp. 
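Worked numbers for the estimation step described above (all values invented): with the reference block 600 seconds ahead of the target timestamp and a 2-second average block time, the first guess lands 300 blocks back.

```elixir
timestamp = 1_700_000_000
ref_timestamp = 1_700_000_600
ref_number = 10_000
block_duration = 2

gap = div(abs(ref_timestamp - timestamp), block_duration)
#=> 300

block_number =
  if ref_timestamp > timestamp,
    do: ref_number - gap,
    else: ref_number + gap
#=> 9_700

# If block 9_700's actual timestamp still differs, the function refines
# block_duration from two neighboring blocks and recurses.
```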
+ @spec block_number_by_timestamp_from_rpc( + non_neg_integer(), + non_neg_integer(), + EthereumJSONRPC.json_rpc_named_arguments(), + non_neg_integer() | nil, + non_neg_integer() | nil + ) :: non_neg_integer() + defp block_number_by_timestamp_from_rpc( + timestamp, + block_duration, + json_rpc_named_arguments, + ref_block_number \\ nil, + ref_block_timestamp \\ nil + ) do + ref_number = + if is_nil(ref_block_number) do + {:ok, latest_block_number} = + Helper.get_block_number_by_tag("latest", json_rpc_named_arguments, Helper.infinite_retries_number()) + + latest_block_number + else + ref_block_number + end + + ref_timestamp = + if is_nil(ref_block_timestamp) do + {:ok, block_timestamp} = + Helper.get_block_timestamp_by_number_or_tag( + ref_number, + json_rpc_named_arguments, + Helper.infinite_retries_number() + ) + + block_timestamp + else + ref_block_timestamp + end + + gap = div(abs(ref_timestamp - timestamp), block_duration) + + block_number = + if ref_timestamp > timestamp do + ref_number - gap + else + ref_number + gap + end + + {:ok, block_timestamp} = + Helper.get_block_timestamp_by_number_or_tag( + block_number, + json_rpc_named_arguments, + Helper.infinite_retries_number() + ) + + if block_timestamp == timestamp do + Logger.info("Holocene block number was successfully calculated using RPC. The block number is #{block_number}") + block_number + else + next_block_number = block_number + 1 + + {:ok, next_block_timestamp} = + Helper.get_block_timestamp_by_number_or_tag( + next_block_number, + json_rpc_named_arguments, + Helper.infinite_retries_number() + ) + + if next_block_timestamp == timestamp do + Logger.info( + "Holocene block number was successfully calculated using RPC. The block number is #{next_block_number}" + ) + + next_block_number + else + :timer.sleep(1000) + Logger.info("Another try for Holocene block number calculation using RPC...") + + Logger.debug( + "block_number = #{block_number}, next_block_number = #{next_block_number}, block_timestamp = #{block_timestamp}, next_block_timestamp = #{next_block_timestamp}" + ) + + block_number_by_timestamp_from_rpc( + timestamp, + next_block_timestamp - block_timestamp, + json_rpc_named_arguments, + block_number, + block_timestamp + ) + end + end + end + + # Infinitely waits for the OP Holocene upgrade. + # + # ## Parameters + # - `timestamp`: The timestamp of the Holocene. + # - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + @spec wait_for_holocene(non_neg_integer(), EthereumJSONRPC.json_rpc_named_arguments()) :: any() + defp wait_for_holocene(timestamp, json_rpc_named_arguments) do + {:ok, latest_timestamp} = + Helper.get_block_timestamp_by_number_or_tag(:latest, json_rpc_named_arguments, Helper.infinite_retries_number()) + + if latest_timestamp < timestamp do + Logger.info("Holocene is not activated yet. 
Waiting for the timestamp #{timestamp} to be reached...") + :timer.sleep(@latest_block_check_interval_seconds * 1_000) + wait_for_holocene(timestamp, json_rpc_named_arguments) + else + Logger.info("Holocene activation detected") + end + end +end diff --git a/apps/indexer/lib/indexer/fetcher/optimism/output_root.ex b/apps/indexer/lib/indexer/fetcher/optimism/output_root.ex index 6223d2f731a2..5908c9778b85 100644 --- a/apps/indexer/lib/indexer/fetcher/optimism/output_root.ex +++ b/apps/indexer/lib/indexer/fetcher/optimism/output_root.ex @@ -135,7 +135,11 @@ defmodule Indexer.Fetcher.Optimism.OutputRoot do new_start_block = last_written_block + 1 {:ok, new_end_block} = - Optimism.get_block_number_by_tag("latest", json_rpc_named_arguments, IndexerHelper.infinite_retries_number()) + IndexerHelper.get_block_number_by_tag( + "latest", + json_rpc_named_arguments, + IndexerHelper.infinite_retries_number() + ) delay = if new_end_block == last_written_block do diff --git a/apps/indexer/lib/indexer/fetcher/optimism/transaction_batch.ex b/apps/indexer/lib/indexer/fetcher/optimism/transaction_batch.ex index a8daf641a109..26d1179687ff 100644 --- a/apps/indexer/lib/indexer/fetcher/optimism/transaction_batch.ex +++ b/apps/indexer/lib/indexer/fetcher/optimism/transaction_batch.ex @@ -336,7 +336,7 @@ defmodule Indexer.Fetcher.Optimism.TransactionBatch do new_start_block = last_written_block + 1 {:ok, new_end_block} = - Optimism.get_block_number_by_tag( + Helper.get_block_number_by_tag( "latest", json_rpc_named_arguments, Helper.infinite_retries_number() diff --git a/apps/indexer/lib/indexer/fetcher/optimism/withdrawal.ex b/apps/indexer/lib/indexer/fetcher/optimism/withdrawal.ex index 0c64506ba452..033ae26251b1 100644 --- a/apps/indexer/lib/indexer/fetcher/optimism/withdrawal.ex +++ b/apps/indexer/lib/indexer/fetcher/optimism/withdrawal.ex @@ -49,6 +49,9 @@ defmodule Indexer.Fetcher.Optimism.Withdrawal do def handle_continue(json_rpc_named_arguments, state) do Logger.metadata(fetcher: @fetcher_name) + # two seconds pause needed to avoid exceeding Supervisor restart intensity when DB issues + :timer.sleep(2000) + env = Application.get_all_env(:indexer)[__MODULE__] with {:start_block_l2_undefined, false} <- {:start_block_l2_undefined, is_nil(env[:start_block_l2])}, @@ -141,7 +144,7 @@ defmodule Indexer.Fetcher.Optimism.Withdrawal do if not safe_block_is_latest do # find and fill all events between "safe" and "latest" block (excluding "safe") - {:ok, latest_block} = Optimism.get_block_number_by_tag("latest", json_rpc_named_arguments) + {:ok, latest_block} = Helper.get_block_number_by_tag("latest", json_rpc_named_arguments) fill_block_range(safe_block + 1, latest_block, message_passer, json_rpc_named_arguments, eth_get_logs_range_size) end diff --git a/apps/indexer/lib/indexer/fetcher/optimism/withdrawal_event.ex b/apps/indexer/lib/indexer/fetcher/optimism/withdrawal_event.ex index 5efb32390669..f6287903f42f 100644 --- a/apps/indexer/lib/indexer/fetcher/optimism/withdrawal_event.ex +++ b/apps/indexer/lib/indexer/fetcher/optimism/withdrawal_event.ex @@ -10,7 +10,7 @@ defmodule Indexer.Fetcher.Optimism.WithdrawalEvent do import Ecto.Query - import EthereumJSONRPC, only: [quantity_to_integer: 1] + import EthereumJSONRPC, only: [id_to_params: 1, quantity_to_integer: 1] alias EthereumJSONRPC.Block.ByNumber alias EthereumJSONRPC.Blocks @@ -137,7 +137,7 @@ defmodule Indexer.Fetcher.Optimism.WithdrawalEvent do new_start_block = last_written_block + 1 {:ok, new_end_block} = - Optimism.get_block_number_by_tag("latest", 
json_rpc_named_arguments, Helper.infinite_retries_number()) + Helper.get_block_number_by_tag("latest", json_rpc_named_arguments, Helper.infinite_retries_number()) delay = if new_end_block == last_written_block do @@ -301,8 +301,7 @@ defmodule Indexer.Fetcher.Optimism.WithdrawalEvent do Map.put(acc, event["blockNumber"], 0) end) |> Stream.map(fn {block_number, _} -> %{number: block_number} end) - |> Stream.with_index() - |> Enum.into(%{}, fn {params, id} -> {id, params} end) + |> id_to_params() |> Blocks.requests(&ByNumber.request(&1, true, false)) error_message = &"Cannot fetch blocks with batch request. Error: #{inspect(&1)}. Request: #{inspect(request)}" diff --git a/apps/indexer/lib/indexer/fetcher/polygon_edge/deposit.ex b/apps/indexer/lib/indexer/fetcher/polygon_edge/deposit.ex index ceb65d5c610e..0ebade665768 100644 --- a/apps/indexer/lib/indexer/fetcher/polygon_edge/deposit.ex +++ b/apps/indexer/lib/indexer/fetcher/polygon_edge/deposit.ex @@ -10,7 +10,7 @@ defmodule Indexer.Fetcher.PolygonEdge.Deposit do require Logger - import EthereumJSONRPC, only: [quantity_to_integer: 1] + import EthereumJSONRPC, only: [id_to_params: 1, quantity_to_integer: 1] import Explorer.Helper, only: [decode_data: 2] alias ABI.TypeDecoder @@ -139,8 +139,7 @@ defmodule Indexer.Fetcher.PolygonEdge.Deposit do Map.put(acc, event["blockNumber"], 0) end) |> Stream.map(fn {block_number, _} -> %{number: block_number} end) - |> Stream.with_index() - |> Enum.into(%{}, fn {params, id} -> {id, params} end) + |> id_to_params() |> Blocks.requests(&ByNumber.request(&1, false, false)) error_message = &"Cannot fetch blocks with batch request. Error: #{inspect(&1)}. Request: #{inspect(request)}" diff --git a/apps/indexer/lib/indexer/fetcher/shibarium/l1.ex b/apps/indexer/lib/indexer/fetcher/shibarium/l1.ex index 99b7d7343d0e..f65498e88445 100644 --- a/apps/indexer/lib/indexer/fetcher/shibarium/l1.ex +++ b/apps/indexer/lib/indexer/fetcher/shibarium/l1.ex @@ -346,9 +346,10 @@ defmodule Indexer.Fetcher.Shibarium.L1 do defp get_block_check_interval(json_rpc_named_arguments) do with {:ok, latest_block} <- Helper.get_block_number_by_tag("latest", json_rpc_named_arguments), first_block = max(latest_block - @block_check_interval_range_size, 1), - {:ok, first_block_timestamp} <- Helper.get_block_timestamp_by_number(first_block, json_rpc_named_arguments), + {:ok, first_block_timestamp} <- + Helper.get_block_timestamp_by_number_or_tag(first_block, json_rpc_named_arguments), {:ok, last_safe_block_timestamp} <- - Helper.get_block_timestamp_by_number(latest_block, json_rpc_named_arguments) do + Helper.get_block_timestamp_by_number_or_tag(latest_block, json_rpc_named_arguments) do block_check_interval = ceil((last_safe_block_timestamp - first_block_timestamp) / (latest_block - first_block) * 1000 / 2) diff --git a/apps/indexer/lib/indexer/fetcher/shibarium/l2.ex b/apps/indexer/lib/indexer/fetcher/shibarium/l2.ex index 4007ba2211c5..3aa2d42826c6 100644 --- a/apps/indexer/lib/indexer/fetcher/shibarium/l2.ex +++ b/apps/indexer/lib/indexer/fetcher/shibarium/l2.ex @@ -12,6 +12,7 @@ defmodule Indexer.Fetcher.Shibarium.L2 do import EthereumJSONRPC, only: [ + id_to_params: 1, json_rpc: 2, quantity_to_integer: 1, request: 1 @@ -291,8 +292,7 @@ defmodule Indexer.Fetcher.Shibarium.L2 do request = range |> Stream.map(fn block_number -> %{number: block_number} end) - |> Stream.with_index() - |> Enum.into(%{}, fn {params, id} -> {id, params} end) + |> id_to_params() |> Blocks.requests(&ByNumber.request(&1)) error_message = &"Cannot fetch blocks with 
batch request. Error: #{inspect(&1)}. Request: #{inspect(request)}" diff --git a/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex b/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex index cb6a9b4bc3dc..112508478cb6 100644 --- a/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex +++ b/apps/indexer/lib/indexer/fetcher/token_instance/helper.ex @@ -5,6 +5,7 @@ defmodule Indexer.Fetcher.TokenInstance.Helper do alias Explorer.Chain alias Explorer.SmartContract.Reader alias Explorer.Token.MetadataRetriever + alias Indexer.NFTMediaHandler.Queue require Logger @@ -291,7 +292,7 @@ defmodule Indexer.Fetcher.TokenInstance.Helper do end defp upsert_with_rescue(insert_params, token_id, token_contract_address_hash, retrying? \\ false) do - Chain.upsert_token_instance(insert_params) + insert_params |> Chain.upsert_token_instance() |> Queue.process_new_instance() rescue error in Postgrex.Error -> if retrying? do diff --git a/apps/indexer/lib/indexer/fetcher/zilliqa/scilla_smart_contracts.ex b/apps/indexer/lib/indexer/fetcher/zilliqa/scilla_smart_contracts.ex new file mode 100644 index 000000000000..2a022db6de8c --- /dev/null +++ b/apps/indexer/lib/indexer/fetcher/zilliqa/scilla_smart_contracts.ex @@ -0,0 +1,135 @@ +defmodule Indexer.Fetcher.Zilliqa.ScillaSmartContracts do + @moduledoc """ + Marks Scilla smart contracts as verified on the Zilliqa blockchain. These + contracts are treated as verified since their code is stored on-chain, + allowing for direct access. + """ + alias Indexer.{BufferedTask, Tracer} + require Logger + + use Indexer.Fetcher, restart: :permanent + use Spandex.Decorators + + alias Explorer.Chain.{Address, Data, SmartContract} + alias Explorer.Chain.Zilliqa.Reader + + @behaviour BufferedTask + + @default_max_batch_size 1 + @default_max_concurrency 1 + + @doc false + @spec child_spec([...]) :: Supervisor.child_spec() + def child_spec([init_options, gen_server_options]) do + merged_init_opts = + defaults() + |> Keyword.merge(init_options) + |> Keyword.put(:state, nil) + + Supervisor.child_spec( + {BufferedTask, [{__MODULE__, merged_init_opts}, gen_server_options]}, + id: __MODULE__ + ) + end + + def defaults do + [ + poll: false, + flush_interval: :timer.seconds(3), + max_concurrency: Application.get_env(:indexer, __MODULE__)[:concurrency] || @default_max_concurrency, + max_batch_size: Application.get_env(:indexer, __MODULE__)[:batch_size] || @default_max_batch_size, + task_supervisor: __MODULE__.TaskSupervisor, + metadata: [fetcher: :scilla_smart_contracts] + ] + end + + @doc """ + Asynchronously fetches and processes a list of unique Scilla smart contract + addresses for verification. If the associated supervisor is disabled, + the function simply returns `:ok` without performing any action. + + ## Parameters + + - `entries`: A list of `Address.t()` structs representing contract addresses + to be processed. Duplicates are removed before processing. + - `realtime?`: A boolean indicating whether the fetching should occur with priority. + - `timeout`: An integer representing the timeout duration (in milliseconds) + for the fetch operation. Defaults to `5000`. + + ## Returns + + - `:ok`: Always returns `:ok`, either after queuing the unique entries for + buffering or if the supervisor is disabled. 
+ """ + @spec async_fetch([Address.t()], boolean(), integer()) :: :ok + def async_fetch(entries, realtime?, timeout \\ 5000) when is_list(entries) do + if __MODULE__.Supervisor.disabled?() do + :ok + else + BufferedTask.buffer(__MODULE__, entries |> Enum.uniq(), realtime?, timeout) + end + end + + @impl BufferedTask + def init(initial, reducer, _json_rpc_named_arguments) do + {:ok, final} = + Reader.stream_unverified_scilla_smart_contract_addresses( + initial, + reducer, + true + ) + + final + end + + @doc """ + Processes a batch of unverified Scilla smart contract addresses, verifying + each contract's validity and creating it in the database. The function + verifies that each contract's code is a valid UTF-8 string. If valid, it + attempts to create a new smart contract record. + + ## Parameters + + - `[Address.t()]`: A list of addresses, where each address is a struct with + contract data to be verified. + - `_opts`: Additional options for processing, currently unused. + + ## Returns + + - `:ok`: Indicates successful contract creation or if the contract code is + invalid and therefore skipped. + - `:retry`: Returned if an error occurs during contract creation, logging + the failure for later retry. + """ + + @impl BufferedTask + @decorate trace( + name: "fetch", + resource: "Indexer.Fetcher.Zilliqa.ScillaSmartContracts.run/2", + service: :indexer, + tracer: Tracer + ) + @spec run([Address.t()], any()) :: :ok | :retry + def run([%Address{hash: address_hash, contract_code: %Data{} = contract_code}], _opts) do + if String.valid?(contract_code.bytes) do + %{ + address_hash: address_hash, + contract_source_code: contract_code.bytes, + optimization: false, + language: :scilla + } + |> SmartContract.create_smart_contract() + |> case do + {:ok, _} -> + :ok + + {:error, error} -> + Logger.error("Failed to create smart contract for address: #{address_hash}\n#{inspect(error)}") + :retry + end + else + Logger.error("Invalid contract code. 
Skipping verification", %{address_hash: address_hash}) + :ok + end + end +end diff --git a/apps/indexer/lib/indexer/helper.ex b/apps/indexer/lib/indexer/helper.ex index 9d709360e986..e5870374acf6 100644 --- a/apps/indexer/lib/indexer/helper.ex +++ b/apps/indexer/lib/indexer/helper.ex @@ -8,13 +8,14 @@ defmodule Indexer.Helper do import EthereumJSONRPC, only: [ fetch_block_number_by_tag: 2, + id_to_params: 1, + integer_to_quantity: 1, json_rpc: 2, quantity_to_integer: 1, - integer_to_quantity: 1, request: 1 ] - alias EthereumJSONRPC.Block.ByNumber + alias EthereumJSONRPC.Block.{ByNumber, ByTag} alias EthereumJSONRPC.{Blocks, Transport} alias Explorer.Chain.Hash alias Explorer.SmartContract.Reader, as: ContractReader @@ -108,9 +109,9 @@ defmodule Indexer.Helper do first_block = max(last_safe_block - @block_check_interval_range_size, 1) with {:ok, first_block_timestamp} <- - get_block_timestamp_by_number(first_block, json_rpc_named_arguments, @infinite_retries_number), + get_block_timestamp_by_number_or_tag(first_block, json_rpc_named_arguments, @infinite_retries_number), {:ok, last_safe_block_timestamp} <- - get_block_timestamp_by_number(last_safe_block, json_rpc_named_arguments, @infinite_retries_number) do + get_block_timestamp_by_number_or_tag(last_safe_block, json_rpc_named_arguments, @infinite_retries_number) do block_check_interval = ceil((last_safe_block_timestamp - first_block_timestamp) / (last_safe_block - first_block) * 1000 / 2) @@ -589,8 +590,7 @@ defmodule Indexer.Helper do Map.put(acc, block_number, 0) end) |> Stream.map(fn {block_number, _} -> %{number: block_number} end) - |> Stream.with_index() - |> Enum.into(%{}, fn {params, id} -> {id, params} end) + |> id_to_params() |> Blocks.requests(&ByNumber.request(&1, transaction_details, false)) |> Enum.chunk_every(@block_by_number_chunk_size) |> Enum.reduce([], fn current_requests, results_acc -> @@ -610,22 +610,36 @@ defmodule Indexer.Helper do @doc """ Fetches block timestamp by its number using RPC request. + The number can be `:latest`. Performs a specified number of retries (up to) if the first attempt returns error. + + ## Parameters + - `number`: Block number or `:latest` to fetch the latest block. + - `json_rpc_named_arguments`: Configuration parameters for the JSON RPC connection. + - `retries`: Number of retry attempts if the request fails. + + ## Returns + - `{:ok, timestamp}` where `timestamp` is the block timestamp as a Unix timestamp. + - `{:error, reason}` if the request fails after all retries. """ - @spec get_block_timestamp_by_number(non_neg_integer(), list(), non_neg_integer()) :: + @spec get_block_timestamp_by_number_or_tag(non_neg_integer() | :latest, list(), non_neg_integer()) :: {:ok, non_neg_integer()} | {:error, any()} - def get_block_timestamp_by_number(number, json_rpc_named_arguments, retries \\ @finite_retries_number) do - func = &get_block_timestamp_by_number_inner/2 + def get_block_timestamp_by_number_or_tag(number, json_rpc_named_arguments, retries \\ @finite_retries_number) do + func = &get_block_timestamp_inner/2 args = [number, json_rpc_named_arguments] error_message = &"Cannot fetch block ##{number} or its timestamp. 
Error: #{inspect(&1)}" repeated_call(func, args, error_message, retries) end - defp get_block_timestamp_by_number_inner(number, json_rpc_named_arguments) do - result = - %{id: 0, number: number} - |> ByNumber.request(false) - |> json_rpc(json_rpc_named_arguments) + defp get_block_timestamp_inner(number, json_rpc_named_arguments) do + request = + if number == :latest do + ByTag.request(%{id: 0, tag: "latest"}) + else + ByNumber.request(%{id: 0, number: number}, false) + end + + result = json_rpc(request, json_rpc_named_arguments) with {:ok, block} <- result, false <- is_nil(block), diff --git a/apps/indexer/lib/indexer/nft_media_handler/backfiller.ex b/apps/indexer/lib/indexer/nft_media_handler/backfiller.ex new file mode 100644 index 000000000000..b8ebea6b81df --- /dev/null +++ b/apps/indexer/lib/indexer/nft_media_handler/backfiller.ex @@ -0,0 +1,104 @@ +defmodule Indexer.NFTMediaHandler.Backfiller do + @moduledoc """ + Module fetches from DB token instances which wasn't processed via NFTMediaHandler yet. Then put it to the queue. + Via get_instances/1 it's possible to get urls to fetch. + """ + alias Explorer.Chain.Token.Instance + + use GenServer + + def start_link(_) do + GenServer.start_link(__MODULE__, :ok, name: __MODULE__) + end + + @doc """ + Retrieves a specified number of instances from the queue. + + ## Parameters + - amount: The number of instances to retrieve. + + ## Returns + A list of instances. + """ + @spec get_instances(non_neg_integer) :: list + def get_instances(amount) do + if config()[:enabled?] do + GenServer.call(__MODULE__, {:get_instances, amount}) + else + [] + end + end + + @impl true + def init(_) do + %{ref: ref} = + Task.async(fn -> + Instance.stream_instances_to_resize_and_upload(&enqueue_if_queue_is_not_full/1) + end) + + {:ok, %{queue: %{}, ref_to_stream_task: ref, stream_is_over?: false}} + end + + # Enqueues the given instance if the queue is not full. + # if queue is full, it will wait enqueue_timeout() and call self again. + defp enqueue_if_queue_is_not_full(instance) do + url = Instance.get_media_url_from_metadata_for_nft_media_handler(instance.metadata) + + if !is_nil(url) do + if GenServer.call(__MODULE__, :not_full?) do + GenServer.cast(__MODULE__, {:append_to_queue, {url, instance.token_contract_address_hash, instance.token_id}}) + else + :timer.sleep(enqueue_timeout()) + + enqueue_if_queue_is_not_full(instance) + end + end + end + + # Handles the `:not_full?` call message. + # Returns whether the queue is not full. + + @impl true + def handle_call(:not_full?, _from, %{queue: queue} = state) do + {:reply, Enum.count(queue) < max_queue_size(), state} + end + + # Handles the `:get_instances` call message. + # Returns a specified number of instances from the queue. + @impl true + def handle_call({:get_instances, amount}, _from, %{queue: queue} = state) do + {to_return, remaining} = Enum.split(queue, amount) + {:reply, to_return, %{state | queue: remaining |> Enum.into(%{})}} + end + + # Handles the `:append_to_queue` cast message. + # Appends the given URL, token contract address hash, and token ID to the queue in the state. + @impl true + def handle_cast({:append_to_queue, {url, token_contract_address_hash, token_id}}, %{queue: queue} = state) do + {:noreply, %{state | queue: Map.put(queue, url, [{token_contract_address_hash, token_id} | queue[url] || []])}} + end + + # Handles the termination of the stream task. 
+ @impl true + def handle_info({ref, _answer}, %{ref_to_stream_task: ref} = state) do + {:noreply, %{state | stream_is_over?: true}} + end + + # Handles the termination of the stream task. + @impl true + def handle_info({:DOWN, ref, :process, _pid, _reason}, %{ref_to_stream_task: ref} = state) do + {:noreply, %{state | stream_is_over?: true}} + end + + defp max_queue_size do + config()[:queue_size] + end + + defp enqueue_timeout do + config()[:enqueue_busy_waiting_timeout] + end + + defp config do + Application.get_env(:nft_media_handler, __MODULE__) + end +end diff --git a/apps/indexer/lib/indexer/nft_media_handler/queue.ex b/apps/indexer/lib/indexer/nft_media_handler/queue.ex new file mode 100644 index 000000000000..a36a8c3cf16b --- /dev/null +++ b/apps/indexer/lib/indexer/nft_media_handler/queue.ex @@ -0,0 +1,265 @@ +defmodule Indexer.NFTMediaHandler.Queue do + @moduledoc """ + Queue for fetching media + """ + + use GenServer + + require Logger + alias Explorer.Chain.Token.Instance + alias Explorer.Prometheus.Instrumenter + alias Explorer.Token.MetadataRetriever + alias Indexer.NFTMediaHandler.Backfiller + + @queue_storage :queue_storage + @tasks_in_progress :tasks_in_progress + + @doc """ + Processes new inserted NFT instance. + Adds the instance to the queue if the media handler is enabled and input was in format {:ok, Instance.t()}. + + ## Parameters + + - initial_value: result of inserting an NFT instance. Either {:ok, %Instance{}} or some error. + + ## Returns + + initial_value as is. + """ + @spec process_new_instance(any()) :: any() + def process_new_instance({:ok, %Instance{} = nft} = initial_value) do + if Application.get_env(:nft_media_handler, :enabled?) do + url = Instance.get_media_url_from_metadata_for_nft_media_handler(nft.metadata) + + if url do + GenServer.cast(__MODULE__, {:add_to_queue, {nft.token_contract_address_hash, nft.token_id, url}}) + end + end + + initial_value + end + + def process_new_instance(initial_value), do: initial_value + + def get_urls_to_fetch(amount) do + GenServer.call(__MODULE__, {:get_urls_to_fetch, amount}) + end + + def store_result({:error, reason}, url) do + GenServer.cast(__MODULE__, {:handle_error, url, reason}) + end + + def store_result({:down, reason}, url) do + GenServer.cast(__MODULE__, {:handle_error, url, reason}) + end + + def store_result({result, media_type}, url) do + GenServer.cast(__MODULE__, {:finished, result, url, media_type}) + end + + def start_link(_) do + GenServer.start_link(__MODULE__, :ok, name: __MODULE__) + end + + def init(_) do + File.mkdir("./dets") + {:ok, queue} = :dets.open_file(@queue_storage, file: ~c"./dets/#{@queue_storage}", type: :bag) + {:ok, in_progress} = :dets.open_file(@tasks_in_progress, type: :set, file: ~c"./dets/#{@tasks_in_progress}") + + Process.flag(:trap_exit, true) + + {:ok, {queue, in_progress, nil}} + end + + def handle_cast( + {:add_to_queue, {token_address_hash, token_id, media_url}}, + {queue, in_progress, continuation} + ) do + case :dets.lookup(in_progress, media_url) do + [{_, instances, start_time}] -> + Logger.debug( + "Media url already in progress: #{media_url}, will append to instances: {#{to_string(token_address_hash)}, #{token_id}} " + ) + + dets_insert_wrapper(in_progress, {media_url, [{token_address_hash, token_id} | instances], start_time}) + + _ -> + case Cachex.get(cache_uniqueness_name(), media_url) do + {:ok, result} when is_map(result) -> + Logger.debug( + "Media url already fetched: #{media_url}, will take result from cache to: {#{to_string(token_address_hash)}, 
#{token_id}} " + ) + + Instance.set_cdn_result({token_address_hash, token_id}, result) + + _ -> + dets_insert_wrapper(queue, {media_url, {token_address_hash, token_id}}) + end + end + + {:noreply, {queue, in_progress, continuation}} + end + + def handle_cast({:finished, result, url, media_type}, {_queue, in_progress, _continuation} = state) + when is_list(result) do + case :dets.lookup(in_progress, url) do + [{_, instances, start_time}] -> + now = System.monotonic_time() + :dets.delete(in_progress, url) + + Instrumenter.increment_successfully_uploaded_media_number() + Instrumenter.media_processing_time(System.convert_time_unit(now - start_time, :native, :millisecond) / 1000) + + Enum.each(instances, fn instance_identifier -> + Instance.set_media_urls(instance_identifier, result, media_type) + end) + + put_result_to_cache(url, %{ + thumbnails: result, + media_type: Instance.media_type_to_string(media_type), + cdn_upload_error: nil + }) + + _ -> + Logger.warning("Failed to find instances in in_progress dets for url: #{url}, result: #{inspect(result)}") + end + + {:noreply, state} + end + + def handle_cast({:handle_error, url, reason}, {_queue, in_progress, _continuation} = state) do + case :dets.lookup(in_progress, url) do + [{_, instances, _start_time}] -> + :dets.delete(in_progress, url) + + Instrumenter.increment_failed_uploading_media_number() + + cdn_upload_error = reason |> inspect() |> MetadataRetriever.truncate_error() + + Enum.each(instances, fn instance_identifier -> + Instance.set_cdn_upload_error(instance_identifier, cdn_upload_error) + end) + + put_result_to_cache(url, %{thumbnails: nil, media_type: nil, cdn_upload_error: cdn_upload_error}) + + _ -> + Logger.warning("Failed to find instances in in_progress dets for url: #{url}, error: #{inspect(reason)}") + end + + {:noreply, state} + end + + def handle_call({:get_urls_to_fetch, amount}, _from, {queue, in_progress, continuation} = state) do + {high_priority_urls, continuation} = fetch_urls_from_dets(queue, amount, continuation) + now = System.monotonic_time() + + high_priority_instances = fetch_and_delete_instances_from_queue(queue, high_priority_urls, now) + + taken_amount = Enum.count(high_priority_urls) + + {urls, instances} = + if taken_amount < amount do + backfill_items = + (amount - taken_amount) + |> Backfiller.get_instances() + |> Enum.filter(fn backfill_item -> filter_fetched_backfill_url(backfill_item, state) end) + + {low_priority_instances, low_priority_urls} = + Enum.map_reduce(backfill_items, [], fn {url, instances}, acc -> + {{url, instances, now}, [url | acc]} + end) + + {high_priority_urls ++ low_priority_urls, high_priority_instances ++ low_priority_instances} + else + {high_priority_urls, high_priority_instances} + end + + dets_insert_wrapper(in_progress, instances) + {:reply, urls, {queue, in_progress, continuation}} + end + + @doc """ + Implementation of terminate callback. + Closes opened dets tables on application shutdown. 
+ """ + def terminate(_reason, {queue, in_progress, _continuation}) do + :dets.close(queue) + :dets.close(in_progress) + end + + defp fetch_urls_from_dets(queue_table, amount, continuation) do + query = {:"$1", :_} + + result = + if is_nil(continuation) do + :dets.match(queue_table, query, amount) + else + :dets.match(continuation) + end + + case result do + {:error, reason} -> + Logger.error("Failed to fetch urls from dets: #{inspect(reason)}") + {[], nil} + + :"$end_of_table" -> + {[], nil} + + {urls, :"$end_of_table"} -> + {urls |> List.flatten() |> Enum.uniq(), nil} + + {urls, continuation} -> + {urls |> List.flatten() |> Enum.uniq(), continuation} + end + end + + defp fetch_and_delete_instances_from_queue(queue, urls, start_time) do + Enum.map(urls, fn url -> + instances = + queue + |> :dets.lookup(url) + |> Enum.map(fn {_url, {_address_hash, _token_id} = instance} -> instance end) + + :dets.delete(queue, url) + + {url, instances, start_time} + end) + end + + defp cache_uniqueness_name do + Application.get_env(:nft_media_handler, :cache_uniqueness_name) + end + + defp put_result_to_cache(url, result) do + Cachex.put(cache_uniqueness_name(), url, result) + end + + defp filter_fetched_backfill_url({url, backfill_instances}, {_queue, in_progress, _continuation}) do + case :dets.lookup(in_progress, url) do + [{_, instances, start_time}] -> + Logger.debug("Media url already in progress: #{url}, will append to instances: #{inspect(backfill_instances)}") + + dets_insert_wrapper(in_progress, {url, instances ++ backfill_instances, start_time}) + false + + _ -> + case Cachex.get(cache_uniqueness_name(), url) do + {:ok, result} when is_map(result) -> + Logger.debug("Media url already fetched: #{url}, will copy from cache to: #{inspect(backfill_instances)}") + + Enum.each(backfill_instances, &Instance.set_cdn_result(&1, result)) + false + + _ -> + true + end + end + end + + defp dets_insert_wrapper(table, value) do + case :dets.insert(table, value) do + :ok -> :ok + {:error, reason} -> Logger.error("Failed to insert into dets #{table}: #{inspect(reason)}") + end + end +end diff --git a/apps/indexer/lib/indexer/pending_transactions_sanitizer.ex b/apps/indexer/lib/indexer/pending_transactions_sanitizer.ex index 11c0c260a571..089624ec5777 100644 --- a/apps/indexer/lib/indexer/pending_transactions_sanitizer.ex +++ b/apps/indexer/lib/indexer/pending_transactions_sanitizer.ex @@ -15,9 +15,7 @@ defmodule Indexer.PendingTransactionsSanitizer do alias Explorer.{Chain, Repo} alias Explorer.Chain.{Block, Transaction} - @interval :timer.hours(3) - - defstruct interval: @interval, + defstruct interval: nil, json_rpc_named_arguments: [] def child_spec([init_arguments]) do @@ -40,7 +38,7 @@ defmodule Indexer.PendingTransactionsSanitizer do def init(opts) when is_list(opts) do state = %__MODULE__{ json_rpc_named_arguments: Keyword.fetch!(opts, :json_rpc_named_arguments), - interval: opts[:interval] || @interval + interval: Application.get_env(:indexer, __MODULE__)[:interval] } Process.send_after(self(), :sanitize_pending_transactions, state.interval) @@ -127,7 +125,7 @@ defmodule Indexer.PendingTransactionsSanitizer do case transaction |> Changeset.change() - |> Repo.delete() do + |> Repo.delete(timeout: :infinity) do {:ok, _transaction} -> Logger.debug( "Transaction with hash #{pending_transaction_hash_string} successfully deleted from Blockscout DB because it doesn't exist in the archive node anymore", diff --git a/apps/indexer/lib/indexer/supervisor.ex b/apps/indexer/lib/indexer/supervisor.ex index 
857a79934792..404145a32a57 100644 --- a/apps/indexer/lib/indexer/supervisor.ex +++ b/apps/indexer/lib/indexer/supervisor.ex @@ -5,6 +5,8 @@ defmodule Indexer.Supervisor do use Supervisor + import Cachex.Spec + alias Explorer.Chain.BridgedToken alias Indexer.{ @@ -156,6 +158,10 @@ defmodule Indexer.Supervisor do [[memory_monitor: memory_monitor, json_rpc_named_arguments: json_rpc_named_arguments]] ), configure(Indexer.Fetcher.Optimism.WithdrawalEvent.Supervisor, [[memory_monitor: memory_monitor]]), + { + Indexer.Fetcher.Optimism.EIP1559ConfigUpdate.Supervisor, + [[memory_monitor: memory_monitor, json_rpc_named_arguments: json_rpc_named_arguments]] + }, configure(Indexer.Fetcher.PolygonEdge.Deposit.Supervisor, [[memory_monitor: memory_monitor]]), configure(Indexer.Fetcher.PolygonEdge.DepositExecute.Supervisor, [ [memory_monitor: memory_monitor, json_rpc_named_arguments: json_rpc_named_arguments] @@ -217,6 +223,7 @@ defmodule Indexer.Supervisor do configure(Indexer.Fetcher.Filecoin.AddressInfo.Supervisor, [ [memory_monitor: memory_monitor] ]), + {Indexer.Fetcher.Zilliqa.ScillaSmartContracts.Supervisor, [[memory_monitor: memory_monitor]]}, {Indexer.Fetcher.Beacon.Blob.Supervisor, [[memory_monitor: memory_monitor]]}, # Out-of-band fetchers @@ -255,6 +262,7 @@ defmodule Indexer.Supervisor do |> maybe_add_block_reward_fetcher( {BlockReward.Supervisor, [[json_rpc_named_arguments: json_rpc_named_arguments, memory_monitor: memory_monitor]]} ) + |> maybe_add_nft_media_handler_processes() Supervisor.init( all_fetchers, @@ -312,6 +320,44 @@ defmodule Indexer.Supervisor do end end + defp maybe_add_nft_media_handler_processes(fetchers) do + base_children = [ + Indexer.NFTMediaHandler.Queue, + {Cachex, + [ + Application.get_env(:nft_media_handler, :cache_uniqueness_name), + [ + hooks: [ + hook( + module: Cachex.Limit.Scheduled, + args: { + # setting cache max size + Application.get_env(:nft_media_handler, :cache_uniqueness_max_size), + # options for `Cachex.prune/3` + [], + # options for `Cachex.Limit.Scheduled` + [] + } + ) + ] + ] + ]} + ] + + children = + if Application.get_env(:nft_media_handler, Indexer.NFTMediaHandler.Backfiller)[:enabled?] do + [Indexer.NFTMediaHandler.Backfiller | base_children] + else + base_children + end + + if Application.get_env(:nft_media_handler, :enabled?) && !Application.get_env(:nft_media_handler, :worker?) do + fetchers ++ children + else + fetchers + end + end + defp configure(process, opts) do # todo: shouldn't we pay attention to process.disabled?() predicate? 
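    # configure/2 gates each fetcher on its `:enabled` flag from application config;
    # a sketch of the kind of entry it reads, taken from config/runtime.exs
    # (shown here for illustration only):
    #
    #     config :indexer, Indexer.Fetcher.Optimism.Deposit.Supervisor,
    #       enabled: ConfigHelper.chain_type() == :optimism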
if Application.get_env(:indexer, process)[:enabled] do
diff --git a/apps/indexer/mix.exs b/apps/indexer/mix.exs
index 17331a8bbdb8..fc7b08582e49 100644
--- a/apps/indexer/mix.exs
+++ b/apps/indexer/mix.exs
@@ -10,11 +10,11 @@ defmodule Indexer.MixProject do
       deps: deps(),
       deps_path: "../../deps",
       description: "Fetches block chain data from on-chain node for later reading with Explorer.",
-      elixir: "~> 1.13",
+      elixir: "~> 1.17",
       elixirc_paths: elixirc_paths(Mix.env()),
       lockfile: "../../mix.lock",
       start_permanent: Mix.env() == :prod,
-      version: "6.9.2",
+      version: "6.10.1",
       xref: [
         exclude: [
           Explorer.Chain.Optimism.Deposit,
@@ -71,7 +71,8 @@ defmodule Indexer.MixProject do
       {:spandex_datadog, "~> 1.0"},
       {:logger_json, "~> 5.1"},
       {:varint, "~> 1.4"},
-      {:utils, in_umbrella: true}
+      {:utils, in_umbrella: true},
+      {:cachex, "~> 4.0"}
     ]
   end
diff --git a/apps/nft_media_handler/.gitignore b/apps/nft_media_handler/.gitignore
new file mode 100644
index 000000000000..b2f515f982e9
--- /dev/null
+++ b/apps/nft_media_handler/.gitignore
@@ -0,0 +1,26 @@
+# The directory Mix will write compiled artifacts to.
+/_build/
+
+# If you run "mix test --cover", coverage assets end up here.
+/cover/
+
+# The directory Mix downloads your dependencies sources to.
+/deps/
+
+# Where third-party dependencies like ExDoc output generated docs.
+/doc/
+
+# Ignore .fetch files in case you like to edit your project deps locally.
+/.fetch
+
+# If the VM crashes, it generates a dump, let's ignore it too.
+erl_crash.dump
+
+# Also ignore archive artifacts (built via "mix archive.build").
+*.ez
+
+# Ignore package tarball (built via "mix hex.build").
+nft_media_handler-*.tar
+
+# Temporary files, for example, from tests.
+/tmp/
diff --git a/apps/nft_media_handler/README.md b/apps/nft_media_handler/README.md
new file mode 100644
index 000000000000..99de8f3e21d0
--- /dev/null
+++ b/apps/nft_media_handler/README.md
@@ -0,0 +1,47 @@
+# NFT Media Handler
+
+`NFTMediaHandler` is an application for resizing and uploading images to R2/S3 storage.
+
+## Configuration
+
+You can modify the application settings in the `config/config.exs` file. Key parameters include:
+
+- `:enabled?` - Enable/disable the application.
+- `:remote?` - Use remote mode.
+- `:worker?` - Enable worker mode.
+- `:worker_concurrency` - Number of concurrent tasks.
+- `:worker_batch_size` - Batch size for tasks.
+- `:worker_spawn_tasks_timeout` - Timeout between task spawns.
+- `:tmp_dir` - Temporary directory for storing files.
+
+## Project Structure
+
+- `lib/nft_media_handler/application.ex` - Main application module.
+- `lib/nft_media_handler.ex` - Main module for processing and uploading media.
+- `lib/nft_media_handler/dispatcher.ex` - Module for managing tasks.
+- `lib/nft_media_handler/dispatcher_interface.ex` - Interface for interacting with the dispatcher.
+- `lib/nft_media_handler/image/resizer.ex` - Module for resizing images.
+- `lib/nft_media_handler/media/fetcher.ex` - Module for fetching media from various sources.
+- `lib/nft_media_handler/r2/uploader.ex` - Module for uploading images to R2/S3.
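+
+## Configuration Example
+
+A minimal sketch of how these settings might look (the values below are illustrative,
+not authoritative; in the umbrella they are actually derived from `NFT_MEDIA_HANDLER_*`
+environment variables in `config/runtime.exs`):
+
+```elixir
+config :nft_media_handler,
+  enabled?: true,
+  # run everything in a single node (no remote workers)
+  remote?: false,
+  worker?: false,
+  worker_concurrency: 10,
+  worker_batch_size: 10,
+  # delay between task spawns, in milliseconds
+  worker_spawn_tasks_timeout: 100,
+  tmp_dir: "./temp"
+```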
+
+## Usage Examples
+
+### Resizing an Image
+To resize an image, use the `NFTMediaHandler.Image.Resizer.resize/3` function:
+```elixir
+{:ok, image} = Vix.Vips.Image.new_from_file("path/to/image.jpg")
+resized_images = NFTMediaHandler.Image.Resizer.resize(image, "http://example.com/image.jpg", ".jpg")
+```
+
+### Uploading an Image
+To upload an image, use the `NFTMediaHandler.R2.Uploader.upload_image/3` function:
+```elixir
+{:ok, result} = NFTMediaHandler.R2.Uploader.upload_image(image_binary, "image.jpg", "folder")
+```
+
+### Fetching Media
+To fetch media, use the `NFTMediaHandler.Media.Fetcher.fetch_media/2` function:
+```elixir
+{:ok, media_type, body} = NFTMediaHandler.Media.Fetcher.fetch_media("http://example.com/media.jpg", [])
+```
diff --git a/apps/nft_media_handler/lib/nft_media_handler.ex b/apps/nft_media_handler/lib/nft_media_handler.ex
new file mode 100644
index 000000000000..5b068356e793
--- /dev/null
+++ b/apps/nft_media_handler/lib/nft_media_handler.ex
@@ -0,0 +1,198 @@
+defmodule NFTMediaHandler do
+  @moduledoc """
+  Resizes and uploads images to an R2/S3 bucket.
+  """
+
+  require Logger
+
+  alias Explorer.Token.MetadataRetriever, as: TokenMetadataRetriever
+  alias Image.Video
+  alias NFTMediaHandler.Image.Resizer
+  alias NFTMediaHandler.Media.Fetcher
+  alias NFTMediaHandler.R2.Uploader
+  alias Vix.Vips.Image, as: VipsImage
+
+  @doc """
+  Prepares and uploads media by its URL.
+
+  ## Parameters
+
+  - url: The URL of the media to be prepared and uploaded.
+  - r2_folder: The destination folder in the R2 bucket where the media will be uploaded.
+
+  ## Returns
+
+  - `{:error, reason}` if the preparation or upload fails.
+  - A tuple containing a list in `Explorer.Chain.Token.Instance.Thumbnails` format and a tuple with the content type if successful.
+  """
+  @spec prepare_and_upload_by_url(binary(), binary()) :: {:error, any()} | {list(), {binary(), binary()}}
+  def prepare_and_upload_by_url(url, r2_folder) do
+    with {prepared_url, headers} <- maybe_process_ipfs(url),
+         {:fetch, {:ok, media_type, body}} <- {:fetch, Fetcher.fetch_media(prepared_url, headers)},
+         {:ok, result} <- prepare_and_upload_inner(media_type, body, url, r2_folder) do
+      result
+    else
+      {:fetch, {:error, reason}} ->
+        Logger.warning("Error on fetching media: #{inspect(reason)}, from url (#{url})")
+        {:error, reason}

+      {:error, reason} ->
+        {:error, reason}
+    end
+  end
+
+  defp prepare_and_upload_inner({"image", _} = media_type, initial_image_binary, url, r2_folder) do
+    with {:image, {:ok, image}} <- {:image, Image.from_binary(initial_image_binary, pages: -1)},
+         extension <- media_type_to_extension(media_type),
+         thumbnails <- Resizer.resize(image, url, ".#{extension}"),
+         {:original, {:ok, _result}} <-
+           {:original,
+            Uploader.upload_image(
+              initial_image_binary,
+              Resizer.generate_file_name(url, ".#{extension}", "original"),
+              r2_folder
+            )},
+         {:thumbnails, {:ok, _result}} <- {:thumbnails, Uploader.upload_images(thumbnails, r2_folder)} do
+      file_path = Path.join(r2_folder, Resizer.generate_file_name(url, ".#{extension}", "{}"))
+      original_uploaded?
= true + uploaded_thumbnails_sizes = thumbnails |> Enum.map(&elem(&1, 0)) + {:ok, {[file_path, uploaded_thumbnails_sizes, original_uploaded?], media_type}} + else + {:image, {:error, reason}} -> + Logger.warning("Error on open image from url (#{url}): #{inspect(reason)}") + {:error, reason} + + {type, {:error, reason}} -> + Logger.warning("Error on uploading #{type} image from url (#{url}): #{inspect(reason)}") + {:error, reason} + end + end + + defp prepare_and_upload_inner({"video", _} = media_type, body, url, r2_folder) do + extension = media_type_to_extension(media_type) + file_name = Resizer.generate_file_name(url, ".#{extension}", "original") + path = "#{Application.get_env(:nft_media_handler, :tmp_dir)}#{file_name}" + + with {:file, :ok} <- {:file, File.write(path, body)}, + {:video, {:ok, image}} <- + {:video, + Video.with_video(path, fn video -> + Video.image_from_video(video, frame: 0) + end)}, + _ <- remove_file(path), + thumbnails when thumbnails != [] <- image |> Resizer.resize(url, ".jpg"), + {:thumbnails, {:ok, _result}} <- {:thumbnails, Uploader.upload_images(thumbnails, r2_folder)} do + file_path = Path.join(r2_folder, Resizer.generate_file_name(url, ".jpg", "{}")) + uploaded_thumbnails_sizes = thumbnails |> Enum.map(&elem(&1, 0)) + original_uploaded? = true + + {:ok, {[file_path, uploaded_thumbnails_sizes, original_uploaded?], media_type}} + else + {:file, reason} -> + Logger.error("Error while writing video to file: #{inspect(reason)}, url: #{url}") + {:error, reason} + + {:video, {:error, reason}} -> + Logger.error("Error while taking zero frame from video: #{inspect(reason)}, url: #{url}") + remove_file(path) + {:error, reason} + + [] -> + Logger.error("Error while resizing video: No thumbnails generated, url: #{url}") + {:error, :no_thumbnails} + + {:thumbnails, {:error, reason}} -> + Logger.error("Error while uploading video thumbnails: #{inspect(reason)}, url: #{url}") + {:error, reason} + end + end + + defp media_type_to_extension({type, subtype}) do + [extension | _] = MIME.extensions("#{type}/#{subtype}") + extension + end + + @doc """ + Converts an image to a binary format. + + ## Parameters + + - `image`: The `Vix.Vips.Image` struct representing the image to be converted. + - `file_name`: used only for .gif. + - `extension`: The extension of the image format. + + ## Returns + + - `:file_error` if there is an error related to file operations. + - `{:error, reason}` if the conversion fails for any other reason. + - `{:ok, binary}` if the conversion is successful, with the binary representing the image. 
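+
+  ## Examples
+
+  A hypothetical call (the returned binary is truncated for illustration):
+
+      iex> NFTMediaHandler.image_to_binary(resized_image, "thumb.png", ".png")
+      {:ok, <<137, 80, 78, 71, ...>>}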
+ """ + @spec image_to_binary(Vix.Vips.Image.t(), binary(), binary()) :: :file_error | {:error, any()} | {:ok, binary()} + def image_to_binary(resized_image, _file_name, extension) when extension in [".jpg", ".png", ".webp"] do + VipsImage.write_to_buffer(resized_image, "#{extension}[Q=70,strip]") + end + + # workaround, because VipsImage.write_to_buffer/2 does not support .gif + def image_to_binary(resized_image, file_name, ".gif") do + path = "#{Application.get_env(:nft_media_handler, :tmp_dir)}#{file_name}" + + with :ok <- VipsImage.write_to_file(resized_image, path), + {:ok, result} <- File.read(path) do + remove_file(path) + {:ok, result} + else + {:error, reason} -> + Logger.error("Error while writing image to file: #{inspect(reason)}, path: #{path}") + :file_error + end + end + + defp remove_file(path) do + case File.rm(path) do + :ok -> + :ok + + {:error, reason} -> + Logger.error("Unable to delete file, reason: #{inspect(reason)}, path: #{path}") + :error + end + end + + # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity + defp maybe_process_ipfs(uri) do + case URI.parse(uri) do + %URI{scheme: "ipfs", host: host, path: path} -> + resource_id = + with "ipfs" <- host, + "/" <> resource_id <- path do + resource_id + else + _ -> + if is_nil(path), do: host, else: host <> path + end + + {TokenMetadataRetriever.ipfs_link(resource_id), TokenMetadataRetriever.ipfs_headers()} + + %URI{scheme: _, path: "/ipfs/" <> resource_id} -> + {TokenMetadataRetriever.ipfs_link(resource_id), TokenMetadataRetriever.ipfs_headers()} + + %URI{scheme: _, path: "ipfs/" <> resource_id} -> + {TokenMetadataRetriever.ipfs_link(resource_id), TokenMetadataRetriever.ipfs_headers()} + + %URI{scheme: scheme} when not is_nil(scheme) -> + {uri, []} + + %URI{path: path} -> + case path do + "Qm" <> <<_::binary-size(44)>> = resource_id -> + {TokenMetadataRetriever.ipfs_link(resource_id), TokenMetadataRetriever.ipfs_headers()} + + "bafybe" <> _ = resource_id -> + {TokenMetadataRetriever.ipfs_link(resource_id), TokenMetadataRetriever.ipfs_headers()} + + _ -> + {uri, []} + end + end + end +end diff --git a/apps/nft_media_handler/lib/nft_media_handler/application.ex b/apps/nft_media_handler/lib/nft_media_handler/application.ex new file mode 100644 index 000000000000..1f415898b080 --- /dev/null +++ b/apps/nft_media_handler/lib/nft_media_handler/application.ex @@ -0,0 +1,36 @@ +defmodule NFTMediaHandler.Application do + @moduledoc """ + This is the `Application` module for `NFTMediaHandler`. + """ + use Application + + @impl Application + def start(_type, _args) do + File.mkdir(Application.get_env(:nft_media_handler, :tmp_dir)) + + base_children = [ + Supervisor.child_spec({Task.Supervisor, name: NFTMediaHandler.TaskSupervisor}, id: NFTMediaHandler.TaskSupervisor), + NFTMediaHandler.Dispatcher + ] + + children = + if Application.get_env(:nft_media_handler, :standalone_media_worker?) do + [ + NFTMediaHandler.DispatcherInterface + | base_children + ] + else + base_children + end + + opts = [strategy: :one_for_one, name: NFTMediaHandler.Supervisor, max_restarts: 1_000] + + if Application.get_env(:nft_media_handler, :enabled?) && + (!Application.get_env(:nft_media_handler, :remote?) || + (Application.get_env(:nft_media_handler, :remote?) 
&& Application.get_env(:nft_media_handler, :worker?))) do
      Supervisor.start_link(children, opts)
    else
      Supervisor.start_link([], opts)
    end
  end
end
diff --git a/apps/nft_media_handler/lib/nft_media_handler/dispatcher.ex b/apps/nft_media_handler/lib/nft_media_handler/dispatcher.ex
new file mode 100644
index 000000000000..52e08851c37c
--- /dev/null
+++ b/apps/nft_media_handler/lib/nft_media_handler/dispatcher.ex
@@ -0,0 +1,96 @@
+defmodule NFTMediaHandler.Dispatcher do
+  @moduledoc """
+  Module responsible for spawning tasks that upload images
+  and for handling responses from those tasks.
+  """
+  use GenServer
+
+  alias NFTMediaHandler.DispatcherInterface
+  alias Task.Supervisor, as: TaskSupervisor
+
+  require Logger
+
+  def start_link(_) do
+    GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
+  end
+
+  @impl true
+  def init(_) do
+    Process.send(self(), :spawn_tasks, [])
+
+    {:ok,
+     %{
+       max_concurrency: Application.get_env(:nft_media_handler, :worker_concurrency),
+       current_concurrency: 0,
+       batch_size: Application.get_env(:nft_media_handler, :worker_batch_size),
+       ref_to_batch: %{}
+     }}
+  end
+
+  @impl true
+  def handle_info(
+        :spawn_tasks,
+        %{
+          max_concurrency: max_concurrency,
+          current_concurrency: current_concurrency,
+          ref_to_batch: tasks_map,
+          batch_size: batch_size
+        } = state
+      )
+      when max_concurrency > current_concurrency do
+    to_spawn = max_concurrency - current_concurrency
+
+    {urls, node, folder} =
+      (batch_size * to_spawn)
+      |> DispatcherInterface.get_urls()
+
+    spawned =
+      urls
+      |> Enum.chunk_every(batch_size)
+      |> Enum.map(&run_task(&1, node, folder))
+
+    Process.send_after(self(), :spawn_tasks, timeout())
+
+    {:noreply,
+     %{
+       state
+       | current_concurrency: current_concurrency + Enum.count(spawned),
+         ref_to_batch: Map.merge(tasks_map, Enum.into(spawned, %{}))
+     }}
+  end
+
+  @impl true
+  def handle_info(:spawn_tasks, state) do
+    Process.send_after(self(), :spawn_tasks, timeout())
+    {:noreply, state}
+  end
+
+  @impl true
+  def handle_info({ref, _result}, %{current_concurrency: current_concurrency, ref_to_batch: tasks_map} = state) do
+    Process.demonitor(ref, [:flush])
+    Process.send(self(), :spawn_tasks, [])
+
+    {:noreply, %{state | current_concurrency: current_concurrency - 1, ref_to_batch: Map.drop(tasks_map, [ref])}}
+  end
+
+  defp run_task(batch, node, folder),
+    do:
+      {TaskSupervisor.async_nolink(NFTMediaHandler.TaskSupervisor, fn ->
+         Enum.map(batch, fn url ->
+           try do
+             url
+             |> NFTMediaHandler.prepare_and_upload_by_url(folder)
+             |> DispatcherInterface.store_result(url, node)
+           rescue
+             error ->
+               Logger.error(
+                 "Failed to fetch and upload url (#{url}): #{Exception.format(:error, error, __STACKTRACE__)}"
+               )
+
+               DispatcherInterface.store_result({:error, error}, url, node)
+           end
+         end)
+       end).ref, {batch, node}}
+
+  defp timeout, do: Application.get_env(:nft_media_handler, :worker_spawn_tasks_timeout)
+end
diff --git a/apps/nft_media_handler/lib/nft_media_handler/dispatcher_interface.ex b/apps/nft_media_handler/lib/nft_media_handler/dispatcher_interface.ex
new file mode 100644
index 000000000000..24ed85f5ff07
--- /dev/null
+++ b/apps/nft_media_handler/lib/nft_media_handler/dispatcher_interface.ex
@@ -0,0 +1,93 @@
+defmodule NFTMediaHandler.DispatcherInterface do
+  @moduledoc """
+  Interface to call the Indexer.NFTMediaHandler.Queue.
+ Calls performed either via direct call to Queue module, or via :rpc.call/4 + """ + require Logger + use GenServer + + def start_link(_) do + GenServer.start_link(__MODULE__, :ok, name: __MODULE__) + end + + @doc """ + Initializes the dispatcher interface. + """ + @impl true + def init(_) do + nodes = :nft_media_handler |> Application.get_env(:nodes_map) |> Map.to_list() + + if Enum.empty?(nodes) do + {:stop, "NFT_MEDIA_HANDLER_NODES_MAP must contain at least one node"} + else + {:ok, %{used_nodes: [], unused_nodes: nodes}} + end + end + + @doc """ + Handles the `:take_node_to_call` call message. + Takes a node from the list of nodes to call. Nodes rotate in a round-robin fashion. + """ + @impl true + def handle_call(:take_node_to_call, _from, %{used_nodes: used_nodes, unused_nodes: unused_nodes}) do + {used, unused, node_to_call} = + case unused_nodes do + [] -> + [to_call | remains] = used_nodes |> Enum.reverse() + {[to_call], remains, to_call} + + [to_call | remains] -> + {[to_call | used_nodes], remains, to_call} + end + + {:reply, node_to_call, %{used_nodes: used, unused_nodes: unused}} + end + + @doc """ + Retrieves a list of URLs. + + ## Parameters + - amount: The number of URLs to retrieve. + + ## Returns + {list_of_urls, node_where_urls_from, r2_folder_to_store_images} + """ + @spec get_urls(non_neg_integer()) :: {list(), atom(), String.t()} + def get_urls(amount) do + args = [amount] + function = :get_urls_to_fetch + + if Application.get_env(:nft_media_handler, :remote?) do + {node, folder} = GenServer.call(__MODULE__, :take_node_to_call) + + {node |> :rpc.call(Indexer.NFTMediaHandler.Queue, :get_urls_to_fetch, args) |> process_rpc_response(node), node, + folder} + else + folder = Application.get_env(:nft_media_handler, :nodes_map)[:self] + {apply(Indexer.NFTMediaHandler.Queue, function, args), :self, folder} + end + end + + @doc """ + Stores the result of the media fetching process. If the remote flag is set to true, the result will be stored in a remote node. + """ + @spec store_result(any(), String.t(), atom()) :: any() + def store_result(result, url, node) do + remote_call([result, url], :store_result, node, Application.get_env(:nft_media_handler, :remote?)) + end + + defp remote_call(args, function, node, true) do + :rpc.call(node, Indexer.NFTMediaHandler.Queue, function, args) + end + + defp remote_call(args, function, _node, false) do + apply(Indexer.NFTMediaHandler.Queue, function, args) + end + + defp process_rpc_response({:badrpc, _reason} = error, node) do + Logger.error("Received an error from #{node}: #{inspect(error)}") + [] + end + + defp process_rpc_response(response, _node), do: response +end diff --git a/apps/nft_media_handler/lib/nft_media_handler/image/resizer.ex b/apps/nft_media_handler/lib/nft_media_handler/image/resizer.ex new file mode 100644 index 000000000000..659e4c9f0fb4 --- /dev/null +++ b/apps/nft_media_handler/lib/nft_media_handler/image/resizer.ex @@ -0,0 +1,64 @@ +defmodule NFTMediaHandler.Image.Resizer do + @moduledoc """ + Resizes an image + """ + + @sizes [{60, "60x60"}, {250, "250x250"}, {500, "500x500"}] + require Logger + + @doc """ + Resizes the given image. + + ## Parameters + + - image: The `Vix.Vips.Image` struct representing the image to be resized. + - url: The URL of the image. + - extension: The file extension of the image. + + ## Returns + + A list containing the resized image data. 
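+
+  ## Examples
+
+  A sketch of the expected shape (hash and binaries abbreviated); each element is
+  a `{size_in_pixels, binary, file_name}` tuple, and target sizes at or above the
+  source image's largest dimension are skipped:
+
+      iex> NFTMediaHandler.Image.Resizer.resize(image, "http://example.com/image.png", ".png")
+      [{60, <<...>>, "<sha>_60x60.png"}, {250, <<...>>, "<sha>_250x250.png"}, {500, <<...>>, "<sha>_500x500.png"}]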
+ + """ + @spec resize(Vix.Vips.Image.t(), binary(), binary()) :: list() + def resize(image, url, extension) do + max_size = max(Image.width(image), Image.height(image) / Image.pages(image)) + + @sizes + |> Enum.map(fn {int_size, size} -> + new_file_name = generate_file_name(url, extension, size) + + with {:size, true} <- {:size, max_size > int_size}, + {:ok, resized_image} <- Image.thumbnail(image, size, []), + {:ok, binary} <- NFTMediaHandler.image_to_binary(resized_image, new_file_name, extension) do + {int_size, binary, new_file_name} + else + {:size, _} -> + Logger.debug("Skipped #{size} resizing due to small image size") + nil + + error -> + Logger.warning("Error while #{size} resizing: #{inspect(error)}") + nil + end + end) + |> Enum.reject(&is_nil/1) + end + + @doc """ + Generates a file name for the resized image. + + ## Parameters + - `url`: image url. + - `extension`: file extension of the image. + - `size`: size in pixels ("50x50", "500x500"). + + ## Returns + - `String.t()`: The generated file name as a string in format "\#{uid}_\#{size}\#{extension}" where uid is a :sha hash of the url. + """ + @spec generate_file_name(binary(), binary(), binary()) :: nonempty_binary() + def generate_file_name(url, extension, size) do + uid = :sha |> :crypto.hash(url) |> Base.encode16(case: :lower) + "#{uid}_#{size}#{extension}" + end +end diff --git a/apps/nft_media_handler/lib/nft_media_handler/media/fetcher.ex b/apps/nft_media_handler/lib/nft_media_handler/media/fetcher.ex new file mode 100644 index 000000000000..05d3e44da4d5 --- /dev/null +++ b/apps/nft_media_handler/lib/nft_media_handler/media/fetcher.ex @@ -0,0 +1,60 @@ +defmodule NFTMediaHandler.Media.Fetcher do + @moduledoc """ + Module fetches media from various sources + """ + + @supported_image_types ["png", "jpeg", "gif", "webp"] + @supported_video_types ["mp4"] + + import Utils.TokenInstanceHelper, only: [media_type: 3] + + @doc """ + Fetches media from the given URL with the specified headers. + + ## Parameters + + - url: A binary string representing the URL to fetch the media from. + - headers: A list of headers to include in the request. + + ## Returns + + The fetched media content. 
+
+  ## Examples
+
+      iex> fetch_media("http://example.com/media", [{"Authorization", "Bearer token"}])
+      {:ok, {"image", "png"}, body}
+
+  """
+  @spec fetch_media(binary(), list()) :: {:error, any()} | {:ok, nil | tuple(), any()}
+  def fetch_media(url, headers) when is_binary(url) do
+    with media_type <- media_type(url, headers, false),
+         {:support, true} <- {:support, media_type_supported?(media_type)},
+         {:ok, %HTTPoison.Response{status_code: 200, body: body}} <-
+           HTTPoison.get(url, headers, follow_redirect: true, max_body_length: 20_000_000) do
+      {:ok, media_type, body}
+    else
+      {:support, false} ->
+        {:error, :unsupported_media_type}
+
+      {:ok, %HTTPoison.Response{status_code: status_code, body: _body}} ->
+        {:error, status_code}
+
+      {:error, %HTTPoison.Error{reason: reason}} ->
+        {:error, reason}
+    end
+  end
+
+  @spec media_type_supported?(any()) :: boolean()
+  defp media_type_supported?({"image", image_type}) when image_type in @supported_image_types do
+    true
+  end
+
+  defp media_type_supported?({"video", video_type}) when video_type in @supported_video_types do
+    true
+  end
+
+  defp media_type_supported?(_) do
+    false
+  end
+end
diff --git a/apps/nft_media_handler/lib/nft_media_handler/r2/uploader.ex b/apps/nft_media_handler/lib/nft_media_handler/r2/uploader.ex
new file mode 100644
index 000000000000..5ecebf44bc05
--- /dev/null
+++ b/apps/nft_media_handler/lib/nft_media_handler/r2/uploader.ex
@@ -0,0 +1,51 @@
+defmodule NFTMediaHandler.R2.Uploader do
+  @moduledoc """
+  Uploads an image to R2/S3
+  """
+  alias ExAws.S3
+
+  @doc """
+  Uploads an image to the specified destination.
+
+  ## Parameters
+
+  - `file_binary` (binary): The binary data of the image to be uploaded.
+  - `file_name` (binary): The name of the image file in the R2 bucket.
+  - `r2_folder` (binary): The folder in the R2 bucket where the image will be stored.
+
+  ## Returns
+
+  - `{:ok, result}`: If the upload is successful, returns a tuple with `:ok` and the result.
+  - `{:error, reason}`: If the upload fails, returns a tuple with `:error` and the reason for the failure.
+  """
+  @spec upload_image(binary(), binary(), binary()) :: {:ok, any()} | {:error, any()}
+  def upload_image(file_binary, file_name, r2_folder) do
+    r2_config = Application.get_env(:ex_aws, :s3)
+    file_path = Path.join(r2_folder, file_name)
+
+    r2_config[:bucket_name]
+    |> S3.put_object(file_path, file_binary)
+    |> ExAws.request()
+  end
+
+  @doc """
+  Uploads a list of images to the specified R2 folder.
+
+  ## Parameters
+
+  - images: A list of `{pixel_size, file_binary, file_name}` tuples to be uploaded.
+  - r2_folder: The destination folder in R2 where the images will be uploaded.
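+
+  ## Returns
+
+  - `{:ok, nil}` if every image in the list was uploaded successfully.
+  - `{:error, reason}` from the first upload that fails; the remaining images are skipped.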
+ """ + @spec upload_images(list(), binary()) :: {:ok, any()} | {:error, any()} + def upload_images(images, r2_folder) do + Enum.reduce_while(images, {:ok, nil}, fn {_pixel_size, file_binary, file_name}, _acc -> + case upload_image(file_binary, file_name, r2_folder) do + {:ok, _} -> + {:cont, {:ok, nil}} + + {:error, reason} -> + {:halt, {:error, reason}} + end + end) + end +end diff --git a/apps/nft_media_handler/mix.exs b/apps/nft_media_handler/mix.exs new file mode 100644 index 000000000000..e6ad16df45c8 --- /dev/null +++ b/apps/nft_media_handler/mix.exs @@ -0,0 +1,47 @@ +defmodule NFTMediaHandler.MixProject do + use Mix.Project + + def project do + [ + app: :nft_media_handler, + version: "6.10.1", + build_path: "../../_build", + config_path: "../../config/config.exs", + deps_path: "../../deps", + lockfile: "../../mix.lock", + elixir: "~> 1.17", + start_permanent: Mix.env() == :prod, + deps: deps() + ] + end + + # Run "mix help compile.app" to learn about applications. + def application do + [mod: {NFTMediaHandler.Application, []}, extra_applications: [:logger]] + end + + # Run "mix help deps" to learn about dependencies. + defp deps do + [ + {:ex_aws, "~> 2.0"}, + {:ex_aws_s3, "~> 2.0"}, + {:jason, "~> 1.3"}, + {:hackney, "~> 1.9"}, + {:sweet_xml, "~> 0.7"}, + {:image, "~> 0.54"}, + {:httpoison, "~> 2.0"}, + {:evision, "~> 0.2"}, + {:mime, "~> 2.0"}, + {:utils, in_umbrella: true} + ] + |> optionally_nft_media_handler() + end + + defp optionally_nft_media_handler(deps) do + if Application.get_env(:nft_media_handler, :remote?) do + deps + else + deps ++ [{:indexer, in_umbrella: true}] + end + end +end diff --git a/apps/utils/lib/utils/token_instance_helper.ex b/apps/utils/lib/utils/token_instance_helper.ex new file mode 100644 index 000000000000..cd6f6bbe1588 --- /dev/null +++ b/apps/utils/lib/utils/token_instance_helper.ex @@ -0,0 +1,70 @@ +defmodule Utils.TokenInstanceHelper do + @moduledoc """ + Auxiliary functions for NFTs + """ + + @doc """ + Determines the media type of the given URL. + + ## Parameters + + - url: The URL to check the media type for. + - headers: Optional list of headers to include in the request. Defaults to an empty list. + - treat_data_as_valid_media_type?: Optional boolean flag to treat url of `data:image/` format as a valid media type. Defaults to true. + + ## Returns + + The media type of the given URL, or nil + """ + @spec media_type(binary(), list(), boolean()) :: {binary(), binary()} | nil + def media_type(url, headers \\ [], treat_data_as_valid_media_type? \\ true) + + def media_type("data:image/" <> _data, _headers, true) do + {"image", ""} + end + + def media_type("data:video/" <> _data, _headers, true) do + {"video", ""} + end + + def media_type("data:" <> _data, _headers, _) do + nil + end + + def media_type(media_src, headers, _) when not is_nil(media_src) do + ext = media_src |> Path.extname() |> String.trim() + + mime_type = + if ext == "" do + process_missing_extension(media_src, headers) + else + ext_with_dot = + media_src + |> Path.extname() + + "." 
<> ext = ext_with_dot + + ext + |> MIME.type() + end + + if mime_type do + mime_type |> String.split("/") |> List.to_tuple() + else + nil + end + end + + def media_type(nil, _headers, _), do: nil + + defp process_missing_extension(media_src, headers) do + case HTTPoison.head(media_src, headers, follow_redirect: true) do + {:ok, %HTTPoison.Response{status_code: 200, headers: headers}} -> + headers_map = Map.new(headers, fn {key, value} -> {String.downcase(key), value} end) + headers_map["content-type"] + + _ -> + nil + end + end +end diff --git a/apps/utils/mix.exs b/apps/utils/mix.exs index a4eae3988c0d..25aef03e5dfe 100644 --- a/apps/utils/mix.exs +++ b/apps/utils/mix.exs @@ -4,7 +4,7 @@ defmodule Utils.MixProject do def project do [ app: :utils, - version: "6.9.2", + version: "6.10.1", build_path: "../../_build", # config_path: "../../config/config.exs", deps_path: "../../deps", @@ -30,7 +30,9 @@ defmodule Utils.MixProject do # Run "mix help deps" to learn about dependencies. defp deps do [ - {:credo, "~> 1.5", only: [:test, :dev], runtime: false} + {:credo, "~> 1.5", only: [:test, :dev], runtime: false}, + {:httpoison, "~> 2.0"}, + {:mime, "~> 2.0"} ] end diff --git a/bin/version_bump.sh b/bin/version_bump.sh index 78f8fa576c4d..512400a554d0 100755 --- a/bin/version_bump.sh +++ b/bin/version_bump.sh @@ -8,9 +8,11 @@ MIX_FILES=( "$(pwd)/apps/indexer/mix.exs" "$(pwd)/apps/ethereum_jsonrpc/mix.exs" "$(pwd)/apps/utils/mix.exs" + "$(pwd)/apps/nft_media_handler/mix.exs" ) CONFIG_FILE="$(pwd)/rel/config.exs" DOCKER_COMPOSE_FILE="$(pwd)/docker-compose/docker-compose.yml" +DOCKER_COMPOSE_NO_SERVICES_FILE="$(pwd)/docker-compose/no-services.yml" MAKE_FILE="$(pwd)/docker/Makefile" WORKFLOW_FILES=($(find "$(pwd)/.github/workflows" -type f \( -name "pre-release*" -o -name "release*" -o -name "publish-regular-docker-image-on-demand*" -o -name "publish-docker-image-*" \))) METADATA_RETRIEVER_FILE="$(pwd)/apps/explorer/lib/explorer/token/metadata_retriever.ex" @@ -62,8 +64,9 @@ bump_version() { sed -i '' "s/version: \"$current_version\"/version: \"$new_version\"/" "$MIX_FILE" done - sed -i '' "s/version: \"$current_version/version: \"$new_version/" "$CONFIG_FILE" + sed -i '' "s/version: \"$current_version\"/version: \"$new_version\"/" "$CONFIG_FILE" sed -i '' "s/RELEASE_VERSION: $current_version/RELEASE_VERSION: $new_version/" "$DOCKER_COMPOSE_FILE" + sed -i '' "s/RELEASE_VERSION: $current_version/RELEASE_VERSION: $new_version/" "$DOCKER_COMPOSE_NO_SERVICES_FILE" sed -i '' "s/RELEASE_VERSION ?= '$current_version'/RELEASE_VERSION ?= '$new_version'/" "$MAKE_FILE" # Replace the old version with the new version in the GitHub workflows files @@ -77,4 +80,4 @@ bump_version() { } # Call the function -bump_version "$1" "$2" \ No newline at end of file +bump_version "$1" "$2" diff --git a/config/config_helper.exs b/config/config_helper.exs index 22bf43f26480..2eed8a5a48f7 100644 --- a/config/config_helper.exs +++ b/config/config_helper.exs @@ -286,6 +286,19 @@ defmodule ConfigHelper do err -> raise "Invalid JSON in environment variable #{env_var}: #{inspect(err)}" end + def parse_json_with_atom_keys_env_var(env_var, default_value \\ "{}") do + with {:ok, map} <- + env_var + |> safe_get_env(default_value) + |> Jason.decode() do + for {key, value} <- map, into: %{}, do: {String.to_atom(key), value} + else + {:error, error} -> raise "Invalid JSON in environment variable #{env_var}: #{inspect(error)}" + end + rescue + error -> raise "Invalid JSON in environment variable #{env_var}: #{inspect(error)}" + end + @spec 
parse_list_env_var(String.t(), String.t() | nil) :: list()
  def parse_list_env_var(env_var, default_value \\ nil) do
    addresses_var = safe_get_env(env_var, default_value)
@@ -305,6 +318,21 @@ defmodule ConfigHelper do
     end
   end

+  @spec parse_url_env_var(String.t(), String.t() | nil, boolean()) :: String.t() | nil
+  def parse_url_env_var(env_var, default_value \\ nil, trailing_slash_needed? \\ false) do
+    with url when not is_nil(url) <- safe_get_env(env_var, default_value),
+         url <- String.trim_trailing(url, "/"),
+         {url, true} <- {url, trailing_slash_needed?} do
+      url <> "/"
+    else
+      {url, false} ->
+        url
+
+      nil ->
+        nil
+    end
+  end
+
   @supported_chain_types [
     "default",
     "arbitrum",
@@ -333,17 +361,33 @@ defmodule ConfigHelper do
   @spec mode :: atom()
   def mode, do: parse_catalog_value("APPLICATION_MODE", @supported_modes, true, "all")

-  @spec eth_call_url(String.t() | nil) :: String.t() | nil
-  def eth_call_url(default \\ nil) do
-    System.get_env("ETHEREUM_JSONRPC_ETH_CALL_URL") || System.get_env("ETHEREUM_JSONRPC_HTTP_URL") || default
-  end
-
-  def parse_urls_list(urls_var, url_var, default_url \\ nil) do
-    default = default_url || System.get_env("ETHEREUM_JSONRPC_HTTP_URL")
-
-    case parse_list_env_var(urls_var) do
-      [] -> [safe_get_env(url_var, default)]
-      urls -> urls
+  @doc """
+  Retrieves the JSON RPC URLs list based on `urls_type`.
+  """
+  @spec parse_urls_list(
+          :http | :trace | :eth_call | :fallback_http | :fallback_trace | :fallback_eth_call,
+          String.t() | nil
+        ) :: [String.t()]
+  def parse_urls_list(urls_type, default_url \\ nil) do
+    {urls_var, url_var} = define_urls_vars(urls_type)
+
+    with [] <- parse_list_env_var(urls_var),
+         "" <- safe_get_env(url_var, default_url) do
+      case urls_type do
+        :http ->
+          Logger.warning("ETHEREUM_JSONRPC_HTTP_URL (or ETHEREUM_JSONRPC_HTTP_URLS) env variable is required")
+          []
+
+        :fallback_http ->
+          parse_urls_list(:http)
+
+        _other ->
+          new_urls_type = if String.contains?(to_string(urls_type), "fallback"), do: :fallback_http, else: :http
+          parse_urls_list(new_urls_type)
+      end
+    else
+      urls when is_list(urls) -> urls
+      url -> [url]
     end
   end

@@ -384,4 +428,17 @@ defmodule ConfigHelper do
   end

   defp valid_url?(_), do: false
+
+  defp define_urls_vars(:http), do: {"ETHEREUM_JSONRPC_HTTP_URLS", "ETHEREUM_JSONRPC_HTTP_URL"}
+  defp define_urls_vars(:trace), do: {"ETHEREUM_JSONRPC_TRACE_URLS", "ETHEREUM_JSONRPC_TRACE_URL"}
+  defp define_urls_vars(:eth_call), do: {"ETHEREUM_JSONRPC_ETH_CALL_URLS", "ETHEREUM_JSONRPC_ETH_CALL_URL"}
+
+  defp define_urls_vars(:fallback_http),
+    do: {"ETHEREUM_JSONRPC_FALLBACK_HTTP_URLS", "ETHEREUM_JSONRPC_FALLBACK_HTTP_URL"}
+
+  defp define_urls_vars(:fallback_trace),
+    do: {"ETHEREUM_JSONRPC_FALLBACK_TRACE_URLS", "ETHEREUM_JSONRPC_FALLBACK_TRACE_URL"}
+
+  defp define_urls_vars(:fallback_eth_call),
+    do: {"ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URLS", "ETHEREUM_JSONRPC_FALLBACK_ETH_CALL_URL"}
 end
diff --git a/config/runtime.exs b/config/runtime.exs
index 3c0d03f37f63..dcbb8172771b 100644
--- a/config/runtime.exs
+++ b/config/runtime.exs
@@ -67,6 +67,16 @@ config :block_scout_web, BlockScoutWeb.Endpoint,
   render_errors: [view: BlockScoutWeb.ErrorView, accepts: ~w(html json)],
   pubsub_server: BlockScoutWeb.PubSub

+config :block_scout_web, BlockScoutWeb.HealthEndpoint,
+  server: true,
+  url: [
+    path: network_path,
+    scheme: System.get_env("BLOCKSCOUT_PROTOCOL") || "http",
+    host: System.get_env("BLOCKSCOUT_HOST") || "localhost"
+  ],
+  render_errors: [view: BlockScoutWeb.ErrorView, accepts: ~w(html json)],
+  pubsub_server: BlockScoutWeb.PubSub
+
 config
:block_scout_web, BlockScoutWeb.Chain, network: System.get_env("NETWORK"), subnetwork: System.get_env("SUBNETWORK"), @@ -177,7 +187,9 @@ config :ueberauth, Ueberauth, logout_url: "https://#{System.get_env("ACCOUNT_AUT ### Ethereum JSONRPC ### ######################## -trace_url_missing? = System.get_env("ETHEREUM_JSONRPC_TRACE_URL") in ["", nil] +trace_url_missing? = + System.get_env("ETHEREUM_JSONRPC_TRACE_URL") in ["", nil] and + System.get_env("ETHEREUM_JSONRPC_TRACE_URLS") in ["", nil] config :ethereum_jsonrpc, rpc_transport: if(System.get_env("ETHEREUM_JSONRPC_TRANSPORT", "http") == "http", do: :http, else: :ipc), @@ -220,13 +232,14 @@ config :ethereum_jsonrpc, EthereumJSONRPC.Utility.EndpointAvailabilityChecker, e disable_indexer? = ConfigHelper.parse_bool_env_var("DISABLE_INDEXER") disable_webapp? = ConfigHelper.parse_bool_env_var("DISABLE_WEBAPP") +app_mode = ConfigHelper.mode() disable_exchange_rates? = ConfigHelper.parse_bool_env_var("DISABLE_EXCHANGE_RATES") checksum_function = System.get_env("CHECKSUM_FUNCTION") exchange_rates_coin = System.get_env("EXCHANGE_RATES_COIN") config :explorer, - mode: ConfigHelper.mode(), + mode: app_mode, coin: System.get_env("COIN") || exchange_rates_coin || "ETH", coin_name: System.get_env("COIN_NAME") || exchange_rates_coin || "ETH", allowed_solidity_evm_versions: @@ -238,12 +251,12 @@ config :explorer, include_uncles_in_average_block_time: ConfigHelper.parse_bool_env_var("UNCLES_IN_AVERAGE_BLOCK_TIME"), healthy_blocks_period: ConfigHelper.parse_time_env_var("HEALTHY_BLOCKS_PERIOD", "5m"), realtime_events_sender: - if(disable_api? or disable_webapp?, - do: Explorer.Chain.Events.DBSender, - else: Explorer.Chain.Events.SimpleSender - ), - restricted_list: System.get_env("RESTRICTED_LIST"), - restricted_list_key: System.get_env("RESTRICTED_LIST_KEY"), + (case app_mode do + :all -> Explorer.Chain.Events.SimpleSender + separate_setup when separate_setup in [:indexer, :api] -> Explorer.Chain.Events.DBSender + end), + addresses_blacklist: System.get_env("ADDRESSES_BLACKLIST"), + addresses_blacklist_key: System.get_env("ADDRESSES_BLACKLIST_KEY"), checksum_function: checksum_function && String.to_atom(checksum_function), elasticity_multiplier: ConfigHelper.parse_integer_env_var("EIP_1559_ELASTICITY_MULTIPLIER", 2), base_fee_max_change_denominator: ConfigHelper.parse_integer_env_var("EIP_1559_BASE_FEE_MAX_CHANGE_DENOMINATOR", 8), @@ -258,7 +271,7 @@ config :explorer, :proxy, fallback_cached_implementation_data_ttl: :timer.seconds(4), implementation_data_fetching_timeout: :timer.seconds(2) -config :explorer, Explorer.Chain.Events.Listener, enabled: disable_indexer? 
+config :explorer, Explorer.Chain.Events.Listener, enabled: app_mode == :api precompiled_config_base_dir = case config_env() do @@ -539,11 +552,16 @@ config :explorer, Explorer.MicroserviceInterfaces.AccountAbstraction, config :explorer, Explorer.MicroserviceInterfaces.Metadata, service_url: System.get_env("MICROSERVICE_METADATA_URL"), - enabled: ConfigHelper.parse_bool_env_var("MICROSERVICE_METADATA_ENABLED") + enabled: ConfigHelper.parse_bool_env_var("MICROSERVICE_METADATA_ENABLED"), + proxy_requests_timeout: ConfigHelper.parse_time_env_var("MICROSERVICE_METADATA_PROXY_REQUESTS_TIMEOUT", "30s") config :explorer, Explorer.SmartContract.StylusVerifierInterface, service_url: ConfigHelper.parse_microservice_url("MICROSERVICE_STYLUS_VERIFIER_URL") +config :explorer, Explorer.MicroserviceInterfaces.MultichainSearch, + api_key: System.get_env("MICROSERVICE_MULTICHAIN_SEARCH_API_KEY"), + service_url: System.get_env("MICROSERVICE_MULTICHAIN_SEARCH_URL") + config :explorer, :air_table_public_tags, table_url: System.get_env("ACCOUNT_PUBLIC_TAGS_AIRTABLE_URL"), api_key: System.get_env("ACCOUNT_PUBLIC_TAGS_AIRTABLE_API_KEY") @@ -623,11 +641,18 @@ config :explorer, Explorer.Migrator.TokenTransferTokenType, config :explorer, Explorer.Migrator.SanitizeIncorrectNFTTokenTransfers, batch_size: ConfigHelper.parse_integer_env_var("SANITIZE_INCORRECT_NFT_BATCH_SIZE", 100), - concurrency: ConfigHelper.parse_integer_env_var("SANITIZE_INCORRECT_NFT_CONCURRENCY", 1) + concurrency: ConfigHelper.parse_integer_env_var("SANITIZE_INCORRECT_NFT_CONCURRENCY", 1), + timeout: ConfigHelper.parse_time_env_var("SANITIZE_INCORRECT_NFT_TIMEOUT", "0s") config :explorer, Explorer.Migrator.SanitizeIncorrectWETHTokenTransfers, batch_size: ConfigHelper.parse_integer_env_var("SANITIZE_INCORRECT_WETH_BATCH_SIZE", 100), - concurrency: ConfigHelper.parse_integer_env_var("SANITIZE_INCORRECT_WETH_CONCURRENCY", 1) + concurrency: ConfigHelper.parse_integer_env_var("SANITIZE_INCORRECT_WETH_CONCURRENCY", 1), + timeout: ConfigHelper.parse_time_env_var("SANITIZE_INCORRECT_WETH_TIMEOUT", "0s") + +config :explorer, Explorer.Migrator.ReindexInternalTransactionsWithIncompatibleStatus, + batch_size: ConfigHelper.parse_integer_env_var("REINDEX_INTERNAL_TRANSACTIONS_STATUS_BATCH_SIZE", 100), + concurrency: ConfigHelper.parse_integer_env_var("REINDEX_INTERNAL_TRANSACTIONS_STATUS_CONCURRENCY", 1), + timeout: ConfigHelper.parse_time_env_var("REINDEX_INTERNAL_TRANSACTIONS_STATUS_TIMEOUT", "0s") config :explorer, Explorer.Migrator.RestoreOmittedWETHTransfers, concurrency: ConfigHelper.parse_integer_env_var("MIGRATION_RESTORE_OMITTED_WETH_TOKEN_TRANSFERS_CONCURRENCY", 5), @@ -647,6 +672,15 @@ config :explorer, Explorer.Migrator.ShrinkInternalTransactions, batch_size: ConfigHelper.parse_integer_env_var("SHRINK_INTERNAL_TRANSACTIONS_BATCH_SIZE", 100), concurrency: ConfigHelper.parse_integer_env_var("SHRINK_INTERNAL_TRANSACTIONS_CONCURRENCY", 10) +config :explorer, Explorer.Migrator.BackfillMultichainSearchDB, + concurrency: 1, + batch_size: ConfigHelper.parse_integer_env_var("MIGRATION_BACKFILL_MULTICHAIN_SEARCH_BATCH_SIZE", 10) + +config :explorer, Explorer.Migrator.ArbitrumDaRecordsNormalization, + enabled: ConfigHelper.chain_type() == :arbitrum, + batch_size: ConfigHelper.parse_integer_env_var("ARBITRUM_DA_RECORDS_NORMALIZATION_MIGRATION_BATCH_SIZE", 500), + concurrency: ConfigHelper.parse_integer_env_var("ARBITRUM_DA_RECORDS_NORMALIZATION_MIGRATION_CONCURRENCY", 1) + config :explorer, Explorer.Chain.BridgedToken, eth_omni_bridge_mediator: 
System.get_env("BRIDGED_TOKENS_ETH_OMNI_BRIDGE_MEDIATOR"), bsc_omni_bridge_mediator: System.get_env("BRIDGED_TOKENS_BSC_OMNI_BRIDGE_MEDIATOR"), @@ -675,6 +709,15 @@ config :explorer, Explorer.Migrator.FilecoinPendingAddressOperations, config :explorer, Explorer.Chain.Blackfort.Validator, api_url: System.get_env("BLACKFORT_VALIDATOR_API_URL") +addresses_blacklist_url = ConfigHelper.parse_microservice_url("ADDRESSES_BLACKLIST_URL") + +config :explorer, Explorer.Chain.Fetcher.AddressesBlacklist, + url: addresses_blacklist_url, + enabled: !is_nil(addresses_blacklist_url), + update_interval: ConfigHelper.parse_time_env_var("ADDRESSES_BLACKLIST_UPDATE_INTERVAL", "15m"), + retry_interval: ConfigHelper.parse_time_env_var("ADDRESSES_BLACKLIST_RETRY_INTERVAL", "5s"), + provider: ConfigHelper.parse_catalog_value("ADDRESSES_BLACKLIST_PROVIDER", ["blockaid"], false, "blockaid") + ############### ### Indexer ### ############### @@ -743,6 +786,9 @@ config :indexer, Indexer.Fetcher.TransactionAction, "0xC36442b4a4522E871399CD717aBDD847Ab11FE88" ) +config :indexer, Indexer.PendingTransactionsSanitizer, + interval: ConfigHelper.parse_time_env_var("INDEXER_PENDING_TRANSACTIONS_SANITIZER_INTERVAL", "1h") + config :indexer, Indexer.Fetcher.PendingTransaction.Supervisor, disabled?: ConfigHelper.parse_bool_env_var("INDEXER_DISABLE_PENDING_TRANSACTIONS_FETCHER") @@ -813,7 +859,7 @@ config :indexer, Indexer.Fetcher.TokenInstance.SanitizeERC721, enabled: !ConfigHelper.parse_bool_env_var("INDEXER_DISABLE_TOKEN_INSTANCE_ERC_721_SANITIZE_FETCHER", "false") config :indexer, Indexer.Fetcher.EmptyBlocksSanitizer, - batch_size: ConfigHelper.parse_integer_env_var("INDEXER_EMPTY_BLOCKS_SANITIZER_BATCH_SIZE", 100), + batch_size: ConfigHelper.parse_integer_env_var("INDEXER_EMPTY_BLOCKS_SANITIZER_BATCH_SIZE", 10), interval: ConfigHelper.parse_time_env_var("INDEXER_EMPTY_BLOCKS_SANITIZER_INTERVAL", "10s") config :indexer, Indexer.Block.Realtime.Fetcher, @@ -888,6 +934,9 @@ config :indexer, Indexer.Fetcher.Optimism.Deposit.Supervisor, enabled: ConfigHel config :indexer, Indexer.Fetcher.Optimism.Withdrawal.Supervisor, enabled: ConfigHelper.chain_type() == :optimism config :indexer, Indexer.Fetcher.Optimism.WithdrawalEvent.Supervisor, enabled: ConfigHelper.chain_type() == :optimism +config :indexer, Indexer.Fetcher.Optimism.EIP1559ConfigUpdate.Supervisor, + disabled?: ConfigHelper.chain_type() != :optimism + config :indexer, Indexer.Fetcher.Optimism, optimism_l1_rpc: System.get_env("INDEXER_OPTIMISM_L1_RPC"), optimism_l1_system_config: System.get_env("INDEXER_OPTIMISM_L1_SYSTEM_CONFIG_CONTRACT"), @@ -916,6 +965,10 @@ config :indexer, Indexer.Fetcher.Optimism.TransactionBatch, inbox: System.get_env("INDEXER_OPTIMISM_L1_BATCH_INBOX"), submitter: System.get_env("INDEXER_OPTIMISM_L1_BATCH_SUBMITTER") +config :indexer, Indexer.Fetcher.Optimism.EIP1559ConfigUpdate, + chunk_size: ConfigHelper.parse_integer_env_var("INDEXER_OPTIMISM_L2_HOLOCENE_BLOCKS_CHUNK_SIZE", 25), + holocene_timestamp_l2: ConfigHelper.parse_integer_or_nil_env_var("INDEXER_OPTIMISM_L2_HOLOCENE_TIMESTAMP") + config :indexer, Indexer.Fetcher.Withdrawal.Supervisor, disabled?: System.get_env("INDEXER_DISABLE_WITHDRAWALS_FETCHER", "true") == "true" @@ -1156,6 +1209,45 @@ config :indexer, Indexer.Fetcher.Scroll.BridgeL2.Supervisor, disabled?: ConfigHe config :indexer, Indexer.Fetcher.Scroll.Batch.Supervisor, disabled?: ConfigHelper.chain_type() != :scroll +config :ex_aws, + json_codec: Jason, + access_key_id: System.get_env("NFT_MEDIA_HANDLER_AWS_ACCESS_KEY_ID"), + 
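+  # both credentials are read once at startup and are required for R2/S3 uploads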
secret_access_key: System.get_env("NFT_MEDIA_HANDLER_AWS_SECRET_ACCESS_KEY") + +config :ex_aws, :s3, + scheme: "https://", + host: System.get_env("NFT_MEDIA_HANDLER_AWS_BUCKET_HOST"), + port: nil, + public_r2_url: ConfigHelper.parse_url_env_var("NFT_MEDIA_HANDLER_AWS_PUBLIC_BUCKET_URL", nil, false), + bucket_name: System.get_env("NFT_MEDIA_HANDLER_AWS_BUCKET_NAME") + +nmh_enabled? = ConfigHelper.parse_bool_env_var("NFT_MEDIA_HANDLER_ENABLED") +nmh_remote? = ConfigHelper.parse_bool_env_var("NFT_MEDIA_HANDLER_REMOTE_DISPATCHER_NODE_MODE_ENABLED") +nmh_worker? = ConfigHelper.parse_bool_env_var("NFT_MEDIA_HANDLER_IS_WORKER") +nodes_map = ConfigHelper.parse_json_with_atom_keys_env_var("NFT_MEDIA_HANDLER_NODES_MAP") + +config :nft_media_handler, + enabled?: nmh_enabled?, + tmp_dir: "./temp", + remote?: nmh_remote?, + worker?: nmh_worker?, + nodes_map: nodes_map, + standalone_media_worker?: nmh_enabled? && nmh_remote? && nmh_worker?, + worker_concurrency: ConfigHelper.parse_integer_env_var("NFT_MEDIA_HANDLER_WORKER_CONCURRENCY", 10), + worker_batch_size: ConfigHelper.parse_integer_env_var("NFT_MEDIA_HANDLER_WORKER_BATCH_SIZE", 10), + worker_spawn_tasks_timeout: ConfigHelper.parse_time_env_var("NFT_MEDIA_HANDLER_WORKER_SPAWN_TASKS_TIMEOUT", "100ms"), + cache_uniqueness_name: :cache_uniqueness, + cache_uniqueness_max_size: ConfigHelper.parse_integer_env_var("NFT_MEDIA_HANDLER_CACHE_UNIQUENESS_MAX_SIZE", 100_000) + +config :nft_media_handler, Indexer.NFTMediaHandler.Backfiller, + enabled?: ConfigHelper.parse_bool_env_var("NFT_MEDIA_HANDLER_BACKFILL_ENABLED"), + queue_size: ConfigHelper.parse_integer_env_var("NFT_MEDIA_HANDLER_BACKFILL_QUEUE_SIZE", 1000), + enqueue_busy_waiting_timeout: + ConfigHelper.parse_time_env_var("NFT_MEDIA_HANDLER_BACKFILL_ENQUEUE_BUSY_WAITING_TIMEOUT", "1s") + +config :indexer, Indexer.Fetcher.Zilliqa.ScillaSmartContracts.Supervisor, + disabled?: ConfigHelper.chain_type() != :zilliqa + Code.require_file("#{config_env()}.exs", "config/runtime") for config <- "../apps/*/config/runtime/#{config_env()}.exs" |> Path.expand(__DIR__) |> Path.wildcard() do diff --git a/config/runtime/dev.exs b/config/runtime/dev.exs index 3f2049a296ff..58769568ef12 100644 --- a/config/runtime/dev.exs +++ b/config/runtime/dev.exs @@ -26,6 +26,23 @@ config :block_scout_web, BlockScoutWeb.Endpoint, keyfile: System.get_env("KEYFILE") || "priv/cert/selfsigned_key.pem" ] +config :block_scout_web, BlockScoutWeb.HealthEndpoint, + secret_key_base: + System.get_env("SECRET_KEY_BASE") || "RMgI4C1HSkxsEjdhtGMfwAHfyT6CKWXOgzCboJflfSm4jeAlic52io05KB6mqzc5", + http: [ + port: port + ], + url: [ + scheme: "http", + host: System.get_env("BLOCKSCOUT_HOST", "localhost") + ], + https: [ + port: port + 1, + cipher_suite: :strong, + certfile: System.get_env("CERTFILE") || "priv/cert/selfsigned.pem", + keyfile: System.get_env("KEYFILE") || "priv/cert/selfsigned_key.pem" + ] + ######################## ### Ethereum JSONRPC ### ######################## diff --git a/config/runtime/prod.exs b/config/runtime/prod.exs index 6225e9f09bf0..c7dfad61b438 100644 --- a/config/runtime/prod.exs +++ b/config/runtime/prod.exs @@ -19,6 +19,16 @@ config :block_scout_web, BlockScoutWeb.Endpoint, host: System.get_env("BLOCKSCOUT_HOST") || "localhost" ] +config :block_scout_web, BlockScoutWeb.HealthEndpoint, + secret_key_base: System.get_env("SECRET_KEY_BASE"), + check_origin: System.get_env("CHECK_ORIGIN", "false") == "true" || false, + http: [port: port], + url: [ + scheme: System.get_env("BLOCKSCOUT_PROTOCOL") || "https", + port: port, + 
host: System.get_env("BLOCKSCOUT_HOST") || "localhost" + ] + ######################## ### Ethereum JSONRPC ### ######################## diff --git a/cspell.json b/cspell.json index 9ca6de7bae94..ebc20d4e4e04 100644 --- a/cspell.json +++ b/cspell.json @@ -15,7 +15,6 @@ ], // useGitignore - use .gitignore to exclude files from checking "useGitignore": true, - // words - list of words to be always considered correct "ignorePaths": [ "apps/block_scout_web/assets/js/lib/ace/src-min/*.js" ], @@ -23,6 +22,7 @@ // Ignore filecoin f410f-like native addresses "f410f[a-z2-7]{39}" ], + // words - list of words to be always considered correct "words": [ "aave", "absname", @@ -52,10 +52,12 @@ "Autonity", "autoplay", "Averify", + "Backfiller", "backoff", "badhash", "badnumber", "badpassword", + "badrpc", "bafybe", "bafybeid", "bafybeig", @@ -70,6 +72,7 @@ "bitmask", "bizbuz", "Blackfort", + "blockaid", "Blockchair", "blockheight", "blockless", @@ -94,6 +97,7 @@ "byts", "bzzr", "cacerts", + "cachex", "callcode", "CALLCODE", "calltracer", @@ -175,6 +179,8 @@ "Denormalized", "descr", "describedby", + "dets", + "dfda", "differenceby", "discordapp", "disksup", @@ -206,8 +212,10 @@ "etimedout", "eveem", "evenodd", + "evision", "evmversion", "exitor", + "exla", "explorable", "exponention", "extcodehash", @@ -360,6 +368,7 @@ "mtransfer", "mult", "multicall", + "multichain", "multis", "munchos", "munknownc", @@ -395,6 +404,7 @@ "Numbe", "Nunito", "nxdomain", + "OFAC", "offchain", "omni", "onclick", @@ -478,6 +488,7 @@ "reqs", "rerequest", "reshows", + "Resizer", "retcode", "retryable", "returnaddress", @@ -532,6 +543,7 @@ "stringly", "stylelint", "stylesheet", + "stylys", "subcall", "subcalls", "subdenomination", @@ -639,6 +651,7 @@ "watchlist", "watchlisted", "watchlists", + "webp", "websockex", "whereis", "whiler", diff --git a/docker-compose/README.md b/docker-compose/README.md index 1a0926a0123c..e06d54a5a8d3 100644 --- a/docker-compose/README.md +++ b/docker-compose/README.md @@ -57,7 +57,7 @@ The repo contains built-in configs for different JSON RPC clients without need t All of the configs assume the Ethereum JSON RPC is running at http://localhost:8545. -In order to stop launched containers, run `docker-compose -d -f config_file.yml down`, replacing `config_file.yml` with the file name of the config which was previously launched. +In order to stop launched containers, run `docker-compose -f config_file.yml down`, replacing `config_file.yml` with the file name of the config which was previously launched. You can adjust BlockScout environment variables: @@ -69,7 +69,7 @@ You can adjust BlockScout environment variables: Descriptions of the ENVs are available -- for [backend](https://docs.blockscout.com/for-developers/information-and-settings/env-variables) +- for [backend](https://docs.blockscout.com/setup/env-variables) - for [frontend](https://github.com/blockscout/frontend/blob/main/docs/ENVS.md). ## Running Docker containers via Makefile diff --git a/docker-compose/docker-compose.yml b/docker-compose/docker-compose.yml index 26b21da2da7e..d8e5766b2e46 100644 --- a/docker-compose/docker-compose.yml +++ b/docker-compose/docker-compose.yml @@ -30,7 +30,7 @@ services: context: .. 
dockerfile: ./docker/Dockerfile args: - RELEASE_VERSION: 6.9.2 + RELEASE_VERSION: 6.10.1 links: - db:database environment: @@ -39,6 +39,18 @@ services: ETHEREUM_JSONRPC_WS_URL: ws://host.docker.internal:8545/ CHAIN_ID: '1337' + nft_media_handler: + depends_on: + - backend + extends: + file: ./services/nft_media_handler.yml + service: nft_media_handler + build: + context: .. + dockerfile: ./docker/Dockerfile + args: + RELEASE_VERSION: 6.10.1 + visualizer: extends: file: ./services/visualizer.yml diff --git a/docker-compose/envs/common-blockscout.env b/docker-compose/envs/common-blockscout.env index df4b2670e2c7..11cfb911dc1a 100644 --- a/docker-compose/envs/common-blockscout.env +++ b/docker-compose/envs/common-blockscout.env @@ -174,6 +174,7 @@ INDEXER_DISABLE_TOKEN_INSTANCE_REALTIME_FETCHER=false INDEXER_DISABLE_TOKEN_INSTANCE_RETRY_FETCHER=false INDEXER_DISABLE_TOKEN_INSTANCE_SANITIZE_FETCHER=false INDEXER_DISABLE_TOKEN_INSTANCE_LEGACY_SANITIZE_FETCHER=false +# INDEXER_PENDING_TRANSACTIONS_SANITIZER_INTERVAL= INDEXER_DISABLE_PENDING_TRANSACTIONS_FETCHER=false INDEXER_DISABLE_INTERNAL_TRANSACTIONS_FETCHER=false # INDEXER_DISABLE_CATALOGED_TOKEN_UPDATER_FETCHER= @@ -274,6 +275,8 @@ INDEXER_DISABLE_INTERNAL_TRANSACTIONS_FETCHER=false # INDEXER_ARBITRUM_TRACKING_MESSAGES_ON_L1_RECHECK_INTERVAL= # INDEXER_ARBITRUM_MISSED_MESSAGES_RECHECK_INTERVAL= # INDEXER_ARBITRUM_MISSED_MESSAGES_BLOCKS_DEPTH= +# ARBITRUM_DA_RECORDS_NORMALIZATION_MIGRATION_BATCH_SIZE= +# ARBITRUM_DA_RECORDS_NORMALIZATION_MIGRATION_CONCURRENCY= # CELO_CORE_CONTRACTS= # INDEXER_CELO_VALIDATOR_GROUP_VOTES_BATCH_SIZE=200000 # INDEXER_DISABLE_CELO_EPOCH_FETCHER=false @@ -308,6 +311,8 @@ INDEXER_DISABLE_INTERNAL_TRANSACTIONS_FETCHER=false # INDEXER_OPTIMISM_L1_DEPOSITS_TRANSACTION_TYPE= # INDEXER_OPTIMISM_L1_ETH_GET_LOGS_RANGE_SIZE= # INDEXER_OPTIMISM_L2_ETH_GET_LOGS_RANGE_SIZE= +# INDEXER_OPTIMISM_L2_HOLOCENE_TIMESTAMP= +# INDEXER_OPTIMISM_L2_HOLOCENE_BLOCKS_CHUNK_SIZE= # INDEXER_SCROLL_L1_RPC= # INDEXER_SCROLL_L1_MESSENGER_CONTRACT= # INDEXER_SCROLL_L1_MESSENGER_START_BLOCK= @@ -371,8 +376,12 @@ EXTERNAL_APPS=[] # GAS_PRICE_ORACLE_SAFELOW_TIME_COEFFICIENT= # GAS_PRICE_ORACLE_AVERAGE_TIME_COEFFICIENT= # GAS_PRICE_ORACLE_FAST_TIME_COEFFICIENT= -# RESTRICTED_LIST= -# RESTRICTED_LIST_KEY= +# ADDRESSES_BLACKLIST= +# ADDRESSES_BLACKLIST_KEY= +# ADDRESSES_BLACKLIST_URL= +# ADDRESSES_BLACKLIST_UPDATE_INTERVAL= +# ADDRESSES_BLACKLIST_RETRY_INTERVAL= +# ADDRESSES_BLACKLIST_PROVIDER= SHOW_MAINTENANCE_ALERT=false MAINTENANCE_ALERT_MESSAGE= CHAIN_ID= @@ -409,7 +418,9 @@ MICROSERVICE_ACCOUNT_ABSTRACTION_ENABLED=false MICROSERVICE_ACCOUNT_ABSTRACTION_URL=http://user-ops-indexer:8050/ # MICROSERVICE_METADATA_URL= # MICROSERVICE_METADATA_ENABLED= +# MICROSERVICE_METADATA_PROXY_REQUESTS_TIMEOUT= # MICROSERVICE_STYLUS_VERIFIER_URL= +# MICROSERVICE_MULTICHAIN_SEARCH_URL= DECODE_NOT_A_CONTRACT_CALLS=true # DATABASE_READ_ONLY_API_URL= # ACCOUNT_DATABASE_URL= @@ -447,6 +458,7 @@ EIP_1559_ELASTICITY_MULTIPLIER=2 # TOKEN_TRANSFER_TOKEN_TYPE_MIGRATION_CONCURRENCY= # SANITIZE_INCORRECT_NFT_BATCH_SIZE= # SANITIZE_INCORRECT_NFT_CONCURRENCY= +# SANITIZE_INCORRECT_NFT_TIMEOUT= # MIGRATION_RESTORE_OMITTED_WETH_TOKEN_TRANSFERS_CONCURRENCY= # MIGRATION_RESTORE_OMITTED_WETH_TOKEN_TRANSFERS_BATCH_SIZE= # MIGRATION_RESTORE_OMITTED_WETH_TOKEN_TRANSFERS_TIMEOUT= @@ -454,6 +466,7 @@ EIP_1559_ELASTICITY_MULTIPLIER=2 # MIGRATION_SANITIZE_DUPLICATED_LOG_INDEX_LOGS_BATCH_SIZE= # MIGRATION_REFETCH_CONTRACT_CODES_BATCH_SIZE= # MIGRATION_REFETCH_CONTRACT_CODES_CONCURRENCY= 
+# MIGRATION_BACKFILL_MULTICHAIN_SEARCH_BATCH_SIZE= SOURCIFY_INTEGRATION_ENABLED=false SOURCIFY_SERVER_URL= SOURCIFY_REPO_URL= @@ -482,9 +495,23 @@ TENDERLY_CHAIN_PATH= # WHITELISTED_WETH_CONTRACTS= # SANITIZE_INCORRECT_WETH_BATCH_SIZE=100 # SANITIZE_INCORRECT_WETH_CONCURRENCY=1 +# SANITIZE_INCORRECT_WETH_TIMEOUT= +# REINDEX_INTERNAL_TRANSACTIONS_STATUS_BATCH_SIZE= +# REINDEX_INTERNAL_TRANSACTIONS_STATUS_CONCURRENCY= +# REINDEX_INTERNAL_TRANSACTIONS_STATUS_TIMEOUT= # PUBLIC_METRICS_ENABLED= # PUBLIC_METRICS_UPDATE_PERIOD_HOURS= # CSV_EXPORT_LIMIT= # SHRINK_INTERNAL_TRANSACTIONS_ENABLED= # SHRINK_INTERNAL_TRANSACTIONS_BATCH_SIZE= # SHRINK_INTERNAL_TRANSACTIONS_CONCURRENCY= +NFT_MEDIA_HANDLER_ENABLED=true +NFT_MEDIA_HANDLER_REMOTE_DISPATCHER_NODE_MODE_ENABLED=true +RELEASE_NODE=producer@172.18.0.4 +RELEASE_DISTRIBUTION=name +RELEASE_COOKIE=secret_cookie +# NFT_MEDIA_HANDLER_AWS_PUBLIC_BUCKET_URL= +# NFT_MEDIA_HANDLER_BACKFILL_ENABLED= +# NFT_MEDIA_HANDLER_BACKFILL_QUEUE_SIZE= +# NFT_MEDIA_HANDLER_BACKFILL_ENQUEUE_BUSY_WAITING_TIMEOUT= +# NFT_MEDIA_HANDLER_CACHE_UNIQUENESS_MAX_SIZE= \ No newline at end of file diff --git a/docker-compose/envs/common-nft-media-handler.env b/docker-compose/envs/common-nft-media-handler.env new file mode 100644 index 000000000000..fd68542cf31b --- /dev/null +++ b/docker-compose/envs/common-nft-media-handler.env @@ -0,0 +1,19 @@ +# NFT_MEDIA_HANDLER_AWS_ACCESS_KEY_ID= +# NFT_MEDIA_HANDLER_AWS_SECRET_ACCESS_KEY= +# NFT_MEDIA_HANDLER_AWS_BUCKET_HOST= +# NFT_MEDIA_HANDLER_AWS_PUBLIC_BUCKET_URL= +# NFT_MEDIA_HANDLER_AWS_BUCKET_NAME= + + +NFT_MEDIA_HANDLER_ENABLED=true +NFT_MEDIA_HANDLER_REMOTE_DISPATCHER_NODE_MODE_ENABLED=true +NFT_MEDIA_HANDLER_IS_WORKER=true +NFT_MEDIA_HANDLER_NODES_MAP="{\"producer@172.18.0.4\": \"/folder_1\"}" +# NFT_MEDIA_HANDLER_WORKER_CONCURRENCY= +# NFT_MEDIA_HANDLER_WORKER_BATCH_SIZE= +# NFT_MEDIA_HANDLER_WORKER_SPAWN_TASKS_TIMEOUT= + + +RELEASE_NODE=worker@0.0.0.0 +RELEASE_DISTRIBUTION=name +RELEASE_COOKIE=secret_cookie \ No newline at end of file diff --git a/docker-compose/no-services.yml b/docker-compose/no-services.yml index d1608dba98c7..2d040f3eabe8 100644 --- a/docker-compose/no-services.yml +++ b/docker-compose/no-services.yml @@ -37,7 +37,7 @@ services: CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED: "" CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL: "" ADMIN_PANEL_ENABLED: "" - RELEASE_VERSION: 6.9.0 + RELEASE_VERSION: 6.10.1 links: - db:database environment: diff --git a/docker-compose/services/backend.yml b/docker-compose/services/backend.yml index cf8a0871d366..46fe299ab24d 100644 --- a/docker-compose/services/backend.yml +++ b/docker-compose/services/backend.yml @@ -13,4 +13,5 @@ services: env_file: - ../envs/common-blockscout.env volumes: - - ./logs/:/app/logs/ \ No newline at end of file + - ./logs/:/app/logs/ + - ./dets/:/app/dets/ \ No newline at end of file diff --git a/docker-compose/services/db.yml b/docker-compose/services/db.yml index 430409bbecfe..2db8647d13dc 100644 --- a/docker-compose/services/db.yml +++ b/docker-compose/services/db.yml @@ -2,7 +2,7 @@ version: '3.9' services: db-init: - image: postgres:15 + image: postgres:17 volumes: - ./blockscout-db-data:/var/lib/postgresql/data entrypoint: @@ -12,7 +12,7 @@ services: chown -R 2000:2000 /var/lib/postgresql/data db: - image: postgres:15 + image: postgres:17 user: 2000:2000 shm_size: 256m restart: always diff --git a/docker-compose/services/nft_media_handler.yml b/docker-compose/services/nft_media_handler.yml new file mode 100644 index 000000000000..cd2c69084b53 --- 
diff --git a/docker-compose/envs/common-nft-media-handler.env b/docker-compose/envs/common-nft-media-handler.env
new file mode 100644
index 000000000000..fd68542cf31b
--- /dev/null
+++ b/docker-compose/envs/common-nft-media-handler.env
@@ -0,0 +1,19 @@
+# NFT_MEDIA_HANDLER_AWS_ACCESS_KEY_ID=
+# NFT_MEDIA_HANDLER_AWS_SECRET_ACCESS_KEY=
+# NFT_MEDIA_HANDLER_AWS_BUCKET_HOST=
+# NFT_MEDIA_HANDLER_AWS_PUBLIC_BUCKET_URL=
+# NFT_MEDIA_HANDLER_AWS_BUCKET_NAME=
+
+
+NFT_MEDIA_HANDLER_ENABLED=true
+NFT_MEDIA_HANDLER_REMOTE_DISPATCHER_NODE_MODE_ENABLED=true
+NFT_MEDIA_HANDLER_IS_WORKER=true
+NFT_MEDIA_HANDLER_NODES_MAP="{\"producer@172.18.0.4\": \"/folder_1\"}"
+# NFT_MEDIA_HANDLER_WORKER_CONCURRENCY=
+# NFT_MEDIA_HANDLER_WORKER_BATCH_SIZE=
+# NFT_MEDIA_HANDLER_WORKER_SPAWN_TASKS_TIMEOUT=
+
+
+RELEASE_NODE=worker@0.0.0.0
+RELEASE_DISTRIBUTION=name
+RELEASE_COOKIE=secret_cookie
\ No newline at end of file
diff --git a/docker-compose/no-services.yml b/docker-compose/no-services.yml
index d1608dba98c7..2d040f3eabe8 100644
--- a/docker-compose/no-services.yml
+++ b/docker-compose/no-services.yml
@@ -37,7 +37,7 @@ services:
       CACHE_TOTAL_GAS_USAGE_COUNTER_ENABLED: ""
       CACHE_ADDRESS_WITH_BALANCES_UPDATE_INTERVAL: ""
       ADMIN_PANEL_ENABLED: ""
-      RELEASE_VERSION: 6.9.0
+      RELEASE_VERSION: 6.10.1
     links:
       - db:database
     environment:
diff --git a/docker-compose/services/backend.yml b/docker-compose/services/backend.yml
index cf8a0871d366..46fe299ab24d 100644
--- a/docker-compose/services/backend.yml
+++ b/docker-compose/services/backend.yml
@@ -13,4 +13,5 @@ services:
     env_file:
       - ../envs/common-blockscout.env
     volumes:
-      - ./logs/:/app/logs/
\ No newline at end of file
+      - ./logs/:/app/logs/
+      - ./dets/:/app/dets/
\ No newline at end of file
diff --git a/docker-compose/services/db.yml b/docker-compose/services/db.yml
index 430409bbecfe..2db8647d13dc 100644
--- a/docker-compose/services/db.yml
+++ b/docker-compose/services/db.yml
@@ -2,7 +2,7 @@ version: '3.9'
 services:
   db-init:
-    image: postgres:15
+    image: postgres:17
     volumes:
       - ./blockscout-db-data:/var/lib/postgresql/data
     entrypoint:
       - sh
       - -c
       - |
         chown -R 2000:2000 /var/lib/postgresql/data
@@ -12,7 +12,7 @@ services:
   db:
-    image: postgres:15
+    image: postgres:17
     user: 2000:2000
     shm_size: 256m
     restart: always
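`NFT_MEDIA_HANDLER_NODES_MAP` is a JSON object that tells a worker which producer node it serves and which target folder to write media into. A sketch of how such a value can be decoded, using `Jason` (already pinned in the lock file below); the surrounding pipeline is assumed, not shown in this diff:

```elixir
nodes_map =
  "NFT_MEDIA_HANDLER_NODES_MAP"
  |> System.get_env("{}")
  |> Jason.decode!()
  # node names arrive as strings; Erlang distribution wants atoms
  |> Map.new(fn {node, folder} -> {String.to_atom(node), folder} end)

# With the value above: %{"producer@172.18.0.4": "/folder_1"}
```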
diff --git a/docker-compose/services/nft_media_handler.yml b/docker-compose/services/nft_media_handler.yml
new file mode 100644
index 000000000000..cd2c69084b53
--- /dev/null
+++ b/docker-compose/services/nft_media_handler.yml
@@ -0,0 +1,16 @@
+version: '3.9'
+
+services:
+  nft_media_handler:
+    image: blockscout/${DOCKER_REPO:-blockscout}:${DOCKER_TAG:-latest}
+    pull_policy: always
+    restart: always
+    stop_grace_period: 5m
+    container_name: 'nft_media_handler'
+    command: sh -c "bin/blockscout start"
+    extra_hosts:
+      - 'host.docker.internal:host-gateway'
+    env_file:
+      - ../envs/common-nft-media-handler.env
+    volumes:
+      - ./logs/:/app/logs/
\ No newline at end of file
diff --git a/docker-compose/services/stats.yml b/docker-compose/services/stats.yml
index d33723b83ea0..d4f7e8dffcd6 100644
--- a/docker-compose/services/stats.yml
+++ b/docker-compose/services/stats.yml
@@ -2,7 +2,7 @@ version: '3.9'
 services:
   stats-db-init:
-    image: postgres:15
+    image: postgres:17
     volumes:
       - ./stats-db-data:/var/lib/postgresql/data
     entrypoint:
       - sh
       - -c
       - |
         chown -R 2000:2000 /var/lib/postgresql/data
@@ -12,7 +12,7 @@ services:
   stats-db:
-    image: postgres:15
+    image: postgres:17
     user: 2000:2000
     shm_size: 256m
     restart: always
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 79fe70743d08..69ca66a768f6 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -12,6 +12,7 @@ COPY apps/explorer/mix.exs ./apps/explorer/
 COPY apps/ethereum_jsonrpc/mix.exs ./apps/ethereum_jsonrpc/
 COPY apps/indexer/mix.exs ./apps/indexer/
 COPY apps/utils/mix.exs ./apps/utils/
+COPY apps/nft_media_handler/mix.exs ./apps/nft_media_handler/

 ENV MIX_ENV="prod"
 ENV MIX_HOME=/opt/mix
@@ -98,6 +99,6 @@ COPY --from=builder --chown=${BLOCKSCOUT_USER}:${BLOCKSCOUT_GROUP} /app/config/c
 COPY --from=builder --chown=${BLOCKSCOUT_USER}:${BLOCKSCOUT_GROUP} /app/config/config_helper.exs /app/releases/${RELEASE_VERSION}/config_helper.exs
 COPY --from=builder --chown=${BLOCKSCOUT_USER}:${BLOCKSCOUT_GROUP} /app/config/assets/precompiles-arbitrum.json ./config/assets/precompiles-arbitrum.json

-RUN chown -R ${BLOCKSCOUT_USER}:${BLOCKSCOUT_GROUP} /app
+RUN mkdir dets && mkdir temp && chown -R ${BLOCKSCOUT_USER}:${BLOCKSCOUT_GROUP} /app

 USER ${BLOCKSCOUT_USER}:${BLOCKSCOUT_GROUP}
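The Dockerfile now pre-creates `/app/dets` (matched by the new `./dets/:/app/dets/` volume in `backend.yml`), so DETS tables written by the release land on a host mount and survive container restarts. An illustrative-only snippet with made-up table and file names:

```elixir
# A DETS table stored under the mounted dets/ directory persists across restarts.
File.mkdir_p!("dets")

{:ok, table} = :dets.open_file(:example_cache, file: ~c"dets/example_cache.dets")
:ok = :dets.insert(table, {"token_123", :processed})
:dets.lookup(table, "token_123") |> IO.inspect()
:ok = :dets.close(table)
```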
"1.3.0", "0de127c857d7452ba3c3367f53fb814b0410ff9c680a8d20fbe8b9a3c57a1118", [:mix], [{:cloak, "~> 1.1.1", [hex: :cloak, repo: "hexpm", optional: false]}, {:ecto, "~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}], "hexpm", "314beb0c123b8a800418ca1d51065b27ba3b15f085977e65c0f7b2adab2de1cc"}, - "coerce": {:hex, :coerce, "1.0.1", "211c27386315dc2894ac11bc1f413a0e38505d808153367bd5c6e75a4003d096", [:mix], [], "hexpm", "b44a691700f7a1a15b4b7e2ff1fa30bebd669929ac8aa43cffe9e2f8bf051cf1"}, "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"}, "comeonin": {:hex, :comeonin, "5.5.0", "364d00df52545c44a139bad919d7eacb55abf39e86565878e17cebb787977368", [:mix], [], "hexpm", "6287fc3ba0aad34883cbe3f7949fc1d1e738e5ccdce77165bc99490aa69f47fb"}, + "complex": {:hex, :complex, "0.5.0", "af2d2331ff6170b61bb738695e481b27a66780e18763e066ee2cd863d0b1dd92", [:mix], [], "hexpm", "2683bd3c184466cfb94fad74cbfddfaa94b860e27ad4ca1bffe3bff169d91ef1"}, "con_cache": {:hex, :con_cache, "1.1.1", "9f47a68dfef5ac3bbff8ce2c499869dbc5ba889dadde6ac4aff8eb78ddaf6d82", [:mix], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "1def4d1bec296564c75b5bbc60a19f2b5649d81bfa345a2febcc6ae380e8ae15"}, "connection": {:hex, :connection, "1.1.0", "ff2a49c4b75b6fb3e674bfc5536451607270aac754ffd1bdfe175abe4a6d7a68", [:mix], [], "hexpm", "722c1eb0a418fbe91ba7bd59a47e28008a189d47e37e0e7bb85585a016b2869c"}, "cors_plug": {:hex, :cors_plug, "3.0.3", "7c3ac52b39624bc616db2e937c282f3f623f25f8d550068b6710e58d04a0e330", [:mix], [{:plug, "~> 1.13", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "3f2d759e8c272ed3835fab2ef11b46bddab8c1ab9528167bd463b6452edf830d"}, "cowboy": {:hex, :cowboy, "2.12.0", "f276d521a1ff88b2b9b4c54d0e753da6c66dd7be6c9fca3d9418b561828a3731", [:make, :rebar3], [{:cowlib, "2.13.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "8a7abe6d183372ceb21caa2709bec928ab2b72e18a3911aa1771639bef82651e"}, "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"}, "cowlib": {:hex, :cowlib, "2.13.0", "db8f7505d8332d98ef50a3ef34b34c1afddec7506e4ee4dd4a3a266285d282ca", [:make, :rebar3], [], "hexpm", "e1e1284dc3fc030a64b1ad0d8382ae7e99da46c3246b815318a4b848873800a4"}, - "credo": {:hex, :credo, "1.7.10", "6e64fe59be8da5e30a1b96273b247b5cf1cc9e336b5fd66302a64b25749ad44d", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "71fbc9a6b8be21d993deca85bf151df023a3097b01e09a2809d460348561d8cd"}, + "credo": {:hex, :credo, "1.7.11", "d3e805f7ddf6c9c854fd36f089649d7cf6ba74c42bc3795d587814e3c9847102", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", 
"56826b4306843253a66e47ae45e98e7d284ee1f95d53d1612bb483f88a8cf219"}, "csv": {:hex, :csv, "2.5.0", "c47b5a5221bf2e56d6e8eb79e77884046d7fd516280dc7d9b674251e0ae46246", [:mix], [{:parallel_stream, "~> 1.0.4 or ~> 1.1.0", [hex: :parallel_stream, repo: "hexpm", optional: false]}], "hexpm", "e821f541487045c7591a1963eeb42afff0dfa99bdcdbeb3410795a2f59c77d34"}, - "dataloader": {:hex, :dataloader, "2.0.1", "fa06b057b432b993203003fbff5ff040b7f6483a77e732b7dfc18f34ded2634f", [:mix], [{:ecto, ">= 3.4.3 and < 4.0.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:opentelemetry_process_propagator, "~> 0.2.1 or ~> 0.3", [hex: :opentelemetry_process_propagator, repo: "hexpm", optional: true]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "da7ff00890e1b14f7457419b9508605a8e66ae2cc2d08c5db6a9f344550efa11"}, + "dataloader": {:hex, :dataloader, "2.0.2", "c45075e0692e68638a315e14f747bd8d7065fb5f38705cf980f62d4cd344401f", [:mix], [{:ecto, ">= 3.4.3 and < 4.0.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:opentelemetry_process_propagator, "~> 0.2.1 or ~> 0.3", [hex: :opentelemetry_process_propagator, repo: "hexpm", optional: true]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "4c6cabc0b55e96e7de74d14bf37f4a5786f0ab69aa06764a1f39dda40079b098"}, "db_connection": {:hex, :db_connection, "2.7.0", "b99faa9291bb09892c7da373bb82cba59aefa9b36300f6145c5f201c7adf48ec", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "dcf08f31b2701f857dfc787fbad78223d61a32204f217f15e881dd93e4bdd3ff"}, - "decimal": {:hex, :decimal, "2.2.0", "df3d06bb9517e302b1bd265c1e7f16cda51547ad9d99892049340841f3e15836", [:mix], [], "hexpm", "af8daf87384b51b7e611fb1a1f2c4d4876b65ef968fa8bd3adf44cff401c7f21"}, + "decimal": {:hex, :decimal, "2.3.0", "3ad6255aa77b4a3c4f818171b12d237500e63525c2fd056699967a3e7ea20f62", [:mix], [], "hexpm", "a4d66355cb29cb47c3cf30e71329e58361cfcb37c34235ef3bf1d7bf3773aeac"}, "decorator": {:hex, :decorator, "1.4.0", "a57ac32c823ea7e4e67f5af56412d12b33274661bb7640ec7fc882f8d23ac419", [:mix], [], "hexpm", "0a07cedd9083da875c7418dea95b78361197cf2bf3211d743f6f7ce39656597f"}, "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, "dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"}, "digital_token": {:hex, :digital_token, "1.0.0", "454a4444061943f7349a51ef74b7fb1ebd19e6a94f43ef711f7dae88c09347df", [:mix], [{:cldr_utils, "~> 2.17", [hex: :cldr_utils, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "8ed6f5a8c2fa7b07147b9963db506a1b4c7475d9afca6492136535b064c9e9e6"}, - "earmark_parser": {:hex, :earmark_parser, "1.4.41", "ab34711c9dc6212dda44fcd20ecb87ac3f3fce6f0ca2f28d4a00e4154f8cd599", [:mix], [], "hexpm", "a81a04c7e34b6617c2792e291b5a2e57ab316365c2644ddc553bb9ed863ebefa"}, + "earmark_parser": {:hex, :earmark_parser, "1.4.42", "f23d856f41919f17cd06a493923a722d87a2d684f143a1e663c04a2b93100682", [:mix], [], "hexpm", "6915b6ca369b5f7346636a2f41c6a6d78b5af419d61a611079189233358b8b8b"}, "ecto": {:hex, :ecto, "3.12.5", "4a312960ce612e17337e7cefcf9be45b95a3be6b36b6f94dfb3d8c361d631866", 
[:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6eb18e80bef8bb57e17f5a7f068a1719fbda384d40fc37acb8eb8aeca493b6ea"}, "ecto_sql": {:hex, :ecto_sql, "3.12.1", "c0d0d60e85d9ff4631f12bafa454bc392ce8b9ec83531a412c12a0d415a3a4d0", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.12", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.19 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "aff5b958a899762c5f09028c847569f7dfb9cc9d63bdb8133bff8a5546de6bf5"}, - "elixir_make": {:hex, :elixir_make, "0.8.4", "4960a03ce79081dee8fe119d80ad372c4e7badb84c493cc75983f9d3bc8bde0f", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.0", [hex: :certifi, repo: "hexpm", optional: true]}], "hexpm", "6e7f1d619b5f61dfabd0a20aa268e575572b542ac31723293a4c1a567d5ef040"}, + "elixir_make": {:hex, :elixir_make, "0.9.0", "6484b3cd8c0cee58f09f05ecaf1a140a8c97670671a6a0e7ab4dc326c3109726", [:mix], [], "hexpm", "db23d4fd8b757462ad02f8aa73431a426fe6671c80b200d9710caf3d1dd0ffdb"}, "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, - "ex_abi": {:hex, :ex_abi, "0.8.1", "451fa960ddc4dfbb350e13509f3dd64ca586b8484a77aad9f7d778161b5eab79", [:mix], [{:ex_keccak, "~> 0.7.5", [hex: :ex_keccak, repo: "hexpm", optional: true]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "abcf53d556c2948e5c1241340afd4a72cdf93ab6daef16fc200c16ca1183cdca"}, - "ex_cldr": {:hex, :ex_cldr, "2.40.1", "c1fcb0cd9d2a70d28f4540a99f32127e7f1813e0db109d65ab29dea5337ae266", [:mix], [{:cldr_utils, "~> 2.28", [hex: :cldr_utils, repo: "hexpm", optional: false]}, {:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:gettext, "~> 0.19", [hex: :gettext, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: true]}], "hexpm", "509810702e8e81991851d9426ffe6b34b48b7b9baa12922e7b3fb8f6368606f3"}, + "eternal": {:hex, :eternal, "1.2.2", "d1641c86368de99375b98d183042dd6c2b234262b8d08dfd72b9eeaafc2a1abd", [:mix], [], "hexpm", "2c9fe32b9c3726703ba5e1d43a1d255a4f3f2d8f8f9bc19f094c7cb1a7a9e782"}, + "evision": {:hex, :evision, "0.2.9", "c16a40e2d50aaaa82c109393aab99dc05cde9a8dd1e6d0644d7b012cf88e5a57", [:make, :mix, :rebar3], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:kino, "~> 0.11", [hex: :kino, repo: "hexpm", optional: true]}, {:nx, "~> 0.6", [hex: :nx, repo: "hexpm", optional: false]}, {:progress_bar, "~> 2.0 or ~> 3.0", [hex: :progress_bar, repo: "hexpm", optional: true]}], "hexpm", "525134bc8e5e33906b9ddf31e75f83dbb3a33853796bdd1b816d3b4f8d101686"}, + "ex_abi": {:hex, :ex_abi, "0.8.2", "66812c921b236c68bb3d8bcfcb759c5715322064ad150d715f18f2f65e0e0648", [:mix], 
[{:ex_keccak, "~> 0.7.6", [hex: :ex_keccak, repo: "hexpm", optional: true]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "db785ad43c24d4d7015d3070611eb3e2bd88fa96b614cab10cb42401c94e1e74"}, + "ex_aws": {:hex, :ex_aws, "2.5.8", "0393cfbc5e4a9e7017845451a015d836a670397100aa4c86901980e2a2c5f7d4", [:mix], [{:configparser_ex, "~> 4.0", [hex: :configparser_ex, repo: "hexpm", optional: true]}, {:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: true]}, {:jsx, "~> 2.8 or ~> 3.0", [hex: :jsx, repo: "hexpm", optional: true]}, {:mime, "~> 1.2 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:req, "~> 0.3", [hex: :req, repo: "hexpm", optional: true]}, {:sweet_xml, "~> 0.7", [hex: :sweet_xml, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "8f79777b7932168956c8cc3a6db41f5783aa816eb50de356aed3165a71e5f8c3"}, + "ex_aws_s3": {:hex, :ex_aws_s3, "2.5.6", "d135983bbd8b6df6350dfd83999437725527c1bea151e5055760bfc9b2d17c20", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm", "9874e12847e469ca2f13a5689be04e546c16f63caf6380870b7f25bf7cb98875"}, + "ex_cldr": {:hex, :ex_cldr, "2.40.2", "1361d06624d7533fb3a2df59c82c7108b18ef55e884f48d503597fce4ce28d97", [:mix], [{:cldr_utils, "~> 2.28", [hex: :cldr_utils, repo: "hexpm", optional: false]}, {:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:gettext, "~> 0.19", [hex: :gettext, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: true]}], "hexpm", "cd9039ca9a7c61b99c053a16bd2201ebd7d1c87b49499a4c6d761ec14bca4442"}, "ex_cldr_currencies": {:hex, :ex_cldr_currencies, "2.16.3", "1ec6444b5d0c0aabba5a3bc321d73f1c9c751c6add92e7fb7775ccc071d96bd8", [:mix], [{:ex_cldr, "~> 2.38", [hex: :ex_cldr, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "4d1b5f8449fdf0ece6a2e5c7401ad8fcfde77ee6ea480bddc16e266dfa2b570c"}, "ex_cldr_lists": {:hex, :ex_cldr_lists, "2.11.1", "ad18f861d7c5ca82aac6d173469c6a2339645c96790172ab0aa255b64fb7303b", [:mix], [{:ex_cldr_numbers, "~> 2.25", [hex: :ex_cldr_numbers, repo: "hexpm", optional: false]}, {:ex_doc, "~> 0.18", [hex: :ex_doc, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "00161c04510ccb3f18b19a6b8562e50c21f1e9c15b8ff4c934bea5aad0b4ade2"}, "ex_cldr_numbers": {:hex, :ex_cldr_numbers, "2.33.4", "ecb06f40fc63f484a53d4ea80e1bdd6860ec44d3032f2b10b17340d34c0a13d5", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:digital_token, "~> 0.3 or ~> 1.0", [hex: :digital_token, repo: "hexpm", optional: false]}, {:ex_cldr, "~> 2.38", [hex: :ex_cldr, repo: "hexpm", optional: false]}, {:ex_cldr_currencies, "~> 2.16", [hex: :ex_cldr_currencies, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "d15b7e217e9e60c328e73045e51dc67d7ac5d2997247b833efab2c69b2ed06f5"}, "ex_cldr_units": {:hex, :ex_cldr_units, "3.17.2", "b0483d5c61c6c8649aafdcafc7372dd71a7a30f52dd4c9b072576467bf721454", [:mix], [{:cldr_utils, "~> 2.25", [hex: :cldr_utils, repo: "hexpm", optional: false]}, {:decimal, 
"~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ex_cldr_lists, "~> 2.10", [hex: :ex_cldr_lists, repo: "hexpm", optional: false]}, {:ex_cldr_numbers, "~> 2.33.0", [hex: :ex_cldr_numbers, repo: "hexpm", optional: false]}, {:ex_doc, "~> 0.18", [hex: :ex_doc, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "457d76c6e3b548bd7aba3c7b5d157213be2842d1162c2283abf81d9e2f1e1fc7"}, - "ex_doc": {:hex, :ex_doc, "0.35.1", "de804c590d3df2d9d5b8aec77d758b00c814b356119b3d4455e4b8a8687aecaf", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "2121c6402c8d44b05622677b761371a759143b958c6c19f6558ff64d0aed40df"}, + "ex_doc": {:hex, :ex_doc, "0.36.1", "4197d034f93e0b89ec79fac56e226107824adcce8d2dd0a26f5ed3a95efc36b1", [:mix], [{:earmark_parser, "~> 1.4.42", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "d7d26a7cf965dacadcd48f9fa7b5953d7d0cfa3b44fa7a65514427da44eafd89"}, + "ex_hash_ring": {:hex, :ex_hash_ring, "6.0.4", "bef9d2d796afbbe25ab5b5a7ed746e06b99c76604f558113c273466d52fa6d6b", [:mix], [], "hexpm", "89adabf31f7d3dfaa36802ce598ce918e9b5b33bae8909ac1a4d052e1e567d18"}, "ex_json_schema": {:hex, :ex_json_schema, "0.10.2", "7c4b8c1481fdeb1741e2ce66223976edfb9bccebc8014f6aec35d4efe964fb71", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "37f43be60f8407659d4d0155a7e45e7f406dab1f827051d3d35858a709baf6a6"}, - "ex_keccak": {:hex, :ex_keccak, "0.7.5", "f3b733173510d48ae9a1ea1de415e694b2651f35c787e63f33b5ed0013fbfd35", [:mix], [{:rustler, ">= 0.0.0", [hex: :rustler, repo: "hexpm", optional: true]}, {:rustler_precompiled, "~> 0.7", [hex: :rustler_precompiled, repo: "hexpm", optional: false]}], "hexpm", "8a5e1cb7f96fff5e480ff6a121477b90c4fd8c150984086dffd98819f5d83763"}, + "ex_keccak": {:hex, :ex_keccak, "0.7.6", "110c3ed76b55265975d9ae6628205b8a026f11fe081f3073e00c29aab2e91473", [:mix], [{:rustler, ">= 0.0.0", [hex: :rustler, repo: "hexpm", optional: true]}, {:rustler_precompiled, "~> 0.8", [hex: :rustler_precompiled, repo: "hexpm", optional: false]}], "hexpm", "9d1568424eb7b995e480d1b7f0c1e914226ee625496600abb922bba6f5cdc5e4"}, "ex_machina": {:hex, :ex_machina, "2.8.0", "a0e847b5712065055ec3255840e2c78ef9366634d62390839d4880483be38abe", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm", "79fe1a9c64c0c1c1fab6c4fa5d871682cb90de5885320c187d117004627a7729"}, "ex_rlp": {:hex, :ex_rlp, "0.6.0", "985391d2356a7cb8712a4a9a2deb93f19f2fbca0323f5c1203fcaf64d077e31e", [:mix], [], "hexpm", "7135db93b861d9e76821039b60b00a6a22d2c4e751bf8c444bffe7a042f1abaf"}, - "ex_secp256k1": {:hex, :ex_secp256k1, "0.7.3", 
"489b6fd57fab5a7259ace927d8cb5171aad835e9c7279c2a859a0483cb069415", [:mix], [{:rustler, ">= 0.0.0", [hex: :rustler, repo: "hexpm", optional: true]}, {:rustler_precompiled, "~> 0.7", [hex: :rustler_precompiled, repo: "hexpm", optional: false]}], "hexpm", "ea63159442f4d8143166cd1507da03edc43216d6e7c6bac4b416bdce04f0daa8"}, + "ex_secp256k1": {:hex, :ex_secp256k1, "0.7.4", "d771a74917eb21e9f02e52bb1d0ac9aba4e2b5118d7f515014d0a311c990e323", [:mix], [{:rustler, ">= 0.0.0", [hex: :rustler, repo: "hexpm", optional: true]}, {:rustler_precompiled, "~> 0.8", [hex: :rustler_precompiled, repo: "hexpm", optional: false]}], "hexpm", "465fd788c83c24d2df47f302e8fb1011054c81a905345e377c957b159a783bfc"}, "ex_utils": {:hex, :ex_utils, "0.1.7", "2c133e0bcdc49a858cf8dacf893308ebc05bc5fba501dc3d2935e65365ec0bf3", [:mix], [], "hexpm", "66d4fe75285948f2d1e69c2a5ddd651c398c813574f8d36a9eef11dc20356ef6"}, "exactor": {:hex, :exactor, "2.2.4", "5efb4ddeb2c48d9a1d7c9b465a6fffdd82300eb9618ece5d34c3334d5d7245b1", [:mix], [], "hexpm", "1222419f706e01bfa1095aec9acf6421367dcfab798a6f67c54cf784733cd6b5"}, "exjsx": {:hex, :exjsx, "4.0.0", "60548841e0212df401e38e63c0078ec57b33e7ea49b032c796ccad8cde794b5c", [:mix], [{:jsx, "~> 2.8.0", [hex: :jsx, repo: "hexpm", optional: false]}], "hexpm", "32e95820a97cffea67830e91514a2ad53b888850442d6d395f53a1ac60c82e07"}, @@ -64,27 +71,29 @@ "file_info": {:hex, :file_info, "0.0.4", "2e0e77f211e833f38ead22cb29ce53761d457d80b3ffe0ffe0eb93880b0963b2", [:mix], [{:mimetype_parser, "~> 0.1.2", [hex: :mimetype_parser, repo: "hexpm", optional: false]}], "hexpm", "50e7ad01c2c8b9339010675fe4dc4a113b8d6ca7eddce24d1d74fd0e762781a5"}, "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, "finch": {:hex, :finch, "0.18.0", "944ac7d34d0bd2ac8998f79f7a811b21d87d911e77a786bc5810adb75632ada4", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6 or ~> 1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "69f5045b042e531e53edc2574f15e25e735b522c37e2ddb766e15b979e03aa65"}, - "floki": {:hex, :floki, "0.36.3", "1102f93b16a55bc5383b85ae3ec470f82dee056eaeff9195e8afdf0ef2a43c30", [:mix], [], "hexpm", "fe0158bff509e407735f6d40b3ee0d7deb47f3f3ee7c6c182ad28599f9f6b27a"}, + "floki": {:hex, :floki, "0.37.0", "b83e0280bbc6372f2a403b2848013650b16640cd2470aea6701f0632223d719e", [:mix], [], "hexpm", "516a0c15a69f78c47dc8e0b9b3724b29608aa6619379f91b1ffa47109b5d0dd3"}, "flow": {:hex, :flow, "1.2.4", "1dd58918287eb286656008777cb32714b5123d3855956f29aa141ebae456922d", [:mix], [{:gen_stage, "~> 1.0", [hex: :gen_stage, repo: "hexpm", optional: false]}], "hexpm", "874adde96368e71870f3510b91e35bc31652291858c86c0e75359cbdd35eb211"}, "gen_stage": {:hex, :gen_stage, "1.2.1", "19d8b5e9a5996d813b8245338a28246307fd8b9c99d1237de199d21efc4c76a1", [:mix], [], "hexpm", "83e8be657fa05b992ffa6ac1e3af6d57aa50aace8f691fcf696ff02f8335b001"}, "gettext": {:hex, :gettext, "0.26.2", "5978aa7b21fada6deabf1f6341ddba50bc69c999e812211903b169799208f2a8", [:mix], [{:expo, "~> 0.5.1 or ~> 1.0", [hex: :expo, repo: "hexpm", 
optional: false]}], "hexpm", "aa978504bcf76511efdc22d580ba08e2279caab1066b76bb9aa81c4a1e0a32a5"}, "hackney": {:hex, :hackney, "1.20.1", "8d97aec62ddddd757d128bfd1df6c5861093419f8f7a4223823537bad5d064e2", [:rebar3], [{:certifi, "~>2.12.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~>6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~>1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~>1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "fe9094e5f1a2a2c0a7d10918fee36bfec0ec2a979994cff8cfe8058cd9af38e3"}, "hammer": {:hex, :hammer, "6.2.1", "5ae9c33e3dceaeb42de0db46bf505bd9c35f259c8defb03390cd7556fea67ee2", [:mix], [{:poolboy, "~> 1.5", [hex: :poolboy, repo: "hexpm", optional: false]}], "hexpm", "b9476d0c13883d2dc0cc72e786bac6ac28911fba7cc2e04b70ce6a6d9c4b2bdc"}, - "hammer_backend_redis": {:hex, :hammer_backend_redis, "6.1.2", "eb296bb4924928e24135308b2afc189201fd09411c870c6bbadea444a49b2f2c", [:mix], [{:hammer, "~> 6.0", [hex: :hammer, repo: "hexpm", optional: false]}, {:redix, "~> 1.1", [hex: :redix, repo: "hexpm", optional: false]}], "hexpm", "217ea066278910543a5e9b577d5bf2425419446b94fe76bdd9f255f39feec9fa"}, + "hammer_backend_redis": {:hex, :hammer_backend_redis, "6.2.0", "f39a9c8491387cdf719a38593311537e3e0251ca54725b6ee9145406821f39d2", [:mix], [{:hammer, "~> 6.0", [hex: :hammer, repo: "hexpm", optional: false]}, {:redix, "~> 1.1", [hex: :redix, repo: "hexpm", optional: false]}], "hexpm", "9965d55705d7ca7412bb0685f5cd44fc47d103bf388abc50438e71974c36c9fa"}, "hpax": {:hex, :hpax, "1.0.0", "28dcf54509fe2152a3d040e4e3df5b265dcb6cb532029ecbacf4ce52caea3fd2", [:mix], [], "hexpm", "7f1314731d711e2ca5fdc7fd361296593fc2542570b3105595bb0bc6d0fad601"}, "html_entities": {:hex, :html_entities, "0.5.2", "9e47e70598da7de2a9ff6af8758399251db6dbb7eebe2b013f2bbd2515895c3c", [:mix], [], "hexpm", "c53ba390403485615623b9531e97696f076ed415e8d8058b1dbaa28181f4fdcc"}, "httpoison": {:hex, :httpoison, "2.2.1", "87b7ed6d95db0389f7df02779644171d7319d319178f6680438167d7b69b1f3d", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "51364e6d2f429d80e14fe4b5f8e39719cacd03eb3f9a9286e61e216feac2d2df"}, "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"}, + "image": {:hex, :image, "0.55.2", "f21b5341ee05dfe2e0f649c34c6335cbce44be55e3ce3ced404ac008bef6c335", [:mix], [{:bumblebee, "~> 0.3", [hex: :bumblebee, repo: "hexpm", optional: true]}, {:evision, "~> 0.1.33 or ~> 0.2", [hex: :evision, repo: "hexpm", optional: true]}, {:exla, "~> 0.5", [hex: :exla, repo: "hexpm", optional: true]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: true]}, {:kino, "~> 0.13", [hex: :kino, repo: "hexpm", optional: true]}, {:nx, "~> 0.7", [hex: :nx, repo: "hexpm", optional: true]}, {:nx_image, "~> 0.1", [hex: :nx_image, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 2.1 or ~> 3.2 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.13", [hex: :plug, repo: "hexpm", optional: true]}, {:req, "~> 0.4", [hex: :req, 
repo: "hexpm", optional: true]}, {:rustler, "> 0.0.0", [hex: :rustler, repo: "hexpm", optional: true]}, {:scholar, "~> 0.3", [hex: :scholar, repo: "hexpm", optional: true]}, {:sweet_xml, "~> 0.7", [hex: :sweet_xml, repo: "hexpm", optional: false]}, {:vix, "~> 0.23", [hex: :vix, repo: "hexpm", optional: false]}], "hexpm", "aa126e45b514810d1af89eded505ed3e523acefbb005f6220f8fbc1955904607"}, "inflex": {:hex, :inflex, "2.1.0", "a365cf0821a9dacb65067abd95008ca1b0bb7dcdd85ae59965deef2aa062924c", [:mix], [], "hexpm", "14c17d05db4ee9b6d319b0bff1bdf22aa389a25398d1952c7a0b5f3d93162dd8"}, "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"}, "joken": {:hex, :joken, "2.6.2", "5daaf82259ca603af4f0b065475099ada1b2b849ff140ccd37f4b6828ca6892a", [:mix], [{:jose, "~> 1.11.10", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "5134b5b0a6e37494e46dbf9e4dad53808e5e787904b7c73972651b51cce3d72b"}, "jose": {:hex, :jose, "1.11.10", "a903f5227417bd2a08c8a00a0cbcc458118be84480955e8d251297a425723f83", [:mix, :rebar3], [], "hexpm", "0d6cd36ff8ba174db29148fc112b5842186b68a90ce9fc2b3ec3afe76593e614"}, "jsx": {:hex, :jsx, "2.8.3", "a05252d381885240744d955fbe3cf810504eb2567164824e19303ea59eef62cf", [:mix, :rebar3], [], "hexpm", "fc3499fed7a726995aa659143a248534adc754ebd16ccd437cd93b649a95091f"}, + "jumper": {:hex, :jumper, "1.0.2", "68cdcd84472a00ac596b4e6459a41b3062d4427cbd4f1e8c8793c5b54f1406a7", [:mix], [], "hexpm", "9b7782409021e01ab3c08270e26f36eb62976a38c1aa64b2eaf6348422f165e1"}, "junit_formatter": {:hex, :junit_formatter, "3.4.0", "d0e8db6c34dab6d3c4154c3b46b21540db1109ae709d6cf99ba7e7a2ce4b1ac2", [:mix], [], "hexpm", "bb36e2ae83f1ced6ab931c4ce51dd3dbef1ef61bb4932412e173b0cfa259dacd"}, "logger_file_backend": {:hex, :logger_file_backend, "0.0.14", "774bb661f1c3fed51b624d2859180c01e386eb1273dc22de4f4a155ef749a602", [:mix], [], "hexpm", "071354a18196468f3904ef09413af20971d55164267427f6257b52cfba03f9e6"}, "logger_json": {:hex, :logger_json, "5.1.4", "9e30a4f2e31a8b9e402bdc20bd37cf9b67d3a31f19d0b33082a19a06b4c50f6d", [:mix], [{:ecto, "~> 2.1 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:phoenix, ">= 1.5.0", [hex: :phoenix, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "3f20eea58e406a33d3eb7814c7dff5accb503bab2ee8601e84da02976fa3934c"}, "makeup": {:hex, :makeup, "1.2.1", "e90ac1c65589ef354378def3ba19d401e739ee7ee06fb47f94c687016e3713d1", [:mix], [{:nimble_parsec, "~> 1.4", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "d36484867b0bae0fea568d10131197a4c2e47056a6fbe84922bf6ba71c8d17ce"}, - "makeup_elixir": {:hex, :makeup_elixir, "1.0.0", "74bb8348c9b3a51d5c589bf5aebb0466a84b33274150e3b6ece1da45584afc82", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "49159b7d7d999e836bedaf09dcf35ca18b312230cf901b725a64f3f42e407983"}, + "makeup_elixir": {:hex, :makeup_elixir, "1.0.1", "e928a4f984e795e41e3abd27bfc09f51db16ab8ba1aebdba2b3a575437efafc2", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, 
{:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "7284900d412a3e5cfd97fdaed4f5ed389b8f2b4cb49efc0eb3bd10e2febf9507"}, "makeup_erlang": {:hex, :makeup_erlang, "1.0.1", "c7f58c120b2b5aa5fd80d540a89fdf866ed42f1f3994e4fe189abebeab610839", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "8a89a1eeccc2d798d6ea15496a6e4870b75e014d1af514b1b71fa33134f57814"}, "math": {:hex, :math, "0.7.0", "12af548c3892abf939a2e242216c3e7cbfb65b9b2fe0d872d05c6fb609f8127b", [:mix], [], "hexpm", "7987af97a0c6b58ad9db43eb5252a49fc1dfe1f6d98f17da9282e297f594ebc2"}, "meck": {:hex, :meck, "0.9.2", "85ccbab053f1db86c7ca240e9fc718170ee5bda03810a6292b5306bf31bae5f5", [:rebar3], [], "hexpm", "81344f561357dc40a8344afa53767c32669153355b626ea9fcbc8da6b3045826"}, @@ -95,7 +104,7 @@ "mimetype_parser": {:hex, :mimetype_parser, "0.1.3", "628ac9fe56aa7edcedb534d68397dd66674ab82493c8ebe39acb9a19b666099d", [:mix], [], "hexpm", "7d8f80c567807ce78cd93c938e7f4b0a20b1aaaaab914bf286f68457d9f7a852"}, "mint": {:hex, :mint, "1.6.2", "af6d97a4051eee4f05b5500671d47c3a67dac7386045d87a904126fd4bbcea2e", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "5ee441dffc1892f1ae59127f74afe8fd82fda6587794278d924e4d90ea3d63f9"}, "mix_erlang_tasks": {:hex, :mix_erlang_tasks, "0.1.0", "36819fec60b80689eb1380938675af215565a89320a9e29c72c70d97512e4649", [:mix], [], "hexpm", "95d2839c422c482a70c08a8702da8242f86b773f8ab6e8602a4eb72da8da04ed"}, - "mock": {:hex, :mock, "0.3.8", "7046a306b71db2488ef54395eeb74df0a7f335a7caca4a3d3875d1fc81c884dd", [:mix], [{:meck, "~> 0.9.2", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm", "7fa82364c97617d79bb7d15571193fc0c4fe5afd0c932cef09426b3ee6fe2022"}, + "mock": {:hex, :mock, "0.3.9", "10e44ad1f5962480c5c9b9fa779c6c63de9bd31997c8e04a853ec990a9d841af", [:mix], [{:meck, "~> 0.9.2", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm", "9e1b244c4ca2551bb17bb8415eed89e40ee1308e0fbaed0a4fdfe3ec8a4adbd3"}, "mox": {:hex, :mox, "1.1.0", "0f5e399649ce9ab7602f72e718305c0f9cdc351190f72844599545e4996af73c", [:mix], [], "hexpm", "d44474c50be02d5b72131070281a5d3895c0e7a95c780e90bc0cfe712f633a13"}, "msgpax": {:hex, :msgpax, "2.4.0", "4647575c87cb0c43b93266438242c21f71f196cafa268f45f91498541148c15d", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "ca933891b0e7075701a17507c61642bf6e0407bb244040d5d0a58597a06369d2"}, "nimble_csv": {:hex, :nimble_csv, "1.2.0", "4e26385d260c61eba9d4412c71cea34421f296d5353f914afe3f2e71cce97722", [:mix], [], "hexpm", "d0628117fcc2148178b034044c55359b26966c6eaa8e2ce15777be3bbc91b12a"}, @@ -104,14 +113,16 @@ "nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"}, "number": {:hex, :number, "1.0.5", "d92136f9b9382aeb50145782f116112078b3465b7be58df1f85952b8bb399b0f", [:mix], [{:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "c0733a0a90773a66582b9e92a3f01290987f395c972cb7d685f51dd927cd5169"}, "numbers": {:hex, :numbers, "5.2.4", "f123d5bb7f6acc366f8f445e10a32bd403c8469bdbce8ce049e1f0972b607080", [:mix], [{:coerce, "~> 1.0", [hex: :coerce, repo: "hexpm", optional: false]}, {:decimal, "~> 1.9 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", 
"eeccf5c61d5f4922198395bf87a465b6f980b8b862dd22d28198c5e6fab38582"}, + "nx": {:hex, :nx, "0.9.2", "17563029c01bf749aad3c31234326d7665abd0acc33ee2acbe531a4759f29a8a", [:mix], [{:complex, "~> 0.5", [hex: :complex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "914d74741617d8103de8ab1f8c880353e555263e1c397b8a1109f79a3716557f"}, "oauth2": {:hex, :oauth2, "2.1.0", "beb657f393814a3a7a8a15bd5e5776ecae341fd344df425342a3b6f1904c2989", [:mix], [{:tesla, "~> 1.5", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm", "8ac07f85b3307dd1acfeb0ec852f64161b22f57d0ce0c15e616a1dfc8ebe2b41"}, "optimal": {:hex, :optimal, "0.3.6", "46bbf52fbbbd238cda81e02560caa84f93a53c75620f1fe19e81e4ae7b07d1dd", [:mix], [], "hexpm", "1a06ea6a653120226b35b283a1cd10039550f2c566edcdec22b29316d73640fd"}, "parallel_stream": {:hex, :parallel_stream, "1.1.0", "f52f73eb344bc22de335992377413138405796e0d0ad99d995d9977ac29f1ca9", [:mix], [], "hexpm", "684fd19191aedfaf387bbabbeb8ff3c752f0220c8112eb907d797f4592d6e871"}, "parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"}, "phoenix": {:hex, :phoenix, "1.5.14", "2d5db884be496eefa5157505ec0134e66187cb416c072272420c5509d67bf808", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 2.13 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.10", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.2", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.1.2 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "207f1aa5520320cbb7940d7ff2dde2342162cf513875848f88249ea0ba02fef7"}, "phoenix_ecto": {:hex, :phoenix_ecto, "4.6.3", "f686701b0499a07f2e3b122d84d52ff8a31f5def386e03706c916f6feddf69ef", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "909502956916a657a197f94cc1206d9a65247538de8a5e186f7537c895d95764"}, - "phoenix_html": {:hex, :phoenix_html, "3.0.4", "232d41884fe6a9c42d09f48397c175cd6f0d443aaa34c7424da47604201df2e1", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "ce17fd3cf815b2ed874114073e743507704b1f5288bb03c304a77458485efc8b"}, + "phoenix_html": {:hex, :phoenix_html, "3.3.4", "42a09fc443bbc1da37e372a5c8e6755d046f22b9b11343bf885067357da21cb3", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "0249d3abec3714aff3415e7ee3d9786cb325be3151e6c4b3021502c585bf53fb"}, "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.3.3", "3a53772a6118d5679bf50fc1670505a290e32a1d195df9e069d8c53ab040c054", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "766796676e5f558dbae5d1bdb066849673e956005e3730dfd5affd7a6da4abac"}, + "phoenix_live_view": {:hex, :phoenix_live_view, "0.17.7", 
"05a42377075868a678d446361effba80cefef19ab98941c01a7a4c7560b29121", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.5.9 or ~> 1.6.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.1", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "25eaf41028eb351b90d4f69671874643a09944098fefd0d01d442f40a6091b6f"}, "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", "3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"}, "plug": {:hex, :plug, "1.16.1", "40c74619c12f82736d2214557dedec2e9762029b2438d6d175c5074c933edc9d", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a13ff6b9006b03d7e33874945b2755253841b238c34071ed85b0e86057f8cddc"}, "plug_cowboy": {:hex, :plug_cowboy, "2.7.2", "fdadb973799ae691bf9ecad99125b16625b1c6039999da5fe544d99218e662e4", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "245d8a11ee2306094840c000e8816f0cbed69a23fc0ac2bcf8d7835ae019bb2f"}, @@ -133,8 +144,9 @@ "redix": {:hex, :redix, "1.5.2", "ab854435a663f01ce7b7847f42f5da067eea7a3a10c0a9d560fa52038fd7ab48", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:nimble_options, "~> 0.5.0 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "78538d184231a5d6912f20567d76a49d1be7d3fca0e1aaaa20f4df8e1142dcb8"}, "remote_ip": {:hex, :remote_ip, "1.2.0", "fb078e12a44414f4cef5a75963c33008fe169b806572ccd17257c208a7bc760f", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "2ff91de19c48149ce19ed230a81d377186e4412552a597d6a5137373e5877cb7"}, "req": {:hex, :req, "0.5.6", "8fe1eead4a085510fe3d51ad854ca8f20a622aae46e97b302f499dfb84f726ac", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 2.0.6 or ~> 2.1", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "cfaa8e720945d46654853de39d368f40362c2641c4b2153c886418914b372185"}, - "rustler_precompiled": {:hex, :rustler_precompiled, "0.8.1", "8afe0b6f3a9a677ada046cdd23e3f4c6399618b91a6122289324774961281e1e", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:rustler, "~> 0.23", [hex: :rustler, repo: "hexpm", optional: true]}], "hexpm", "90b8c2297bf7959cfa1c927b2881faad7bb0707183124955369991b76177a166"}, + "rustler_precompiled": {:hex, :rustler_precompiled, "0.8.2", "5f25cbe220a8fac3e7ad62e6f950fcdca5a5a5f8501835d2823e8c74bf4268d5", [:mix], 
[{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:rustler, "~> 0.23", [hex: :rustler, repo: "hexpm", optional: true]}], "hexpm", "63d1bd5f8e23096d1ff851839923162096364bac8656a4a3c00d1fff8e83ee0a"}, "siwe": {:git, "https://github.com/royal-markets/siwe-ex.git", "51c9c08240eb7eea3c35693011f8d260cd9bb3be", [ref: "51c9c08240eb7eea3c35693011f8d260cd9bb3be"]}, + "sleeplocks": {:hex, :sleeplocks, "1.1.3", "96a86460cc33b435c7310dbd27ec82ca2c1f24ae38e34f8edde97f756503441a", [:rebar3], [], "hexpm", "d3b3958552e6eb16f463921e70ae7c767519ef8f5be46d7696cc1ed649421321"}, "sobelow": {:hex, :sobelow, "0.13.0", "218afe9075904793f5c64b8837cc356e493d88fddde126a463839351870b8d1e", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "cd6e9026b85fc35d7529da14f95e85a078d9dd1907a9097b3ba6ac7ebbe34a0d"}, "spandex": {:hex, :spandex, "3.2.0", "f8cd40146ea988c87f3c14054150c9a47ba17e53cd4515c00e1f93c29c45404d", [:mix], [{:decorator, "~> 1.2", [hex: :decorator, repo: "hexpm", optional: true]}, {:optimal, "~> 0.3.3", [hex: :optimal, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "d0a7d5aef4c5af9cf5467f2003e8a5d8d2bdae3823a6cc95d776b9a2251d4d03"}, "spandex_datadog": {:hex, :spandex_datadog, "1.4.0", "0594b9655b0af00ab9137122616bc0208b68ceec01e9916ab13d6fbb33dcce35", [:mix], [{:msgpax, "~> 2.2.1 or ~> 2.3", [hex: :msgpax, repo: "hexpm", optional: false]}, {:spandex, "~> 3.2", [hex: :spandex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "360f8e1b4db238c1749c4872b1697b096429927fa42b8858d0bb782067380123"}, @@ -142,16 +154,18 @@ "spandex_phoenix": {:hex, :spandex_phoenix, "1.1.0", "9cff829d05258dd49a227c56711b19b69a8fd5d4873d8e9a92a4f4097e7322ab", [:mix], [{:phoenix, "~> 1.0", [hex: :phoenix, repo: "hexpm", optional: true]}, {:plug, "~> 1.3", [hex: :plug, repo: "hexpm", optional: false]}, {:spandex, "~> 2.2 or ~> 3.0", [hex: :spandex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "265fe05c1736485fbb75d66ef7576682ebf6428c391dd54d22217f612fd4ddad"}, "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"}, "statistex": {:hex, :statistex, "1.0.0", "f3dc93f3c0c6c92e5f291704cf62b99b553253d7969e9a5fa713e5481cd858a5", [:mix], [], "hexpm", "ff9d8bee7035028ab4742ff52fc80a2aa35cece833cf5319009b52f1b5a86c27"}, + "sweet_xml": {:hex, :sweet_xml, "0.7.5", "803a563113981aaac202a1dbd39771562d0ad31004ddbfc9b5090bdcd5605277", [:mix], [], "hexpm", "193b28a9b12891cae351d81a0cead165ffe67df1b73fe5866d10629f4faefb12"}, "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"}, "tesla": {:hex, :tesla, "1.13.0", "24a068a48d107080dd7c943a593997eee265977a38020eb2ab657cca78a12502", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.13", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, ">= 1.0.0", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: 
:hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.2", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:mox, "~> 1.0", [hex: :mox, repo: "hexpm", optional: true]}, {:msgpax, "~> 2.3", [hex: :msgpax, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "7b8fc8f6b0640fa0d090af7889d12eb396460e044b6f8688a8e55e30406a2200"}, "timex": {:hex, :timex, "3.7.11", "bb95cb4eb1d06e27346325de506bcc6c30f9c6dea40d1ebe390b262fad1862d1", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.20", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.1", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "8b9024f7efbabaf9bd7aa04f65cf8dcd7c9818ca5737677c7b76acbc6a94d1aa"}, - "toml": {:hex, :toml, "0.6.2", "38f445df384a17e5d382befe30e3489112a48d3ba4c459e543f748c2f25dd4d1", [:mix], [], "hexpm", "d013e45126d74c0c26a38d31f5e8e9b83ea19fc752470feb9a86071ca5a672fa"}, "typed_ecto_schema": {:hex, :typed_ecto_schema, "0.4.1", "a373ca6f693f4de84cde474a67467a9cb9051a8a7f3f615f1e23dc74b75237fa", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}], "hexpm", "85c6962f79d35bf543dd5659c6adc340fd2480cacc6f25d2cc2933ea6e8fcb3b"}, "tzdata": {:hex, :tzdata, "1.1.1", "20c8043476dfda8504952d00adac41c6eda23912278add38edc140ae0c5bcc46", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "a69cec8352eafcd2e198dea28a34113b60fdc6cb57eb5ad65c10292a6ba89787"}, "ueberauth": {:hex, :ueberauth, "0.10.8", "ba78fbcbb27d811a6cd06ad851793aaf7d27c3b30c9e95349c2c362b344cd8f0", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "f2d3172e52821375bccb8460e5fa5cb91cfd60b19b636b6e57e9759b6f8c10c1"}, "ueberauth_auth0": {:hex, :ueberauth_auth0, "2.1.0", "0632d5844049fa2f26823f15e1120aa32f27df6f27ce515a4b04641736594bf4", [:mix], [{:oauth2, "~> 2.0", [hex: :oauth2, repo: "hexpm", optional: false]}, {:ueberauth, "~> 0.7", [hex: :ueberauth, repo: "hexpm", optional: false]}], "hexpm", "8d3b30fa27c95c9e82c30c4afb016251405706d2e9627e603c3c9787fd1314fc"}, "unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"}, + "unsafe": {:hex, :unsafe, "1.0.2", "23c6be12f6c1605364801f4b47007c0c159497d0446ad378b5cf05f1855c0581", [:mix], [], "hexpm", "b485231683c3ab01a9cd44cb4a79f152c6f3bb87358439c6f68791b85c2df675"}, "varint": {:hex, :varint, "1.4.0", "b7405c8a99db7b95d4341fa9cb15e7c3af6c8dda43e21bbe1c4a9cdff50b6502", [:mix], [], "hexpm", "0fd461901b7120c03467530dff3c58fa3475328fd75ba72c7d3cbf13bce6b0d2"}, + "vix": {:hex, :vix, "0.31.1", "2b1d379393060ee8e4e1f1c9a621811c4091d8f063221c1ff24a41a4f0c97edc", [:make, :mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:cc_precompiler, "~> 0.1.4 or ~> 0.2", [hex: :cc_precompiler, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.7.3 or ~> 0.8", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:kino, "~> 0.7", [hex: :kino, repo: "hexpm", optional: true]}], "hexpm", 
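Since `mix.lock` is plain Elixir data, the version bumps above are easy to audit from a shell. A small check, assuming it is run from the repository root (the dependency list is just an example):

```elixir
{lock, _binding} = Code.eval_file("mix.lock")

lock
|> Map.take([:cachex, :ex_aws_s3, :image, :nx, :vix])
|> Enum.each(fn {dep, spec} -> IO.puts("#{dep} => #{elem(spec, 2)}") end)
# elem(spec, 2) is the pinned version, e.g. cachex => 4.0.3
```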
"766856b52bec222cb5fd301f645a7a9869b61e0ec6e87dc0789ae9657356a8ea"}, "wallaby": {:hex, :wallaby, "0.30.9", "51d60682092c3c428c63b656b818e2258202b9f9a31ec37230659647ae20325b", [:mix], [{:ecto_sql, ">= 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}, {:httpoison, "~> 0.12 or ~> 1.0 or ~> 2.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:phoenix_ecto, ">= 3.0.0", [hex: :phoenix_ecto, repo: "hexpm", optional: true]}, {:web_driver_client, "~> 0.2.0", [hex: :web_driver_client, repo: "hexpm", optional: false]}], "hexpm", "62e3ccb89068b231b50ed046219022020516d44f443eebef93a19db4be95b808"}, "web_driver_client": {:hex, :web_driver_client, "0.2.0", "63b76cd9eb3b0716ec5467a0f8bead73d3d9612e63f7560d21357f03ad86e31a", [:mix], [{:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:tesla, "~> 1.3", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm", "83cc6092bc3e74926d1c8455f0ce927d5d1d36707b74d9a65e38c084aab0350f"}, "websockex": {:hex, :websockex, "0.4.3", "92b7905769c79c6480c02daacaca2ddd49de936d912976a4d3c923723b647bf0", [:mix], [], "hexpm", "95f2e7072b85a3a4cc385602d42115b73ce0b74a9121d0d6dbbf557645ac53e4"}, diff --git a/rel/config.exs b/rel/config.exs index 5f7d8e9afaaa..bf76f3165d28 100644 --- a/rel/config.exs +++ b/rel/config.exs @@ -71,7 +71,7 @@ end # will be used by default release :blockscout do - set version: "6.9.2-beta" + set version: "6.10.1" set applications: [ :runtime_tools, block_scout_web: :permanent,