From 7bdcd476fdead9670820e71d4541e5b53bce21dc Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Tue, 10 Mar 2026 09:15:31 +0300 Subject: [PATCH 01/42] chore: Allow disabling contract creation internal transaction association (#14090) --- .../controllers/api/v2/address_controller.ex | 7 ++- .../api/v2/smart_contract_controller.ex | 7 ++- apps/explorer/lib/explorer/chain/address.ex | 61 +++++++++++++------ .../test/explorer/chain/address_test.exs | 46 ++++++++++++++ config/runtime.exs | 2 + docker-compose/envs/common-blockscout.env | 1 + 6 files changed, 101 insertions(+), 23 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex index 0f85998254ff..5a1b0502999c 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex @@ -156,10 +156,13 @@ defmodule BlockScoutWeb.API.V2.AddressController do @spec contract_address_preloads() :: [keyword()] defp contract_address_preloads do + include_internal_tx = + !Application.get_env(:explorer, :api_disable_contract_creation_internal_transaction_association, false) + chain_type_associations = case chain_type() do - :filecoin -> Address.contract_creation_transaction_with_from_address_associations() - _ -> Address.contract_creation_transaction_associations() + :filecoin -> Address.contract_creation_transaction_with_from_address_associations(include_internal_tx) + _ -> Address.contract_creation_transaction_associations(include_internal_tx) end [:smart_contract | chain_type_associations] diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/smart_contract_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/smart_contract_controller.ex index e3b20526873a..a561f4522200 100644 --- 
a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/smart_contract_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/smart_contract_controller.ex @@ -271,12 +271,15 @@ defmodule BlockScoutWeb.API.V2.SmartContractController do @spec contract_creation_transaction_associations() :: [keyword()] defp contract_creation_transaction_associations do + include_internal_tx = + !Application.get_env(:explorer, :api_disable_contract_creation_internal_transaction_association, false) + case chain_type() do :filecoin -> - Address.contract_creation_transaction_with_from_address_associations() + Address.contract_creation_transaction_with_from_address_associations(include_internal_tx) _ -> - Address.contract_creation_transaction_associations() + Address.contract_creation_transaction_associations(include_internal_tx) end end diff --git a/apps/explorer/lib/explorer/chain/address.ex b/apps/explorer/lib/explorer/chain/address.ex index 056423645502..81e9f881de5a 100644 --- a/apps/explorer/lib/explorer/chain/address.ex +++ b/apps/explorer/lib/explorer/chain/address.ex @@ -956,35 +956,58 @@ defmodule Explorer.Chain.Address do end @doc """ - Returns both contract creation transaction and internal transaction - associations. + Returns contract creation transaction associations. - This is a convenience function that combines both types of contract creation - associations. + By default, includes both the regular transaction association and the internal + transaction association. Can be customized via the `include_internal_transaction` + parameter. + + ## Parameters + + - `include_internal_transaction`: Whether to include the internal transaction + association. Defaults to `true`. Set to `false` to return only the regular + transaction association. ## Returns - A list containing both contract creation transaction and internal transaction - associations. + A list containing the contract creation transaction associations. 
""" - @spec contract_creation_transaction_associations() :: [keyword()] - def contract_creation_transaction_associations do - [ - contract_creation_transaction_association(), - contract_creation_internal_transaction_association() - ] + @spec contract_creation_transaction_associations(boolean()) :: [keyword()] + def contract_creation_transaction_associations(include_internal_transaction \\ true) do + if include_internal_transaction do + [ + contract_creation_transaction_association(), + contract_creation_internal_transaction_association() + ] + else + [contract_creation_transaction_association()] + end end @doc """ - Same as `contract_creation_transaction_associations/0`, but preloads a nested + Same as `contract_creation_transaction_associations/1`, but preloads a nested association for the `from_address` field. Used for Filecoin chain type. + + ## Parameters + + - `include_internal_transaction`: Whether to include the internal transaction + association. Defaults to `true`. Set to `false` to return only the regular + transaction association. + + ## Returns + + A list containing the contract creation transaction associations with from_address. 
""" - @spec contract_creation_transaction_with_from_address_associations() :: [keyword()] - def contract_creation_transaction_with_from_address_associations do - [ - contract_creation_transaction_with_from_address_association(), - contract_creation_internal_transaction_with_from_address_association() - ] + @spec contract_creation_transaction_with_from_address_associations(boolean()) :: [keyword()] + def contract_creation_transaction_with_from_address_associations(include_internal_transaction \\ true) do + if include_internal_transaction do + [ + contract_creation_transaction_with_from_address_association(), + contract_creation_internal_transaction_with_from_address_association() + ] + else + [contract_creation_transaction_with_from_address_association()] + end end @doc """ diff --git a/apps/explorer/test/explorer/chain/address_test.exs b/apps/explorer/test/explorer/chain/address_test.exs index 15bd4193af18..b731a4b7b376 100644 --- a/apps/explorer/test/explorer/chain/address_test.exs +++ b/apps/explorer/test/explorer/chain/address_test.exs @@ -172,4 +172,50 @@ defmodule Explorer.Chain.AddressTest do assert response == {:ok, address} end end + + describe "contract_creation_transaction_associations/1" do + test "by default includes both transaction and internal transaction associations" do + associations = Address.contract_creation_transaction_associations() + + assert Enum.any?(associations, &Keyword.has_key?(&1, :contract_creation_transaction)) + assert Enum.any?(associations, &Keyword.has_key?(&1, :contract_creation_internal_transaction)) + end + + test "includes both associations when include_internal_transaction is true" do + associations = Address.contract_creation_transaction_associations(true) + + assert Enum.any?(associations, &Keyword.has_key?(&1, :contract_creation_transaction)) + assert Enum.any?(associations, &Keyword.has_key?(&1, :contract_creation_internal_transaction)) + end + + test "excludes internal transaction association when 
include_internal_transaction is false" do + associations = Address.contract_creation_transaction_associations(false) + + assert Enum.any?(associations, &Keyword.has_key?(&1, :contract_creation_transaction)) + refute Enum.any?(associations, &Keyword.has_key?(&1, :contract_creation_internal_transaction)) + end + end + + describe "contract_creation_transaction_with_from_address_associations/1" do + test "by default includes both transaction and internal transaction associations" do + associations = Address.contract_creation_transaction_with_from_address_associations() + + assert Enum.any?(associations, &Keyword.has_key?(&1, :contract_creation_transaction)) + assert Enum.any?(associations, &Keyword.has_key?(&1, :contract_creation_internal_transaction)) + end + + test "includes both associations when include_internal_transaction is true" do + associations = Address.contract_creation_transaction_with_from_address_associations(true) + + assert Enum.any?(associations, &Keyword.has_key?(&1, :contract_creation_transaction)) + assert Enum.any?(associations, &Keyword.has_key?(&1, :contract_creation_internal_transaction)) + end + + test "excludes internal transaction association when include_internal_transaction is false" do + associations = Address.contract_creation_transaction_with_from_address_associations(false) + + assert Enum.any?(associations, &Keyword.has_key?(&1, :contract_creation_transaction)) + refute Enum.any?(associations, &Keyword.has_key?(&1, :contract_creation_internal_transaction)) + end + end end diff --git a/config/runtime.exs b/config/runtime.exs index d10e89a493ce..17d242e8f24a 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -300,6 +300,8 @@ config :explorer, chain_identity: ConfigHelper.chain_identity(), coin: coin, coin_name: System.get_env("COIN_NAME") || "ETH", + api_disable_contract_creation_internal_transaction_association: + ConfigHelper.parse_bool_env_var("API_DISABLE_CONTRACT_CREATION_INTERNAL_TRANSACTION_ASSOCIATION"), 
allowed_solidity_evm_versions: System.get_env("CONTRACT_VERIFICATION_ALLOWED_SOLIDITY_EVM_VERSIONS") || "homestead,tangerineWhistle,spuriousDragon,byzantium,constantinople,petersburg,istanbul,berlin,london,paris,shanghai,cancun,prague,osaka,default", diff --git a/docker-compose/envs/common-blockscout.env b/docker-compose/envs/common-blockscout.env index a5922ee10ff0..1421a28bb55e 100644 --- a/docker-compose/envs/common-blockscout.env +++ b/docker-compose/envs/common-blockscout.env @@ -178,6 +178,7 @@ API_V1_WRITE_METHODS_DISABLED=false # API_GRAPHQL_RATE_LIMIT_BY_IP= # API_GRAPHQL_RATE_LIMIT_BY_IP_TIME_INTERVAL= # API_GRAPHQL_RATE_LIMIT_STATIC_API_KEY= +# API_DISABLE_CONTRACT_CREATION_INTERNAL_TRANSACTION_ASSOCIATION=false # DISABLE_INDEXER=false # DISABLE_REALTIME_INDEXER=false # DISABLE_CATCHUP_INDEXER=false From ef67079858ed23628fe67933bffdf0fc05532042 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Tue, 10 Mar 2026 09:33:18 +0300 Subject: [PATCH 02/42] Update CHANGELOG --- CHANGELOG.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 69842c2ce09c..4b6affa1b5ad 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## 10.0.3 + +### ⚙️ Miscellaneous Tasks + +- Allow disabling contract creation internal transaction association ([#14090](https://github.com/blockscout/blockscout/issues/14090)) + + +## 10.0.2 + +### ⚙️ Miscellaneous Tasks + +- Add missing TokenBalance.Current launch in tests ([#14076](https://github.com/blockscout/blockscout/issues/14076)) +- Put backend versions into constants on launch ([#14072](https://github.com/blockscout/blockscout/issues/14072)) + + ## 10.0.1 ### ⚡ Performance @@ -122,6 +137,13 @@ | `CACHE_PBO_COUNT_PERIOD` | Time interval to restart the task which calculates the total pending_block_operations count. 
| `20m` | v5.2.0+ | | v10.0.0 | +## 9.3.7 + +### ⚙️ Miscellaneous Tasks + +- Allow disabling contract creation internal transaction association ([#14090](https://github.com/blockscout/blockscout/issues/14090)) + + ## 9.3.6 ### ⚡ Performance From 407eabc25b0266e90a16fbfb17dfc9c9049dc35f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Mar 2026 10:36:00 +0300 Subject: [PATCH 03/42] chore(deps-dev): bump credo from 1.7.15 to 1.7.16 (#13937) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Victor Baranov --- .../elixir-credo-predicate-naming/SKILL.md | 46 +++++++ .../account/api/v2/user_controller.ex | 30 +++-- .../address_transaction_controller.ex | 46 ++++--- .../api/rpc/contract_controller.ex | 45 +++---- .../v1/verified_smart_contract_controller.ex | 15 +-- .../api/v2/csv_export_controller.ex | 55 +++++---- .../controllers/api/v2/mud_controller.ex | 30 +++-- .../controllers/api/v2/optimism_controller.ex | 17 +-- .../api/v2/transaction_controller.ex | 34 +++--- .../controllers/smart_contract_controller.ex | 110 +++++++++-------- .../controllers/transaction_controller.ex | 96 ++++++--------- .../transaction_raw_trace_controller.ex | 24 ++-- .../visualize_sol2uml_controller.ex | 50 ++++---- .../ethereum_jsonrpc/pending_transaction.ex | 30 ++--- .../lib/explorer/account/notifier/notify.ex | 18 +-- .../lib/explorer/chain/bridged_token.ex | 26 ++-- apps/explorer/lib/explorer/chain/log.ex | 49 +++++--- .../explorer/chain/smart_contract/proxy.ex | 38 +++--- .../chain/smart_contract/proxy/eip_1967.ex | 18 +-- .../chain/transaction/history/historian.ex | 49 ++++---- apps/explorer/lib/explorer/eth_rpc.ex | 26 ++-- .../lib/explorer/market/source/coin_gecko.ex | 67 +++++----- .../explorer/market/source/coin_market_cap.ex | 34 +++--- .../lib/explorer/market/source/dia.ex | 114 ++++++++++-------- 
.../lib/explorer/market/source/mobula.ex | 48 ++++---- .../restore_omitted_weth_transfers.ex | 18 +-- .../lib/explorer/tags/address_to_tag.ex | 28 +++-- .../third_party_integrations/sourcify.ex | 4 +- .../lib/explorer/token/metadata_retriever.ex | 27 +++-- .../lib/explorer/visualize/sol2uml.ex | 2 +- apps/explorer/lib/test_helper.ex | 20 +-- .../fetcher/arbitrum/workers/backfill.ex | 20 +-- .../workers/confirmations/rollup_blocks.ex | 52 +++++--- .../indexer/fetcher/filecoin/address_info.ex | 16 ++- .../indexer/fetcher/internal_transaction.ex | 22 ++-- .../fetcher/optimism/eip1559_config_update.ex | 107 +++++++++------- .../fetcher/optimism/transaction_batch.ex | 79 ++++++------ .../indexer/fetcher/zilliqa/zrc2_tokens.ex | 107 +++++++++------- .../indexer/pending_transactions_sanitizer.ex | 24 ++-- .../transform/address_coin_balances_daily.ex | 31 ++--- .../lib/nft_media_handler.ex | 7 +- mix.lock | 2 +- 42 files changed, 953 insertions(+), 728 deletions(-) create mode 100644 .agents/skills/elixir-credo-predicate-naming/SKILL.md diff --git a/.agents/skills/elixir-credo-predicate-naming/SKILL.md b/.agents/skills/elixir-credo-predicate-naming/SKILL.md new file mode 100644 index 000000000000..1db47e184700 --- /dev/null +++ b/.agents/skills/elixir-credo-predicate-naming/SKILL.md @@ -0,0 +1,46 @@ +--- +name: elixir-credo-predicate-naming +description: "Use when working on Elixir code with Credo predicate naming warnings, boolean helper functions, or renaming functions that start with is_. Prevents violations like: Predicate function names should not start with 'is' and should end in a question mark." +--- + +# Elixir Credo Predicate Naming + +Use this skill to prevent and fix predicate naming violations in Elixir. + +## Rules + +- Predicate functions must end with `?`. +- Predicate functions must not start with `is_`. +- Prefer names like `valid_*?`, `enabled_*?`, `has_*?`, `can_*?`, `matches_*?`, or `_*?`. + +## Refactor Workflow + +1. 
Find predicate functions named like `is_*?`. +2. Rename each one to a Credo-compliant name that still reads clearly. +3. Update all call sites in the same module and across the codebase. +4. Keep arity unchanged unless behavior intentionally changes. +5. Run a focused Credo check for edited files. + +## Naming Guidance + +- `is_valid_zrc2_transfer_log?/4` -> `valid_zrc2_transfer_log?/4` +- `is_enabled?/1` -> `enabled?/1` +- `is_erc20_transfer?/2` -> `erc20_transfer?/2` +- `is_contract_verified?/1` -> `contract_verified?/1` + +## Safety Checks + +- Preserve semantics during rename. +- Verify no stale references remain. +- If the function is part of a public API, rename consistently and update docs/specs. + +## Verification Commands + +```bash +mix credo path/to/file.ex +mix test +``` + +## Expected Result + +No Credo findings for predicate naming in updated files. diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/user_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/user_controller.ex index 8aca96eca49c..b6fbe259b8e7 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/user_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/user_controller.ex @@ -57,18 +57,7 @@ defmodule BlockScoutWeb.Account.API.V2.UserController do watchlist_addresses_prepared = Enum.map(watchlist_addresses, fn %WatchlistAddress{} = wa -> - balances = - Chain.fetch_paginated_last_token_balances(wa.address_hash, - paging_options: %PagingOptions{page_size: @token_balances_amount + 1} - ) - - count = Enum.count(balances) - overflow? 
= count > @token_balances_amount - - fiat_sum = - balances - |> Enum.take(@token_balances_amount) - |> Enum.reduce(Decimal.new(0), fn tb, acc -> Decimal.add(acc, tb.fiat_value || 0) end) + {fiat_sum, count, overflow?} = watchlist_token_stats(wa.address_hash) %WatchlistAddress{ wa @@ -88,6 +77,23 @@ defmodule BlockScoutWeb.Account.API.V2.UserController do end end + defp watchlist_token_stats(address_hash) do + balances = + Chain.fetch_paginated_last_token_balances(address_hash, + paging_options: %PagingOptions{page_size: @token_balances_amount + 1} + ) + + count = Enum.count(balances) + overflow? = count > @token_balances_amount + + fiat_sum = + balances + |> Enum.take(@token_balances_amount) + |> Enum.reduce(Decimal.new(0), fn tb, acc -> Decimal.add(acc, tb.fiat_value || 0) end) + + {fiat_sum, count, overflow?} + end + def delete_watchlist(conn, %{"id" => watchlist_address_id}) do with {:auth, %{id: uid}} <- {:auth, current_user(conn)}, {:identity, %Identity{} = identity} <- {:identity, Identity.find_identity(uid)}, diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/address_transaction_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/address_transaction_controller.ex index 7f73149a5466..c8046c1ba30c 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/address_transaction_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/address_transaction_controller.ex @@ -66,26 +66,7 @@ defmodule BlockScoutWeb.AddressTransactionController do items_json = Enum.map(results, fn result -> - case result do - {%Chain.Block.Reward{} = emission_reward, %Chain.Block.Reward{} = validator_reward} -> - View.render_to_string( - TransactionView, - "_emission_reward_tile.html", - current_address: address, - emission_funds: emission_reward, - validator: validator_reward - ) - - %Chain.Transaction{} = transaction -> - View.render_to_string( - TransactionView, - "_tile.html", - conn: conn, - current_address: address, - 
transaction: transaction, - burn_address_hash: @burn_address_hash - ) - end + render_address_transaction_item(result, conn, address) end) json(conn, %{items: items_json, next_page_path: next_page_url}) @@ -160,4 +141,29 @@ defmodule BlockScoutWeb.AddressTransactionController do end end end + + defp render_address_transaction_item( + {%Chain.Block.Reward{} = emission_reward, %Chain.Block.Reward{} = validator_reward}, + _conn, + address + ) do + View.render_to_string( + TransactionView, + "_emission_reward_tile.html", + current_address: address, + emission_funds: emission_reward, + validator: validator_reward + ) + end + + defp render_address_transaction_item(%Chain.Transaction{} = transaction, conn, address) do + View.render_to_string( + TransactionView, + "_tile.html", + conn: conn, + current_address: address, + transaction: transaction, + burn_address_hash: @burn_address_hash + ) + end end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/contract_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/contract_controller.ex index ce2bb90b0903..f97acb3256e2 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/contract_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/rpc/contract_controller.ex @@ -131,29 +131,32 @@ defmodule BlockScoutWeb.API.RPC.ContractController do end def verify_via_sourcify(conn, %{"addressHash" => address_hash} = input) do - files = - if Map.has_key?(input, "files") do - input["files"] - else - [] - end + files = sourcify_files(input) - if SmartContract.verified_with_full_match?(address_hash) do - render(conn, :error, error: @verified) + with false <- SmartContract.verified_with_full_match?(address_hash), + {:ok, _verified_status} <- Sourcify.check_by_address(address_hash) do + get_metadata_and_publish(address_hash, conn) else - case Sourcify.check_by_address(address_hash) do - {:ok, _verified_status} -> - get_metadata_and_publish(address_hash, conn) 
- - _ -> - with {:ok, files_array} <- prepare_params(files), - {:ok, validated_files} <- validate_files(files_array) do - verify_and_publish(address_hash, validated_files, conn) - else - {:error, error} -> - render(conn, :error, error: error) - end - end + true -> render(conn, :error, error: @verified) + _ -> verify_via_sourcify_with_files(conn, address_hash, files) + end + end + + defp verify_via_sourcify_with_files(conn, address_hash, files) do + with {:ok, files_array} <- prepare_params(files), + {:ok, validated_files} <- validate_files(files_array) do + verify_and_publish(address_hash, validated_files, conn) + else + {:error, error} -> + render(conn, :error, error: error) + end + end + + defp sourcify_files(input) do + if Map.has_key?(input, "files") do + input["files"] + else + [] end end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v1/verified_smart_contract_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v1/verified_smart_contract_controller.ex index ab3d52e122a2..10c445febda3 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v1/verified_smart_contract_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v1/verified_smart_contract_controller.ex @@ -16,13 +16,7 @@ defmodule BlockScoutWeb.API.V1.VerifiedSmartContractController do send_resp(conn, :created, encode(%{status: :success})) {:error, changeset} -> - errors = - changeset.errors - |> Enum.into(%{}, fn {field, {message, _}} -> - {field, message} - end) - - send_resp(conn, :unprocessable_entity, encode(errors)) + send_resp(conn, :unprocessable_entity, encode(format_changeset_errors(changeset))) end else :invalid_address -> @@ -40,6 +34,13 @@ defmodule BlockScoutWeb.API.V1.VerifiedSmartContractController do end end + defp format_changeset_errors(changeset) do + changeset.errors + |> Enum.into(%{}, fn {field, {message, _}} -> + {field, message} + end) + end + defp validate_address_hash(address_hash) do case 
AddressHash.cast(address_hash) do {:ok, hash} -> {:ok, hash} diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/csv_export_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/csv_export_controller.ex index 00f81e619256..ca0366b22f73 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/csv_export_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/csv_export_controller.ex @@ -59,18 +59,23 @@ defmodule BlockScoutWeb.API.V2.CsvExportController do token_holders |> CurrentTokenBalance.to_csv_format(token) |> CsvHelper.dump_to_stream() - |> Enum.reduce_while(put_resp_params(conn), fn chunk, conn -> - case Conn.chunk(conn, chunk) do - {:ok, conn} -> - {:cont, conn} - - {:error, :closed} -> - {:halt, conn} - end - end) + |> stream_csv_chunks(conn) end end + defp stream_csv_chunks(csv_stream, conn) do + csv_stream + |> Enum.reduce_while(put_resp_params(conn), fn chunk, conn -> + case Conn.chunk(conn, chunk) do + {:ok, conn} -> + {:cont, conn} + + {:error, :closed} -> + {:halt, conn} + end + end) + end + @spec put_resp_params(Conn.t()) :: Conn.t() def put_resp_params(conn) do conn @@ -94,20 +99,7 @@ defmodule BlockScoutWeb.API.V2.CsvExportController do when is_binary(address_hash_string) do with {:ok, address_hash} <- Chain.string_to_address_hash(address_hash_string), {:address_exists, true} <- {:address_exists, Address.address_exists?(address_hash)} do - filter_type = Map.get(params, :filter_type) - filter_value = Map.get(params, :filter_value) - - address_hash - |> csv_export_module.export(from_period, to_period, fetch_scam_token_toggle([], conn), filter_type, filter_value) - |> Enum.reduce_while(put_resp_params(conn), fn chunk, conn -> - case Conn.chunk(conn, chunk) do - {:ok, conn} -> - {:cont, conn} - - {:error, :closed} -> - {:halt, conn} - end - end) + stream_address_export(address_hash, csv_export_module, from_period, to_period, params, conn) else :error -> 
unprocessable_entity(conn) @@ -119,6 +111,23 @@ defmodule BlockScoutWeb.API.V2.CsvExportController do defp items_csv(conn, _, _), do: not_found(conn) + defp stream_address_export(address_hash, csv_export_module, from_period, to_period, params, conn) do + filter_type = Map.get(params, :filter_type) + filter_value = Map.get(params, :filter_value) + + address_hash + |> csv_export_module.export(from_period, to_period, fetch_scam_token_toggle([], conn), filter_type, filter_value) + |> Enum.reduce_while(put_resp_params(conn), fn chunk, conn -> + case Conn.chunk(conn, chunk) do + {:ok, conn} -> + {:cont, conn} + + {:error, :closed} -> + {:halt, conn} + end + end) + end + operation :token_transfers_csv, summary: "Export token transfers as CSV", description: "Exports token transfers for a specific address as a CSV file.", diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/mud_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/mud_controller.ex index e7c60d6dd3ee..36da2d506287 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/mud_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/mud_controller.ex @@ -430,19 +430,27 @@ defmodule BlockScoutWeb.API.V2.MudController do <<1::256>> "0x" <> hex -> - with {:ok, bin} <- Base.decode16(hex, case: :mixed) do - # addresses are padded to 32 bytes with zeros on the right - if FieldSchema.type_of(schema.key_schema, field_idx) == 97 do - <<0::size(256 - byte_size(bin) * 8), bin::binary>> - else - <> - end - end + decode_hex_filter(hex, schema, field_idx) dec -> - with {num, _} <- Integer.parse(dec) do - <> - end + decode_decimal_filter(dec) + end + end + + defp decode_hex_filter(hex, schema, field_idx) do + with {:ok, bin} <- Base.decode16(hex, case: :mixed) do + # addresses are padded to 32 bytes with zeros on the right + if FieldSchema.type_of(schema.key_schema, field_idx) == 97 do + <<0::size(256 - byte_size(bin) * 8), bin::binary>> + else 
+ <> + end + end + end + + defp decode_decimal_filter(dec) do + with {num, _} <- Integer.parse(dec) do + <> end end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/optimism_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/optimism_controller.ex index de8a5b738de1..db862e677778 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/optimism_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/optimism_controller.ex @@ -473,13 +473,7 @@ defmodule BlockScoutWeb.API.V2.OptimismController do _ -> nil end - transfer_token = - if not is_nil(msg.transfer_token_address_hash) do - case Token.get_by_contract_address_hash(msg.transfer_token_address_hash, @api_true) do - nil -> %{contract_address_hash: msg.transfer_token_address_hash, symbol: nil, decimals: nil} - t -> %{contract_address_hash: t.contract_address_hash, symbol: t.symbol, decimals: t.decimals} - end - end + transfer_token = fetch_transfer_token(msg.transfer_token_address_hash) message = msg @@ -501,6 +495,15 @@ defmodule BlockScoutWeb.API.V2.OptimismController do end end + defp fetch_transfer_token(nil), do: nil + + defp fetch_transfer_token(transfer_token_address_hash) do + case Token.get_by_contract_address_hash(transfer_token_address_hash, @api_true) do + nil -> %{contract_address_hash: transfer_token_address_hash, symbol: nil, decimals: nil} + t -> %{contract_address_hash: t.contract_address_hash, symbol: t.symbol, decimals: t.decimals} + end + end + # Calls `InteropMessage.interop_chain_id_to_instance_info` function and depending on the result # returns a map with the instance info. 
# diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex index f66171380558..1c1d26c6ee71 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex @@ -632,23 +632,27 @@ defmodule BlockScoutWeb.API.V2.TransactionController do @spec raw_trace(Plug.Conn.t(), map()) :: Plug.Conn.t() | {atom(), any()} def raw_trace(conn, %{transaction_hash_param: transaction_hash_string} = params) do with {:ok, transaction, _transaction_hash} <- validate_transaction(transaction_hash_string, params) do - if is_nil(transaction.block_number) do - conn - |> put_status(200) - |> render(:raw_trace, %{internal_transactions: []}) - else - FirstTraceOnDemand.maybe_trigger_fetch(transaction, @api_true) + render_raw_trace_response(conn, transaction) + end + end - case Chain.fetch_transaction_raw_traces(transaction) do - {:ok, raw_traces} -> - conn - |> put_status(200) - |> render(:raw_trace, %{raw_traces: raw_traces}) + defp render_raw_trace_response(conn, transaction) do + if is_nil(transaction.block_number) do + conn + |> put_status(200) + |> render(:raw_trace, %{internal_transactions: []}) + else + FirstTraceOnDemand.maybe_trigger_fetch(transaction, @api_true) - {:error, error} -> - Logger.error("Raw trace fetching failed: #{inspect(error)}") - {500, "Error while raw trace fetching"} - end + case Chain.fetch_transaction_raw_traces(transaction) do + {:ok, raw_traces} -> + conn + |> put_status(200) + |> render(:raw_trace, %{raw_traces: raw_traces}) + + {:error, error} -> + Logger.error("Raw trace fetching failed: #{inspect(error)}") + {500, "Error while raw trace fetching"} end end end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/smart_contract_controller.ex 
b/apps/block_scout_web/lib/block_scout_web/controllers/smart_contract_controller.ex index 50efc65576e1..710793da1c4b 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/smart_contract_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/smart_contract_controller.ex @@ -29,30 +29,10 @@ defmodule BlockScoutWeb.SmartContractController do implementation_address_hash_string = implementation_address_hash(contract_type, address) functions = - if action == "write" do - if contract_type == "proxy" do - Writer.write_functions_proxy(implementation_address_hash_string) - else - Writer.write_functions(address.smart_contract) - end - else - if contract_type == "proxy" do - Reader.read_only_functions_proxy(address_hash, implementation_address_hash_string, nil) - else - Reader.read_only_functions(address.smart_contract, address_hash, params["from"]) - end - end + load_functions(action, contract_type, implementation_address_hash_string, address, address_hash, params) read_functions_required_wallet = - if action == "read" do - if contract_type == "proxy" do - Reader.read_functions_required_wallet_proxy(implementation_address_hash_string) - else - Reader.read_functions_required_wallet(address.smart_contract) - end - else - [] - end + load_read_functions_required_wallet(action, contract_type, implementation_address_hash_string, address) contract_abi = Poison.encode!(address.smart_contract.abi) @@ -99,6 +79,27 @@ defmodule BlockScoutWeb.SmartContractController do def index(conn, _), do: not_found(conn) + defp load_functions("write", "proxy", implementation_address_hash_string, _address, _address_hash, _params), + do: Writer.write_functions_proxy(implementation_address_hash_string) + + defp load_functions("write", _contract_type, _implementation_address_hash_string, address, _address_hash, _params), + do: Writer.write_functions(address.smart_contract) + + defp load_functions(_action, "proxy", implementation_address_hash_string, _address, address_hash, 
_params), + do: Reader.read_only_functions_proxy(address_hash, implementation_address_hash_string, nil) + + defp load_functions(_action, _contract_type, _implementation_address_hash_string, address, address_hash, params), + do: Reader.read_only_functions(address.smart_contract, address_hash, params["from"]) + + defp load_read_functions_required_wallet("read", "proxy", implementation_address_hash_string, _address), + do: Reader.read_functions_required_wallet_proxy(implementation_address_hash_string) + + defp load_read_functions_required_wallet("read", _contract_type, _implementation_address_hash_string, address), + do: Reader.read_functions_required_wallet(address.smart_contract) + + defp load_read_functions_required_wallet(_action, _contract_type, _implementation_address_hash_string, _address), + do: [] + defp implementation_address_hash(contract_type, address) do if contract_type == "proxy" do implementation = Implementation.get_implementation(address.smart_contract) @@ -172,36 +173,10 @@ defmodule BlockScoutWeb.SmartContractController do {:ok, address} <- Address.find_contract_address(address_hash, address_options) do contract_type = if params["type"] == "proxy", do: :proxy, else: :regular - args = - if is_nil(params["args_count"]) do - # we should convert: %{"0" => _, "1" => _} to [_, _] - params["args"] |> convert_map_to_array() - else - {args_count, _} = Integer.parse(params["args_count"]) - - if args_count < 1, - do: [], - else: for(x <- 0..(args_count - 1), do: params["arg_" <> to_string(x)] |> convert_map_to_array()) - end + args = build_query_args(params) %{output: outputs, names: names} = - if custom_abi do - Reader.query_function_with_names_custom_abi( - address_hash, - %{method_id: params["method_id"], args: args}, - params["from"], - custom_abi.abi - ) - else - Reader.query_function_with_names( - address_hash, - %{method_id: params["method_id"], args: args}, - contract_type, - params["from"], - address.smart_contract && address.smart_contract.abi, - 
true - ) - end + query_function_with_names(custom_abi, address_hash, params, args, contract_type, address) conn |> put_status(200) @@ -226,6 +201,41 @@ defmodule BlockScoutWeb.SmartContractController do end end + defp build_query_args(%{"args_count" => nil} = params) do + # we should convert: %{"0" => _, "1" => _} to [_, _] + params["args"] |> convert_map_to_array() + end + + defp build_query_args(%{"args_count" => args_count} = params) do + {parsed_args_count, _} = Integer.parse(args_count) + + if parsed_args_count < 1, + do: [], + else: for(x <- 0..(parsed_args_count - 1), do: params["arg_" <> to_string(x)] |> convert_map_to_array()) + end + + defp build_query_args(params), do: params["args"] |> convert_map_to_array() + + defp query_function_with_names(custom_abi, address_hash, params, args, contract_type, address) do + if custom_abi do + Reader.query_function_with_names_custom_abi( + address_hash, + %{method_id: params["method_id"], args: args}, + params["from"], + custom_abi.abi + ) + else + Reader.query_function_with_names( + address_hash, + %{method_id: params["method_id"], args: args}, + contract_type, + params["from"], + address.smart_contract && address.smart_contract.abi, + true + ) + end + end + defp convert_map_to_array(map) do if turned_out_array?(map) do map |> Map.values() |> try_to_map_elements() diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/transaction_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/transaction_controller.ex index 7444cac188d5..70e7807e998f 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/transaction_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/transaction_controller.ex @@ -60,65 +60,7 @@ defmodule BlockScoutWeb.TransactionController do def show(conn, %{"id" => id} = params) do with {:ok, transaction_hash} <- Chain.string_to_full_hash(id), :ok <- Transaction.check_transaction_exists(transaction_hash) do - if 
Chain.transaction_has_token_transfers?(transaction_hash) do - with {:ok, transaction} <- - Chain.hash_to_transaction(transaction_hash, necessity_by_association: @necessity_by_association), - {:ok, false} <- AccessHelper.restricted_access?(to_string(transaction.from_address_hash), params), - {:ok, false} <- AccessHelper.restricted_access?(to_string(transaction.to_address_hash), params) do - render( - conn, - "show_token_transfers.html", - exchange_rate: Market.get_coin_exchange_rate(), - block_height: Chain.block_height(), - current_path: Controller.current_full_path(conn), - current_user: current_user(conn), - show_token_transfers: true, - transaction: transaction, - from_tags: get_address_tags(transaction.from_address_hash, current_user(conn)), - to_tags: get_address_tags(transaction.to_address_hash, current_user(conn)), - transaction_tags: - get_transaction_with_addresses_tags( - transaction, - current_user(conn) - ) - ) - else - {:error, :not_found} -> - set_not_found_view(conn, id) - - {:restricted_access, _} -> - set_not_found_view(conn, id) - end - else - with {:ok, transaction} <- - Chain.hash_to_transaction(transaction_hash, necessity_by_association: @necessity_by_association), - {:ok, false} <- AccessHelper.restricted_access?(to_string(transaction.from_address_hash), params), - {:ok, false} <- AccessHelper.restricted_access?(to_string(transaction.to_address_hash), params) do - render( - conn, - "show_internal_transactions.html", - exchange_rate: Market.get_coin_exchange_rate(), - current_path: Controller.current_full_path(conn), - current_user: current_user(conn), - block_height: Chain.block_height(), - show_token_transfers: Chain.transaction_has_token_transfers?(transaction_hash), - transaction: transaction, - from_tags: get_address_tags(transaction.from_address_hash, current_user(conn)), - to_tags: get_address_tags(transaction.to_address_hash, current_user(conn)), - transaction_tags: - get_transaction_with_addresses_tags( - transaction, - 
current_user(conn) - ) - ) - else - {:error, :not_found} -> - set_not_found_view(conn, id) - - {:restricted_access, _} -> - set_not_found_view(conn, id) - end - end + render_transaction_page(conn, id, transaction_hash, params) else :error -> unprocessable_entity(conn) @@ -128,6 +70,42 @@ defmodule BlockScoutWeb.TransactionController do end end + defp render_transaction_page(conn, id, transaction_hash, params) do + show_token_transfers? = Chain.transaction_has_token_transfers?(transaction_hash) + + template = + if show_token_transfers? do + "show_token_transfers.html" + else + "show_internal_transactions.html" + end + + with {:ok, transaction} <- + Chain.hash_to_transaction(transaction_hash, necessity_by_association: @necessity_by_association), + {:ok, false} <- AccessHelper.restricted_access?(to_string(transaction.from_address_hash), params), + {:ok, false} <- AccessHelper.restricted_access?(to_string(transaction.to_address_hash), params) do + render( + conn, + template, + exchange_rate: Market.get_coin_exchange_rate(), + block_height: Chain.block_height(), + current_path: Controller.current_full_path(conn), + current_user: current_user(conn), + show_token_transfers: show_token_transfers?, + transaction: transaction, + from_tags: get_address_tags(transaction.from_address_hash, current_user(conn)), + to_tags: get_address_tags(transaction.to_address_hash, current_user(conn)), + transaction_tags: get_transaction_with_addresses_tags(transaction, current_user(conn)) + ) + else + {:error, :not_found} -> + set_not_found_view(conn, id) + + {:restricted_access, _} -> + set_not_found_view(conn, id) + end + end + def set_not_found_view(conn, transaction_hash_string) do conn |> put_status(404) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/transaction_raw_trace_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/transaction_raw_trace_controller.ex index 751ec69b9de6..3b79623bbe40 100644 --- 
a/apps/block_scout_web/lib/block_scout_web/controllers/transaction_raw_trace_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/transaction_raw_trace_controller.ex @@ -26,16 +26,7 @@ defmodule BlockScoutWeb.TransactionRawTraceController do ), {:ok, false} <- AccessHelper.restricted_access?(to_string(transaction.from_address_hash), params), {:ok, false} <- AccessHelper.restricted_access?(to_string(transaction.to_address_hash), params) do - if is_nil(transaction.block_number) do - render_raw_trace(conn, [], transaction, hash) - else - FirstTraceOnDemand.maybe_trigger_fetch(transaction) - - case Chain.fetch_transaction_raw_traces(transaction) do - {:ok, raw_traces} -> render_raw_trace(conn, raw_traces, transaction, hash) - _error -> unprocessable_entity(conn) - end - end + render_fetched_trace(conn, transaction, hash) else {:restricted_access, _} -> TransactionController.set_not_found_view(conn, hash_string) @@ -48,6 +39,19 @@ defmodule BlockScoutWeb.TransactionRawTraceController do end end + defp render_fetched_trace(conn, transaction, hash) do + if is_nil(transaction.block_number) do + render_raw_trace(conn, [], transaction, hash) + else + FirstTraceOnDemand.maybe_trigger_fetch(transaction) + + case Chain.fetch_transaction_raw_traces(transaction) do + {:ok, raw_traces} -> render_raw_trace(conn, raw_traces, transaction, hash) + _error -> unprocessable_entity(conn) + end + end + end + defp render_raw_trace(conn, raw_traces, transaction, hash) do render( conn, diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/visualize_sol2uml_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/visualize_sol2uml_controller.ex index be136a82f5ed..157ae4f758e0 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/visualize_sol2uml_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/visualize_sol2uml_controller.ex @@ -13,32 +13,15 @@ defmodule BlockScoutWeb.VisualizeSol2umlController do ip: 
AccessHelper.conn_to_ip_string(conn) ] - if Sol2uml.enabled?() do - with {:ok, address_hash} <- Chain.string_to_address_hash(address_hash_string), - {:ok, address} <- Address.find_contract_address(address_hash, address_options), - # check that contract is verified. partial and bytecode twin verification is ok for this case - false <- is_nil(address.smart_contract) do - sources = - address.smart_contract.smart_contract_additional_sources - |> Enum.map(fn additional_source -> {additional_source.file_name, additional_source.contract_source_code} end) - |> Enum.into(%{}) - |> Map.merge(%{ - get_contract_filename(address.smart_contract.file_path) => address.smart_contract.contract_source_code - }) - - params = %{ - sources: sources - } - - case Sol2uml.visualize_contracts(params) do - {:ok, svg} -> json(conn, %{"address" => address.hash, "contract_svg" => svg, "error" => nil}) - {:error, error} -> json(conn, %{"address" => address.hash, "contract_svg" => nil, "error" => error}) - end - else - _ -> json(conn, %{error: "contract not found or unverified"}) - end + with true <- Sol2uml.enabled?(), + {:ok, address_hash} <- Chain.string_to_address_hash(address_hash_string), + {:ok, address} <- Address.find_contract_address(address_hash, address_options), + # check that contract is verified. 
partial and bytecode twin verification is ok for this case + false <- is_nil(address.smart_contract) do + render_visualize_json(conn, address) else - not_found(conn) + false -> not_found(conn) + _ -> json(conn, %{error: "contract not found or unverified"}) end end @@ -66,6 +49,23 @@ defmodule BlockScoutWeb.VisualizeSol2umlController do not_found(conn) end + defp render_visualize_json(conn, address) do + sources = + address.smart_contract.smart_contract_additional_sources + |> Enum.map(fn additional_source -> {additional_source.file_name, additional_source.contract_source_code} end) + |> Enum.into(%{}) + |> Map.merge(%{ + get_contract_filename(address.smart_contract.file_path) => address.smart_contract.contract_source_code + }) + + params = %{sources: sources} + + case Sol2uml.visualize_contracts(params) do + {:ok, svg} -> json(conn, %{"address" => address.hash, "contract_svg" => svg, "error" => nil}) + {:error, error} -> json(conn, %{"address" => address.hash, "contract_svg" => nil, "error" => error}) + end + end + def get_contract_filename(nil), do: "main.sol" def get_contract_filename(filename), do: filename end diff --git a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/pending_transaction.ex b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/pending_transaction.ex index b797585ec5d3..a18362abcf2c 100644 --- a/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/pending_transaction.ex +++ b/apps/ethereum_jsonrpc/lib/ethereum_jsonrpc/pending_transaction.ex @@ -15,21 +15,7 @@ defmodule EthereumJSONRPC.PendingTransaction do with {:ok, transaction_data} <- %{id: 1, method: "txpool_content", params: []} |> request() |> json_rpc(json_rpc_named_arguments), {:transaction_data_is_map, true} <- {:transaction_data_is_map, is_map(transaction_data)} do - transactions_params = - transaction_data["pending"] - |> Enum.flat_map(fn {_address, nonce_transactions_map} -> - nonce_transactions_map - |> Enum.map(fn {_nonce, transaction} -> - transaction - end) - end) - |> Transactions.to_elixir() - |> 
Transactions.elixir_to_params() - |> Enum.map(fn params -> - # txpool_content always returns transaction with 0x0000000000000000000000000000000000000000000000000000000000000000 value in block hash and index is null. - # https://github.com/ethereum/go-ethereum/issues/19897 - %{params | block_hash: nil, index: nil} - end) + transactions_params = geth_pending_transactions_to_params(transaction_data["pending"]) {:ok, transactions_params} else @@ -38,6 +24,20 @@ defmodule EthereumJSONRPC.PendingTransaction do end end + defp geth_pending_transactions_to_params(pending_transactions_map) do + pending_transactions_map + |> Enum.flat_map(fn {_address, nonce_transactions_map} -> Map.values(nonce_transactions_map) end) + |> Transactions.to_elixir() + |> Transactions.elixir_to_params() + |> Enum.map(&normalize_geth_pending_params/1) + end + + defp normalize_geth_pending_params(params) do + # txpool_content always returns transaction with 0x0000000000000000000000000000000000000000000000000000000000000000 value in block hash and index is null. 
+ # https://github.com/ethereum/go-ethereum/issues/19897 + %{params | block_hash: nil, index: nil} + end + @doc """ parity-style fetching of pending transactions (from `parity_pendingTransactions`) """ diff --git a/apps/explorer/lib/explorer/account/notifier/notify.ex b/apps/explorer/lib/explorer/account/notifier/notify.ex index 72f250909dbc..cdaa60fe2592 100644 --- a/apps/explorer/lib/explorer/account/notifier/notify.ex +++ b/apps/explorer/lib/explorer/account/notifier/notify.ex @@ -60,13 +60,7 @@ defmodule Explorer.Account.Notifier.Notify do summary, direction ) do - notification - |> query_notification(address) - |> Repo.account_repo().all() - |> case do - [] -> save_and_send_notification(notification, address) - _ -> :ok - end + handle_notification_save(notification, address) end {:error, _message} -> @@ -77,6 +71,16 @@ defmodule Explorer.Account.Notifier.Notify do end end + defp handle_notification_save(notification, address) do + notification + |> query_notification(address) + |> Repo.account_repo().all() + |> case do + [] -> save_and_send_notification(notification, address) + _ -> :ok + end + end + defp query_notification(notification, watchlist_address) do from(wn in WatchlistNotification, where: diff --git a/apps/explorer/lib/explorer/chain/bridged_token.ex b/apps/explorer/lib/explorer/chain/bridged_token.ex index ba4ceb291123..ad0e47e6546c 100644 --- a/apps/explorer/lib/explorer/chain/bridged_token.ex +++ b/apps/explorer/lib/explorer/chain/bridged_token.ex @@ -730,23 +730,27 @@ defmodule Explorer.Chain.BridgedToken do |> Decimal.mult(home_token_total_supply) |> Decimal.div(token_decimals_divider) - token = Token.get_by_contract_address_hash(token_hash_str, []) - - token_cap_usd = - if token && token.fiat_value do - token.fiat_value - |> Decimal.mult(token_cap) - else - 0 - end - - {:ok, token_cap_usd} + compute_token_cap_usd(token_hash_str, token_cap) else _ -> :error end end end + defp compute_token_cap_usd(token_hash_str, token_cap) do + token = 
Token.get_by_contract_address_hash(token_hash_str, []) + + token_cap_usd = + if token && token.fiat_value do + token.fiat_value + |> Decimal.mult(token_cap) + else + 0 + end + + {:ok, token_cap_usd} + end + defp parse_contract_response(abi_encoded_value, types) when is_list(types) do values = try do diff --git a/apps/explorer/lib/explorer/chain/log.ex b/apps/explorer/lib/explorer/chain/log.ex index a60b5de3473d..f4e5210660bd 100644 --- a/apps/explorer/lib/explorer/chain/log.ex +++ b/apps/explorer/lib/explorer/chain/log.ex @@ -229,14 +229,7 @@ defmodule Explorer.Chain.Log do {{:error, :contract_not_verified, candidates}, events_acc} else {_, events_acc} -> - result = - if decoding_from_list? do - mark_events_to_decode_later_via_sig_provider_in_batch(log, transaction.hash) - else - decode_event_via_sig_provider(log, transaction.hash, skip_sig_provider?) - end - - {result, events_acc} + handle_unverified_method(log, transaction, decoding_from_list?, skip_sig_provider?, events_acc) end end end @@ -256,6 +249,17 @@ defmodule Explorer.Chain.Log do end end + defp handle_unverified_method(log, transaction, decoding_from_list?, skip_sig_provider?, events_acc) do + result = + if decoding_from_list? do + mark_events_to_decode_later_via_sig_provider_in_batch(log, transaction.hash) + else + decode_event_via_sig_provider(log, transaction.hash, skip_sig_provider?) 
+ end + + {result, events_acc} + end + defp find_method_candidates_from_db(method_id, log, transaction, options) do event_candidates = method_id @@ -460,25 +464,32 @@ defmodule Explorer.Chain.Log do :log => log, :transaction_hash => transaction_hash }}, %{"abi" => abi}} -> - abi_first_item = abi |> List.first() + decode_sig_provider_batch_item(index, abi, log, transaction_hash) + end) + else + _ -> + input + |> Enum.map(fn {index, _} -> {index, {:error, :could_not_decode}} end) + end + end - if is_map(abi_first_item) do - abi = [abi_first_item |> Map.put("type", "event")] + defp decode_sig_provider_batch_item(index, abi, log, transaction_hash) do + abi_first_item = List.first(abi) - {:ok, selector, mapping} = find_and_decode(abi, log, transaction_hash) + if is_map(abi_first_item) do + normalized_abi = [Map.put(abi_first_item, "type", "event")] + case find_and_decode(normalized_abi, log, transaction_hash) do + {:ok, selector, mapping} -> identifier = Base.encode16(selector.method_id, case: :lower) text = function_call(selector.function, mapping) - {index, {:error, :contract_not_verified, [{:ok, identifier, text, mapping}]}} - else + + {:error, _} -> {index, {:error, :could_not_decode}} - end - end) + end else - _ -> - input - |> Enum.map(fn {index, _} -> {index, {:error, :could_not_decode}} end) + {index, {:error, :could_not_decode}} end end diff --git a/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex b/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex index 2795f569659b..8c87d5fdeb3e 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract/proxy.ex @@ -267,18 +267,22 @@ defmodule Explorer.Chain.SmartContract.Proxy do |> fetch_values(address_hash), resolvers_and_fetched_values when is_list(resolvers_and_fetched_values) <- Enum.reduce_while(resolvers_and_requirements, [], fn {resolver, reqs}, acc -> - values = Enum.into(reqs, %{}, fn {name, req} -> {name, Map.get(fetched_values, req, 
:error)} end) - - if Enum.any?(values, &(elem(&1, 1) == :error)) do - {:halt, :error} - else - {:cont, [{resolver, values} | acc]} - end + reduce_prefetched_resolver_values(resolver, reqs, fetched_values, acc) end) do {:ok, Enum.reverse(resolvers_and_fetched_values)} end end + defp reduce_prefetched_resolver_values(resolver, reqs, fetched_values, acc) do + values = Enum.into(reqs, %{}, fn {name, req} -> {name, Map.get(fetched_values, req, :error)} end) + + if Enum.any?(values, &(elem(&1, 1) == :error)) do + {:halt, :error} + else + {:cont, [{resolver, values} | acc]} + end + end + @doc """ Fetches values for given eth_getStorageAt and eth_call requirements for a given address hash. @@ -304,14 +308,7 @@ defmodule Explorer.Chain.SmartContract.Proxy do |> json_rpc(json_rpc_named_arguments), fetched_values when is_map(fetched_values) <- Enum.reduce_while(responses, %{}, fn result, acc -> - with %{id: id} <- result, - {:ok, req} = Map.fetch(id_to_params, id), - {:ok, value} <- handle_response(req, result) do - {:cont, Map.put(acc, req, value)} - else - _ -> - {:halt, :error} - end + reduce_fetched_value(result, id_to_params, acc) end) do {:ok, fetched_values} else @@ -319,6 +316,17 @@ defmodule Explorer.Chain.SmartContract.Proxy do end end + defp reduce_fetched_value(result, id_to_params, acc) do + with %{id: id} <- result, + {:ok, req} <- Map.fetch(id_to_params, id), + {:ok, value} <- handle_response(req, result) do + {:cont, Map.put(acc, req, value)} + else + _ -> + {:halt, :error} + end + end + @doc """ Fetches value for the given eth_getStorageAt or eth_call request for a given address hash. 
diff --git a/apps/explorer/lib/explorer/chain/smart_contract/proxy/eip_1967.ex b/apps/explorer/lib/explorer/chain/smart_contract/proxy/eip_1967.ex index e92da4f88f12..c0d25e90a436 100644 --- a/apps/explorer/lib/explorer/chain/smart_contract/proxy/eip_1967.ex +++ b/apps/explorer/lib/explorer/chain/smart_contract/proxy/eip_1967.ex @@ -44,17 +44,19 @@ defmodule Explorer.Chain.SmartContract.Proxy.EIP1967 do def resolve_implementations(_proxy_address, proxy_type, prefetched_values) do with {:ok, value} <- Map.fetch(prefetched_values, :implementation_slot), {:ok, stored_address_hash} <- Proxy.extract_address_hash(value) do - if proxy_type == :eip1967_beacon do - with {:ok, value} <- Proxy.fetch_value({:call, @implementation_signature}, stored_address_hash), - {:ok, implementation_address_hash} <- Proxy.extract_address_hash(value) do - {:ok, [implementation_address_hash]} - end - else - {:ok, [stored_address_hash]} - end + resolve_stored_implementation(proxy_type, stored_address_hash) else :error -> :error _ -> nil end end + + defp resolve_stored_implementation(:eip1967_beacon, stored_address_hash) do + with {:ok, value} <- Proxy.fetch_value({:call, @implementation_signature}, stored_address_hash), + {:ok, implementation_address_hash} <- Proxy.extract_address_hash(value) do + {:ok, [implementation_address_hash]} + end + end + + defp resolve_stored_implementation(_, stored_address_hash), do: {:ok, [stored_address_hash]} end diff --git a/apps/explorer/lib/explorer/chain/transaction/history/historian.ex b/apps/explorer/lib/explorer/chain/transaction/history/historian.ex index b570bb93c02b..a6a775d05e3f 100644 --- a/apps/explorer/lib/explorer/chain/transaction/history/historian.ex +++ b/apps/explorer/lib/explorer/chain/transaction/history/historian.ex @@ -112,35 +112,34 @@ defmodule Explorer.Chain.Transaction.History.Historian do select: {min(block.number), max(block.number)} ) - case Repo.one(min_max_block_query, timeout: :infinity) do - {min_block, max_block} when not 
is_nil(min_block) and not is_nil(max_block) -> - # Collects stats for the block range determining the given day and add - # the date determining the day to the record. - record = - min_block - |> compile_records_in_range(max_block) - |> Map.put(:date, day_to_fetch) - - records = [ - record - | records - ] - - # By making recursive calls to collect stats for every next day, eventually - # all stats for the specified number of days will be collected. - compile_records(num_days - 1, records) - - _ -> - # If it is not possible to identify the block range for the given day, - # the stats for the day are set to zero. - Logger.warning("tx/per day chart: failed to get min/max blocks through a fallback option}") - records = [%{date: day_to_fetch, number_of_transactions: 0, gas_used: 0, total_fee: 0} | records] - compile_records(num_days - 1, records) - end + compile_records_with_fallback_range(min_max_block_query, day_to_fetch, num_days, records) end end end + defp compile_records_with_fallback_range(min_max_block_query, day_to_fetch, num_days, records) do + case Repo.one(min_max_block_query, timeout: :infinity) do + {min_block, max_block} when not is_nil(min_block) and not is_nil(max_block) -> + # Collects stats for the block range determining the given day and add + # the date determining the day to the record. + record = + min_block + |> compile_records_in_range(max_block) + |> Map.put(:date, day_to_fetch) + + # By making recursive calls to collect stats for every next day, eventually + # all stats for the specified number of days will be collected. + compile_records(num_days - 1, [record | records]) + + _ -> + # If it is not possible to identify the block range for the given day, + # the stats for the day are set to zero. 
+ Logger.warning("tx/per day chart: failed to get min/max blocks through a fallback option}") + records = [%{date: day_to_fetch, number_of_transactions: 0, gas_used: 0, total_fee: 0} | records] + compile_records(num_days - 1, records) + end + end + # Compiles transaction statistics for a given block range. # # This function aggregates data from transactions within the specified block diff --git a/apps/explorer/lib/explorer/eth_rpc.ex b/apps/explorer/lib/explorer/eth_rpc.ex index 41b2f63381c4..6ba65be08d6f 100644 --- a/apps/explorer/lib/explorer/eth_rpc.ex +++ b/apps/explorer/lib/explorer/eth_rpc.ex @@ -1194,17 +1194,7 @@ defmodule Explorer.EthRPC do from_block = Map.get(filters, "fromBlock", "latest") to_block = Map.get(filters, "toBlock", "latest") - if from_block == "latest" || to_block == "latest" || from_block == "pending" || to_block == "pending" do - max_block_number = max_consensus_block_number() - - if is_nil(max_block_number) do - {:error, :empty} - else - to_block_numbers(from_block, to_block, max_block_number) - end - else - to_block_numbers(from_block, to_block, nil) - end + resolve_logs_blocks_range(from_block, to_block) {:block, _} -> {:error, "Invalid Block Hash"} @@ -1214,6 +1204,20 @@ defmodule Explorer.EthRPC do end end + defp resolve_logs_blocks_range(from_block, to_block) do + if from_block == "latest" || to_block == "latest" || from_block == "pending" || to_block == "pending" do + max_block_number = max_consensus_block_number() + + if is_nil(max_block_number) do + {:error, :empty} + else + to_block_numbers(from_block, to_block, max_block_number) + end + else + to_block_numbers(from_block, to_block, nil) + end + end + defp paging_options(%{ "paging_options" => %{ "logIndex" => log_index, diff --git a/apps/explorer/lib/explorer/market/source/coin_gecko.ex b/apps/explorer/lib/explorer/market/source/coin_gecko.ex index fbbfeedf9acc..817deec7bb63 100644 --- a/apps/explorer/lib/explorer/market/source/coin_gecko.ex +++ 
b/apps/explorer/lib/explorer/market/source/coin_gecko.ex @@ -167,41 +167,50 @@ defmodule Explorer.Market.Source.CoinGecko do headers() ) do tokens - |> Enum.reduce([], fn - %{ - "id" => id, - "symbol" => symbol, - "name" => name, - "platforms" => %{ - ^platform => token_contract_address_hash_string - } - }, - acc -> - case Hash.Address.cast(token_contract_address_hash_string) do - {:ok, token_contract_address_hash} -> - token = %{ - id: id, - symbol: symbol, - name: name, - contract_address_hash: token_contract_address_hash, - type: "ERC-20" - } - - [token | acc] - - _ -> - acc - end - - _, acc -> - acc - end) + |> Enum.reduce([], &reduce_coingecko_token(&1, &2, platform)) else nil -> {:error, "Platform not specified"} {:error, reason} -> {:error, reason} end end + defp reduce_coingecko_token( + %{ + "id" => id, + "symbol" => symbol, + "name" => name, + "platforms" => platforms + }, + acc, + platform + ) do + case Map.get(platforms, platform) do + nil -> + acc + + token_contract_address_hash_string -> + case Hash.Address.cast(token_contract_address_hash_string) do + {:ok, token_contract_address_hash} -> + [build_coingecko_token(id, symbol, name, token_contract_address_hash) | acc] + + _ -> + acc + end + end + end + + defp reduce_coingecko_token(_, acc, _platform), do: acc + + defp build_coingecko_token(id, symbol, name, token_contract_address_hash) do + %{ + id: id, + symbol: symbol, + name: name, + contract_address_hash: token_contract_address_hash, + type: "ERC-20" + } + end + defp put_market_data_to_tokens(tokens, market_data) do currency = config(:currency) market_cap = currency <> "_market_cap" diff --git a/apps/explorer/lib/explorer/market/source/coin_market_cap.ex b/apps/explorer/lib/explorer/market/source/coin_market_cap.ex index 3d4a9bbd021e..b9d7ad781a53 100644 --- a/apps/explorer/lib/explorer/market/source/coin_market_cap.ex +++ b/apps/explorer/lib/explorer/market/source/coin_market_cap.ex @@ -160,24 +160,7 @@ defmodule 
Explorer.Market.Source.CoinMarketCap do for {%{"timestamp" => date, "quote" => %{^currency_id => %{"price" => opening_price}}}, closing_quote} <- Stream.zip(quotes, Stream.concat(closing_quotes, [nil])) do date = Source.maybe_get_date(date) - - case closing_quote do - %{"quote" => %{^currency_id => %{"price" => closing_price}}} -> - %{ - closing_price: Source.to_decimal(closing_price), - date: date && DateTime.to_date(date), - opening_price: Source.to_decimal(opening_price), - secondary_coin: secondary_coin? - } - - _ -> - %{ - closing_price: Source.to_decimal(opening_price), - date: date && DateTime.to_date(date), - opening_price: Source.to_decimal(opening_price), - secondary_coin: secondary_coin? - } - end + build_price_history_entry(closing_quote, currency_id, opening_price, date, secondary_coin?) end {:ok, result} @@ -189,6 +172,21 @@ defmodule Explorer.Market.Source.CoinMarketCap do end end + defp build_price_history_entry(closing_quote, currency_id, opening_price, date, secondary_coin?) do + closing_price = + case closing_quote do + %{"quote" => %{^currency_id => %{"price" => value}}} -> value + _ -> opening_price + end + + %{ + closing_price: Source.to_decimal(closing_price), + date: date && DateTime.to_date(date), + opening_price: Source.to_decimal(opening_price), + secondary_coin: secondary_coin? 
+ } + end + defp base_url do URI.parse(config(:base_url)) end diff --git a/apps/explorer/lib/explorer/market/source/dia.ex b/apps/explorer/lib/explorer/market/source/dia.ex index 52cba97900f2..53d3c2b262e2 100644 --- a/apps/explorer/lib/explorer/market/source/dia.ex +++ b/apps/explorer/lib/explorer/market/source/dia.ex @@ -177,38 +177,46 @@ defmodule Explorer.Market.Source.DIA do [] ) do tokens - |> Enum.reduce([], fn - %{ - "Asset" => %{ - "Address" => token_contract_address_hash_string, - "Decimals" => decimals - } - }, - acc -> - case (is_nil(coin_address_hash) || - String.downcase(token_contract_address_hash_string) != String.downcase(coin_address_hash)) && - Hash.Address.cast(token_contract_address_hash_string) do - {:ok, token_contract_address_hash} -> - token = %{ - contract_address_hash: token_contract_address_hash, - decimals: decimals - } - - [token | acc] - - _ -> - acc - end - - _, acc -> - acc - end) + |> Enum.reduce([], &reduce_dia_token(&1, &2, coin_address_hash)) else nil -> {:error, "Blockchain not specified"} {:error, reason} -> {:error, reason} end end + defp reduce_dia_token( + %{ + "Asset" => %{ + "Address" => token_contract_address_hash_string, + "Decimals" => decimals + } + }, + acc, + coin_address_hash + ) do + same_as_coin? = + !is_nil(coin_address_hash) && + String.downcase(token_contract_address_hash_string) == String.downcase(coin_address_hash) + + if same_as_coin? do + acc + else + case Hash.Address.cast(token_contract_address_hash_string) do + {:ok, token_contract_address_hash} -> [build_dia_token(token_contract_address_hash, decimals) | acc] + _ -> acc + end + end + end + + defp reduce_dia_token(_, acc, _coin_address_hash), do: acc + + defp build_dia_token(token_contract_address_hash, decimals) do + %{ + contract_address_hash: token_contract_address_hash, + decimals: decimals + } + end + defp do_fetch_coin_price_history(previous_days, secondary_coin?) 
do datetime_now = DateTime.utc_now() unix_now = datetime_now |> DateTime.to_unix() @@ -228,31 +236,7 @@ defmodule Explorer.Market.Source.DIA do ) do values |> Enum.reduce_while(%{}, fn value, acc -> - with time when not is_nil(time) <- List.first(value), - {:ok, datetime, _} <- DateTime.from_iso8601(time), - date = DateTime.to_date(datetime), - price when not is_nil(price) <- List.last(value) do - {:cont, - Map.update( - acc, - date, - %{ - closing_price: Source.to_decimal(price), - date: date, - opening_price: Source.to_decimal(price), - secondary_coin: secondary_coin? - }, - fn existing_entry -> - %{ - existing_entry - | opening_price: Source.to_decimal(price) - } - end - )} - else - _ -> - {:halt, {:error, "Wrong format of DIA coin price history response: #{inspect(value)}"}} - end + reduce_dia_price_history_entry(value, acc, secondary_coin?) end) |> case do {:error, _reason} = error -> @@ -269,6 +253,34 @@ defmodule Explorer.Market.Source.DIA do end end + defp reduce_dia_price_history_entry(value, acc, secondary_coin?) do + with time when not is_nil(time) <- List.first(value), + {:ok, datetime, _} <- DateTime.from_iso8601(time), + date = DateTime.to_date(datetime), + price when not is_nil(price) <- List.last(value) do + {:cont, + Map.update( + acc, + date, + %{ + closing_price: Source.to_decimal(price), + date: date, + opening_price: Source.to_decimal(price), + secondary_coin: secondary_coin? 
+ }, + fn existing_entry -> + %{ + existing_entry + | opening_price: Source.to_decimal(price) + } + end + )} + else + _ -> + {:halt, {:error, "Wrong format of DIA coin price history response: #{inspect(value)}"}} + end + end + defp base_url do :base_url |> config() |> URI.parse() end diff --git a/apps/explorer/lib/explorer/market/source/mobula.ex b/apps/explorer/lib/explorer/market/source/mobula.ex index 2d8da7399c96..18c58d80bc46 100644 --- a/apps/explorer/lib/explorer/market/source/mobula.ex +++ b/apps/explorer/lib/explorer/market/source/mobula.ex @@ -43,28 +43,7 @@ defmodule Explorer.Market.Source.Mobula do headers() ) do {tokens_to_import, initial_tokens_len} = - Enum.reduce(tokens, {[], 0}, fn token, {to_import, count} -> - address_hash = token["contracts"] && List.first(token["contracts"])["address"] - - case address_hash && Hash.Address.cast(address_hash) do - {:ok, token_contract_address_hash} -> - token_to_import = %{ - symbol: token["symbol"], - name: token["name"], - fiat_value: Source.to_decimal(token["price"]), - volume_24h: Source.to_decimal(token["off_chain_volume"]), - circulating_market_cap: Source.to_decimal(token["market_cap"]), - icon_url: Source.handle_image_url(token["logo"]), - contract_address_hash: token_contract_address_hash, - type: "ERC-20" - } - - {[token_to_import | to_import], count + 1} - - _ -> - {to_import, count + 1} - end - end) + Enum.reduce(tokens, {[], 0}, &reduce_mobula_token/2) fetch_finished? 
= initial_tokens_len < batch_size new_state = if fetch_finished?, do: nil, else: offset + batch_size @@ -77,6 +56,31 @@ defmodule Explorer.Market.Source.Mobula do end end + defp reduce_mobula_token(token, {to_import, count}) do + address_hash = token["contracts"] && List.first(token["contracts"])["address"] + + to_import_updated = + case address_hash && Hash.Address.cast(address_hash) do + {:ok, token_contract_address_hash} -> [build_mobula_token(token, token_contract_address_hash) | to_import] + _ -> to_import + end + + {to_import_updated, count + 1} + end + + defp build_mobula_token(token, token_contract_address_hash) do + %{ + symbol: token["symbol"], + name: token["name"], + fiat_value: Source.to_decimal(token["price"]), + volume_24h: Source.to_decimal(token["off_chain_volume"]), + circulating_market_cap: Source.to_decimal(token["market_cap"]), + icon_url: Source.handle_image_url(token["logo"]), + contract_address_hash: token_contract_address_hash, + type: "ERC-20" + } + end + @impl Source def native_coin_price_history_fetching_enabled?, do: not is_nil(config(:coin_id)) diff --git a/apps/explorer/lib/explorer/migrator/restore_omitted_weth_transfers.ex b/apps/explorer/lib/explorer/migrator/restore_omitted_weth_transfers.ex index ca3d8e94a03a..fc85f76e3a04 100644 --- a/apps/explorer/lib/explorer/migrator/restore_omitted_weth_transfers.ex +++ b/apps/explorer/lib/explorer/migrator/restore_omitted_weth_transfers.ex @@ -174,13 +174,7 @@ defmodule Explorer.Migrator.RestoreOmittedWETHTransfers do log, [amount] <- Helper.decode_data(data, [{:uint, 256}]) do {from_address_hash, to_address_hash, balance_address_hash} = - if log.first_topic == TokenTransfer.weth_deposit_signature() do - to_address_hash = Helper.truncate_address_hash(to_string(second_topic)) - {burn_address_hash_string(), to_address_hash, to_address_hash} - else - from_address_hash = Helper.truncate_address_hash(to_string(second_topic)) - {from_address_hash, burn_address_hash_string(), from_address_hash} - 
end + determine_weth_address_hashes(log, second_topic) token_transfer = %{ amount: Decimal.new(amount || 0), @@ -240,6 +234,16 @@ defmodule Explorer.Migrator.RestoreOmittedWETHTransfers do end end + defp determine_weth_address_hashes(log, second_topic) do + if log.first_topic == TokenTransfer.weth_deposit_signature() do + to_address_hash = Helper.truncate_address_hash(to_string(second_topic)) + {burn_address_hash_string(), to_address_hash, to_address_hash} + else + from_address_hash = Helper.truncate_address_hash(to_string(second_topic)) + {from_address_hash, burn_address_hash_string(), from_address_hash} + end + end + defp run_task(batch) do Task.Supervisor.async_nolink(Explorer.WETHMigratorSupervisor, fn -> migrate_batch(batch) diff --git a/apps/explorer/lib/explorer/tags/address_to_tag.ex b/apps/explorer/lib/explorer/tags/address_to_tag.ex index b78facceb5bc..0c9a344bb7be 100644 --- a/apps/explorer/lib/explorer/tags/address_to_tag.ex +++ b/apps/explorer/lib/explorer/tags/address_to_tag.ex @@ -91,18 +91,7 @@ defmodule Explorer.Tags.AddressToTag do changeset_to_add_list = addresses_to_add |> Enum.map(fn address_hash_string -> - with {:ok, address_hash} <- Chain.string_to_address_hash(address_hash_string), - :ok <- Address.check_address_exists(address_hash) do - %{ - tag_id: tag_id, - address_hash: address_hash, - inserted_at: DateTime.utc_now(), - updated_at: DateTime.utc_now() - } - else - _ -> - nil - end + build_address_to_tag_changeset(address_hash_string, tag_id) end) |> Enum.filter(&(!is_nil(&1))) @@ -127,6 +116,21 @@ defmodule Explorer.Tags.AddressToTag do end end + defp build_address_to_tag_changeset(address_hash_string, tag_id) do + with {:ok, address_hash} <- Chain.string_to_address_hash(address_hash_string), + :ok <- Address.check_address_exists(address_hash) do + %{ + tag_id: tag_id, + address_hash: address_hash, + inserted_at: DateTime.utc_now(), + updated_at: DateTime.utc_now() + } + else + _ -> + nil + end + end + defp where_addresses(query, 
addresses_to_delete) do addresses_to_delete |> Enum.reduce(query, fn address_hash_string, acc -> diff --git a/apps/explorer/lib/explorer/third_party_integrations/sourcify.ex b/apps/explorer/lib/explorer/third_party_integrations/sourcify.ex index 38c716b65b80..2590dd008b4b 100644 --- a/apps/explorer/lib/explorer/third_party_integrations/sourcify.ex +++ b/apps/explorer/lib/explorer/third_party_integrations/sourcify.ex @@ -151,7 +151,7 @@ defmodule Explorer.ThirdPartyIntegrations.Sourcify do end end - def http_get_request(url, params) do + defp http_get_request(url, params) do request = HttpClient.get(url, [], params: params) case request do @@ -175,7 +175,7 @@ defmodule Explorer.ThirdPartyIntegrations.Sourcify do end end - def http_post_request(url, body) do + defp http_post_request(url, body) do request = Tesla.post(url, body) case request do diff --git a/apps/explorer/lib/explorer/token/metadata_retriever.ex b/apps/explorer/lib/explorer/token/metadata_retriever.ex index a43fa7fffc8a..28a0503c3e18 100644 --- a/apps/explorer/lib/explorer/token/metadata_retriever.ex +++ b/apps/explorer/lib/explorer/token/metadata_retriever.ex @@ -260,18 +260,7 @@ defmodule Explorer.Token.MetadataRetriever do case erc_1155_name_uri do %{:name => name} when is_binary(name) -> - sanitized_name = String.trim(name) - uri = {:ok, [sanitized_name]} - - with {:ok, %{metadata: metadata}} <- uri |> fetch_json(nil, nil, false) |> parse_fetch_json_response(), - true <- Map.has_key?(metadata, "name"), - false <- is_nil(metadata["name"]) do - name_metadata = %{:name => metadata["name"]} - - Map.merge(base_metadata, name_metadata) - else - _ -> base_metadata - end + fetch_erc1155_name_metadata(name, base_metadata) _ -> base_metadata @@ -281,6 +270,20 @@ defmodule Explorer.Token.MetadataRetriever do end end + defp fetch_erc1155_name_metadata(name, base_metadata) do + sanitized_name = String.trim(name) + uri = {:ok, [sanitized_name]} + + with {:ok, %{metadata: metadata}} <- uri |> fetch_json(nil, nil, 
false) |> parse_fetch_json_response(), + true <- Map.has_key?(metadata, "name"), + false <- is_nil(metadata["name"]) do + name_metadata = %{:name => metadata["name"]} + Map.merge(base_metadata, name_metadata) + else + _ -> base_metadata + end + end + @doc """ Parses the response from metadata fetching. diff --git a/apps/explorer/lib/explorer/visualize/sol2uml.ex b/apps/explorer/lib/explorer/visualize/sol2uml.ex index c26f5d1b894e..bec7eddfe7b8 100644 --- a/apps/explorer/lib/explorer/visualize/sol2uml.ex +++ b/apps/explorer/lib/explorer/visualize/sol2uml.ex @@ -13,7 +13,7 @@ defmodule Explorer.Visualize.Sol2uml do http_post_request(visualize_contracts_url(), body) end - def http_post_request(url, body) do + defp http_post_request(url, body) do headers = [{"Content-Type", "application/json"}] case HttpClient.post(url, Jason.encode!(body), headers, recv_timeout: @post_timeout) do diff --git a/apps/explorer/lib/test_helper.ex b/apps/explorer/lib/test_helper.ex index ea3697eed2d0..6219ddc15a3c 100644 --- a/apps/explorer/lib/test_helper.ex +++ b/apps/explorer/lib/test_helper.ex @@ -27,18 +27,22 @@ defmodule Explorer.TestHelper do :ok _ -> - case background_migration.db_index_operation() do - :ok -> - background_migration.update_cache() - :ok - - :error -> - raise "Background migrations failed" - end + execute_background_migration(background_migration) end end end + defp execute_background_migration(background_migration) do + case background_migration.db_index_operation() do + :ok -> + background_migration.update_cache() + :ok + + :error -> + raise "Background migrations failed" + end + end + def mock_erc7760_basic_requests( mox, error?, diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/backfill.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/backfill.ex index 7978619d3241..214c7762fd67 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/backfill.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/backfill.ex @@ -288,14 +288,7 @@ 
defmodule Indexer.Fetcher.Arbitrum.Workers.Backfill do hash: Hash.to_string(tx.hash) } end) do - transaction_params - |> Enum.chunk_every(chunk_size) - |> Enum.reduce_while({:ok, []}, fn chunk, {:ok, acc} -> - case Receipts.fetch(chunk, json_rpc_named_arguments) do - {:ok, %{receipts: receipts}} -> {:cont, {:ok, acc ++ receipts}} - {:error, reason} -> {:halt, {:error, reason}} - end - end) + fetch_transaction_receipts_batch(transaction_params, chunk_size, json_rpc_named_arguments) else # It is assumed that this branch is unreachable, as there is a check for # `indexed_blocks?` above in the stack @@ -303,6 +296,17 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.Backfill do end end + defp fetch_transaction_receipts_batch(transaction_params, chunk_size, json_rpc_named_arguments) do + transaction_params + |> Enum.chunk_every(chunk_size) + |> Enum.reduce_while({:ok, []}, fn chunk, {:ok, acc} -> + case Receipts.fetch(chunk, json_rpc_named_arguments) do + {:ok, %{receipts: receipts}} -> {:cont, {:ok, acc ++ receipts}} + {:error, reason} -> {:halt, {:error, reason}} + end + end) + end + # Updates `Explorer.Chain.Block` and `Explorer.Chain.Transaction` records in the # database with Arbitrum-specific data. 
# diff --git a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/confirmations/rollup_blocks.ex b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/confirmations/rollup_blocks.ex index 05ef97e7f70d..bac843231d4e 100644 --- a/apps/indexer/lib/indexer/fetcher/arbitrum/workers/confirmations/rollup_blocks.ex +++ b/apps/indexer/lib/indexer/fetcher/arbitrum/workers/confirmations/rollup_blocks.ex @@ -220,22 +220,15 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.Confirmations.RollupBlocks do not genesis_reached?(first_unconfirmed_block, rollup_first_block) do log_info("End of the batch #{batch.number} discovered, moving to the previous batch") - {status, updated_rollup_blocks} = - discover_rollup_blocks_belonging_to_one_confirmation( - first_unconfirmed_block - 1, - confirmation_desc, - outbox_config, - rollup_first_block, - new_cache - ) - - case status do - :error -> {:error, []} - # updated_rollup_blocks will contain either [] if the previous batch - # already confirmed or list of unconfirmed blocks of all previous - # unconfirmed batches - :ok -> {:ok, unconfirmed_rollup_blocks ++ updated_rollup_blocks} - end + discover_previous_batch_blocks( + first_unconfirmed_block, + confirmation_desc, + outbox_config, + rollup_first_block, + new_cache, + unconfirmed_rollup_blocks, + raw_unconfirmed_rollup_blocks + ) else # During the process of new confirmations discovery it will show "N of N", # for the process of historical confirmations discovery it will show "N of M". 
@@ -248,6 +241,33 @@ defmodule Indexer.Fetcher.Arbitrum.Workers.Confirmations.RollupBlocks do end end + defp discover_previous_batch_blocks( + first_unconfirmed_block, + confirmation_desc, + outbox_config, + rollup_first_block, + new_cache, + unconfirmed_rollup_blocks, + _raw_unconfirmed_rollup_blocks + ) do + {status, updated_rollup_blocks} = + discover_rollup_blocks_belonging_to_one_confirmation( + first_unconfirmed_block - 1, + confirmation_desc, + outbox_config, + rollup_first_block, + new_cache + ) + + case status do + :error -> {:error, []} + # updated_rollup_blocks will contain either [] if the previous batch + # already confirmed or list of unconfirmed blocks of all previous + # unconfirmed batches + :ok -> {:ok, unconfirmed_rollup_blocks ++ updated_rollup_blocks} + end + end + # Determines if a rollup block number has reached the lowest indexed block of the chain. # # ## Parameters diff --git a/apps/indexer/lib/indexer/fetcher/filecoin/address_info.ex b/apps/indexer/lib/indexer/fetcher/filecoin/address_info.ex index 07d22a3321f8..84a4d18f4b8d 100644 --- a/apps/indexer/lib/indexer/fetcher/filecoin/address_info.ex +++ b/apps/indexer/lib/indexer/fetcher/filecoin/address_info.ex @@ -204,12 +204,7 @@ defmodule Indexer.Fetcher.Filecoin.AddressInfo do {:ok, maybe_actor_type_string} <- Map.fetch(body_json, "actor_type") do robust_address_string = if maybe_robust_address_string in ["", ""] do - operation.address_hash - |> NativeAddress.cast() - |> case do - {:ok, native_address} -> to_string(native_address) - _ -> nil - end + cast_native_address(operation.address_hash) else maybe_robust_address_string end @@ -239,6 +234,15 @@ defmodule Indexer.Fetcher.Filecoin.AddressInfo do end end + defp cast_native_address(address_hash) do + address_hash + |> NativeAddress.cast() + |> case do + {:ok, native_address} -> to_string(native_address) + _ -> nil + end + end + @spec full_fetch_address_info_using_filfox_api(PendingAddressOperation.t()) :: {:ok, :full, 
filecoin_address_params()} | :error defp full_fetch_address_info_using_filfox_api(operation) do diff --git a/apps/indexer/lib/indexer/fetcher/internal_transaction.ex b/apps/indexer/lib/indexer/fetcher/internal_transaction.ex index fb80e9b64ed9..277f2b5782e5 100644 --- a/apps/indexer/lib/indexer/fetcher/internal_transaction.ex +++ b/apps/indexer/lib/indexer/fetcher/internal_transaction.ex @@ -516,20 +516,22 @@ defmodule Indexer.Fetcher.InternalTransaction do address_token_balances = %{token_transfers_params: token_transfers_with_token} |> AddressTokenBalances.params_set() - |> Enum.map(fn %{address_hash: address_hash, token_contract_address_hash: token_contract_address_hash} = entry -> - with {:ok, address_hash} <- Hash.Address.cast(address_hash), - {:ok, token_contract_address_hash} <- Hash.Address.cast(token_contract_address_hash) do - entry - |> Map.put(:address_hash, address_hash) - |> Map.put(:token_contract_address_hash, token_contract_address_hash) - else - error -> Logger.error("Failed to cast string to hash: #{inspect(error)}") - end - end) + |> Enum.map(&cast_address_hashes_for_token_balance/1) async_import_token_balances(%{address_token_balances: address_token_balances}, false) else :ok end end + + defp cast_address_hashes_for_token_balance(entry) do + with {:ok, address_hash} <- Hash.Address.cast(entry.address_hash), + {:ok, token_contract_address_hash} <- Hash.Address.cast(entry.token_contract_address_hash) do + entry + |> Map.put(:address_hash, address_hash) + |> Map.put(:token_contract_address_hash, token_contract_address_hash) + else + error -> Logger.error("Failed to cast string to hash: #{inspect(error)}") + end + end end diff --git a/apps/indexer/lib/indexer/fetcher/optimism/eip1559_config_update.ex b/apps/indexer/lib/indexer/fetcher/optimism/eip1559_config_update.ex index 9da58bdc7fb4..ae41da57b2b5 100644 --- a/apps/indexer/lib/indexer/fetcher/optimism/eip1559_config_update.ex +++ 
b/apps/indexer/lib/indexer/fetcher/optimism/eip1559_config_update.ex @@ -320,57 +320,15 @@ defmodule Indexer.Fetcher.Optimism.EIP1559ConfigUpdate do case fetch_blocks_by_numbers(block_numbers, json_rpc_named_arguments, false) do {:ok, %Blocks{blocks_params: blocks_params}} -> # we only keep block numbers for the existing blocks - block_numbers_existing = - block_numbers - |> Enum.filter(fn block_number -> - Enum.any?(blocks_params, fn b -> - !is_nil(b) and b.number == block_number - end) - end) + block_numbers_existing = existing_block_numbers(block_numbers, blocks_params) + blocks_by_number = blocks_params_by_number(blocks_params) last_block_number = List.last(block_numbers_existing) Enum.reduce(block_numbers_existing, 0, fn block_number, acc -> - # credo:disable-for-next-line Credo.Check.Refactor.Nesting - block = Enum.find(blocks_params, %{extra_data: "0x"}, fn b -> b.number == block_number end) - - extra_data = hash_to_binary(block.extra_data) - - return = - with {:valid_format, true} <- {:valid_format, byte_size(extra_data) >= 9}, - <> = - extra_data, - prev_config = EIP1559ConfigUpdate.actual_config_for_block(block.number), - new_config = - (if version == 0 do - {denominator, elasticity, nil} - else - <> = rest_extra_data - {denominator, elasticity, min_base_fee} - end), - {:updated_config, true} <- {:updated_config, prev_config != new_config} do - update_config(block.number, block.hash, new_config) - - Logger.info( - "Config was updated at block #{block.number}. Previous one: #{inspect(prev_config)}. New one: #{inspect(new_config)}." - ) - - acc + 1 - else - {:valid_format, false} -> - Logger.warning("extraData of the block ##{block_number} has invalid format. 
Ignoring it.") - acc - - {:updated_config, false} -> - acc - end - - # credo:disable-for-next-line Credo.Check.Refactor.Nesting - if block.number == last_block_number do - Optimism.set_last_block_hash(block.hash, @counter_type_last_l2_block_hash) - end + block = Map.get(blocks_by_number, block_number, %{extra_data: "0x"}) - return + process_block_config(block, block_number, last_block_number, acc) end) {_, message_or_errors} -> @@ -391,6 +349,63 @@ defmodule Indexer.Fetcher.Optimism.EIP1559ConfigUpdate do end end + defp existing_block_numbers(block_numbers, blocks_params) do + existing_numbers = + blocks_params + |> Enum.reject(&is_nil/1) + |> MapSet.new(& &1.number) + + Enum.filter(block_numbers, &MapSet.member?(existing_numbers, &1)) + end + + defp blocks_params_by_number(blocks_params) do + blocks_params + |> Enum.reject(&is_nil/1) + |> Map.new(&{&1.number, &1}) + end + + defp process_block_config(block, block_number, last_block_number, acc) do + extra_data = hash_to_binary(block.extra_data) + + return = + with {:valid_format, true} <- {:valid_format, byte_size(extra_data) >= 9}, + <> = + extra_data, + prev_config = EIP1559ConfigUpdate.actual_config_for_block(block.number), + new_config = build_eip1559_config(version, denominator, elasticity, rest_extra_data), + {:updated_config, true} <- {:updated_config, prev_config != new_config} do + update_config(block.number, block.hash, new_config) + + Logger.info( + "Config was updated at block #{block.number}. Previous one: #{inspect(prev_config)}. New one: #{inspect(new_config)}." + ) + + acc + 1 + else + {:valid_format, false} -> + Logger.warning("extraData of the block ##{block_number} has invalid format. 
Ignoring it.") + acc + + {:updated_config, false} -> + acc + end + + if block.number == last_block_number do + Optimism.set_last_block_hash(block.hash, @counter_type_last_l2_block_hash) + end + + return + end + + defp build_eip1559_config(version, denominator, elasticity, rest_extra_data) do + if version == 0 do + {denominator, elasticity, nil} + else + <> = rest_extra_data + {denominator, elasticity, min_base_fee} + end + end + # Inserts a new row into the `op_eip1559_config_updates` database table. # # ## Parameters diff --git a/apps/indexer/lib/indexer/fetcher/optimism/transaction_batch.ex b/apps/indexer/lib/indexer/fetcher/optimism/transaction_batch.ex index 8b5d8f217150..d79cd4721b73 100644 --- a/apps/indexer/lib/indexer/fetcher/optimism/transaction_batch.ex +++ b/apps/indexer/lib/indexer/fetcher/optimism/transaction_batch.ex @@ -585,46 +585,53 @@ defmodule Indexer.Fetcher.Optimism.TransactionBatch do ) do blob_versioned_hashes |> Enum.reduce([], fn blob_hash, inputs_acc -> - with {:ok, response} <- Helper.http_get_request(blobs_api_url <> "/" <> blob_hash), - blob_data = Map.get(response, "blob_data"), - false <- is_nil(blob_data) do - # read the data from Blockscout API - decoded = - blob_data - |> hash_to_binary() - |> OptimismTransactionBatch.decode_eip4844_blob() - - if is_nil(decoded) do - Logger.warning("Cannot decode the blob #{blob_hash} taken from the Blockscout Blobs API.") - - inputs_acc - else - Logger.info( - "The input for transaction #{transaction_hash} is taken from the Blockscout Blobs API. 
Blob hash: #{blob_hash}" - ) - - input = %{ - bytes: decoded, - eip4844_blob_hash: blob_hash - } - - [input | inputs_acc] - end - else - _ -> - # read the data from the fallback source (beacon node) - eip4844_blobs_to_inputs_from_fallback( - transaction_hash, - blob_hash, - block_timestamp, - inputs_acc, - chain_id_l1 - ) - end + process_blob(blob_hash, transaction_hash, block_timestamp, inputs_acc, blobs_api_url, chain_id_l1) end) |> Enum.reverse() end + defp process_blob(blob_hash, transaction_hash, block_timestamp, inputs_acc, blobs_api_url, chain_id_l1) do + with {:ok, response} <- Helper.http_get_request(blobs_api_url <> "/" <> blob_hash), + blob_data = Map.get(response, "blob_data"), + false <- is_nil(blob_data) do + decode_and_process_blob(blob_data, blob_hash, transaction_hash, inputs_acc) + else + _ -> + # read the data from the fallback source (beacon node) + eip4844_blobs_to_inputs_from_fallback( + transaction_hash, + blob_hash, + block_timestamp, + inputs_acc, + chain_id_l1 + ) + end + end + + defp decode_and_process_blob(blob_data, blob_hash, transaction_hash, inputs_acc) do + # read the data from Blockscout API + decoded = + blob_data + |> hash_to_binary() + |> OptimismTransactionBatch.decode_eip4844_blob() + + if is_nil(decoded) do + Logger.warning("Cannot decode the blob #{blob_hash} taken from the Blockscout Blobs API.") + inputs_acc + else + Logger.info( + "The input for transaction #{transaction_hash} is taken from the Blockscout Blobs API. 
Blob hash: #{blob_hash}" + ) + + input = %{ + bytes: decoded, + eip4844_blob_hash: blob_hash + } + + [input | inputs_acc] + end + end + defp eip4844_blobs_to_inputs_from_fallback( transaction_hash, blob_hash, diff --git a/apps/indexer/lib/indexer/fetcher/zilliqa/zrc2_tokens.ex b/apps/indexer/lib/indexer/fetcher/zilliqa/zrc2_tokens.ex index 601b626bd5d1..5bd10be22b2e 100644 --- a/apps/indexer/lib/indexer/fetcher/zilliqa/zrc2_tokens.ex +++ b/apps/indexer/lib/indexer/fetcher/zilliqa/zrc2_tokens.ex @@ -449,52 +449,10 @@ defmodule Indexer.Fetcher.Zilliqa.Zrc2Tokens do zrc2_token_adapters = logs - |> Enum.filter(fn log -> - with false <- is_nil(log.first_topic), - true <- Hash.to_string(log.first_topic) == @zrc2_transfer_success_event, - # only ZRC-2 is supported - params = zrc2_event_params(log.data), - true <- Map.has_key?(params, :sender) && Map.has_key?(params, :recipient) && Map.has_key?(params, :amount), - true <- is_nil(zrc2_log_adapter_address_hash(log, adapter_address_hash_by_zrc2_address_hash)) do - transaction_input = transaction_by_hash[log.transaction_hash].input.bytes - - method_id = - if byte_size(transaction_input) >= 4 do - <> = transaction_input - "0x" <> Base.encode16(method_id, case: :lower) - end - - method_id == TokenTransfer.transfer_function_signature() - else - _ -> false - end - end) - |> Enum.reduce([], fn log, acc -> - transaction_hash = log.transaction_hash - to_address_hash = transaction_by_hash[transaction_hash].to_address_hash - - # are there any `Transfer` logs emitted by the `to_address_hash` in this transaction? 
- erc20_transfer_event_found = - logs - |> Enum.filter(&(&1.transaction_hash == transaction_hash)) - |> Enum.any?( - &(!is_nil(&1.first_topic) and Hash.to_string(&1.first_topic) == TokenTransfer.constant() and - &1.address_hash == to_address_hash) - ) - - if erc20_transfer_event_found do - acc - else - # if the `Transfer` log is not found, this is ERC-20 adapter contract address - [ - %{ - adapter_address_hash: Hash.to_string(to_address_hash), - zrc2_address_hash: Hash.to_string(log.address_hash) - } - | acc - ] - end - end) + |> Enum.filter( + &valid_zrc2_transfer_log?(&1, transaction_by_hash, adapter_address_hash_by_zrc2_address_hash, logs) + ) + |> Enum.reduce([], &build_zrc2_adapters(&1, &2, transaction_by_hash, logs)) |> Enum.uniq() if zrc2_token_adapters != [] do @@ -512,6 +470,63 @@ defmodule Indexer.Fetcher.Zilliqa.Zrc2Tokens do end end + # Checks if log is a valid ZRC-2 transfer log with correct event signature and method ID + defp valid_zrc2_transfer_log?(log, transaction_by_hash, adapter_address_hash_by_zrc2_address_hash, _logs) do + with false <- is_nil(log.first_topic), + true <- Hash.to_string(log.first_topic) == @zrc2_transfer_success_event, + # only ZRC-2 is supported + params = zrc2_event_params(log.data), + true <- Map.has_key?(params, :sender) && Map.has_key?(params, :recipient) && Map.has_key?(params, :amount), + true <- is_nil(zrc2_log_adapter_address_hash(log, adapter_address_hash_by_zrc2_address_hash)) do + transaction_input = transaction_by_hash[log.transaction_hash].input.bytes + + method_id = + if byte_size(transaction_input) >= 4 do + <> = transaction_input + "0x" <> Base.encode16(method_id, case: :lower) + else + nil + end + + method_id == TokenTransfer.transfer_function_signature() + else + _ -> false + end + end + + # Builds ZRC-2 token adapter from a log if no ERC-20 transfer was found in the same transaction + defp build_zrc2_adapters(log, acc, transaction_by_hash, logs) do + transaction_hash = log.transaction_hash + to_address_hash 
= transaction_by_hash[transaction_hash].to_address_hash + + # are there any `Transfer` logs emitted by the `to_address_hash` in this transaction? + erc20_transfer_event_found = + has_erc20_transfer_for_address?(logs, transaction_hash, to_address_hash) + + if erc20_transfer_event_found do + acc + else + # if the `Transfer` log is not found, this is ERC-20 adapter contract address + [ + %{ + adapter_address_hash: Hash.to_string(to_address_hash), + zrc2_address_hash: Hash.to_string(log.address_hash) + } + | acc + ] + end + end + + # Checks if there are `Transfer` logs emitted by the given address in the transaction + defp has_erc20_transfer_for_address?(logs, transaction_hash, to_address_hash) do + logs + |> Enum.filter(&(&1.transaction_hash == transaction_hash)) + |> Enum.any?( + &(!is_nil(&1.first_topic) and Hash.to_string(&1.first_topic) == TokenTransfer.constant() and + &1.address_hash == to_address_hash) + ) + end + # Scans the `zilliqa_zrc2_token_transfers` table for the rows that have corresponding # adapter addresses in the `zilliqa_zrc2_token_adapters` table. The found rows are inserted into the `token_transfers` # table (with the `token_contract_address_hash` == `adapter_address_hash` and `token_type` == "ZRC-2"), and then diff --git a/apps/indexer/lib/indexer/pending_transactions_sanitizer.ex b/apps/indexer/lib/indexer/pending_transactions_sanitizer.ex index bc4bd35e6815..c7c239d59b64 100644 --- a/apps/indexer/lib/indexer/pending_transactions_sanitizer.ex +++ b/apps/indexer/lib/indexer/pending_transactions_sanitizer.ex @@ -75,16 +75,7 @@ defmodule Indexer.PendingTransactionsSanitizer do %{id: id, result: result} -> pending_transaction = Map.fetch!(id_to_params, id) - if result do - fetch_block_and_invalidate_wrapper(pending_transaction, to_string(pending_transaction.hash), result) - else - Logger.debug( - "Transaction with hash #{pending_transaction.hash} doesn't exist in the node anymore. 
We should remove it from Blockscout DB.", - fetcher: :pending_transactions_to_refetch - ) - - fetch_pending_transaction_and_delete(pending_transaction) - end + handle_pending_transaction_result(pending_transaction, result) error -> Logger.error("Error while fetching pending transaction receipt: #{inspect(error)}") @@ -96,6 +87,19 @@ defmodule Indexer.PendingTransactionsSanitizer do ) end + defp handle_pending_transaction_result(pending_transaction, result) do + if result do + fetch_block_and_invalidate_wrapper(pending_transaction, to_string(pending_transaction.hash), result) + else + Logger.debug( + "Transaction with hash #{pending_transaction.hash} doesn't exist in the node anymore. We should remove it from Blockscout DB.", + fetcher: :pending_transactions_to_refetch + ) + + fetch_pending_transaction_and_delete(pending_transaction) + end + end + defp get_transaction_receipt_requests(id_to_params) do Enum.map(id_to_params, fn {id, transaction} -> request(%{id: id, method: "eth_getTransactionReceipt", params: [to_string(transaction.hash)]}) diff --git a/apps/indexer/lib/indexer/transform/address_coin_balances_daily.ex b/apps/indexer/lib/indexer/transform/address_coin_balances_daily.ex index dd446f5873de..72e3dfa4ca3e 100644 --- a/apps/indexer/lib/indexer/transform/address_coin_balances_daily.ex +++ b/apps/indexer/lib/indexer/transform/address_coin_balances_daily.ex @@ -23,20 +23,7 @@ defmodule Indexer.Transform.AddressCoinBalancesDaily do block.number == block_number end) - day = - if block do - DateTime.to_date(block.timestamp) - else - json_rpc_named_arguments = Application.get_env(:explorer, :json_rpc_named_arguments) - - with {:ok, %{"timestamp" => timestamp_raw}} <- - %{id: 1, number: block_number} - |> ByNumber.request(false) - |> json_rpc(json_rpc_named_arguments) do - timestamp = quantity_to_integer(timestamp_raw) - DateTime.from_unix!(timestamp) - end - end + day = resolve_day(block, block_number) [%{address_hash: address_hash, day: day} | acc] end) @@ 
-73,4 +60,20 @@ defmodule Indexer.Transform.AddressCoinBalancesDaily do coin_balances_daily_params_set end + + defp resolve_day(block, _block_number) when not is_nil(block), do: DateTime.to_date(block.timestamp) + + defp resolve_day(_block, block_number) do + json_rpc_named_arguments = Application.get_env(:explorer, :json_rpc_named_arguments) + + with {:ok, %{"timestamp" => timestamp_raw}} <- + %{id: 1, number: block_number} + |> ByNumber.request(false) + |> json_rpc(json_rpc_named_arguments) do + timestamp_raw + |> quantity_to_integer() + |> DateTime.from_unix!() + |> DateTime.to_date() + end + end end diff --git a/apps/nft_media_handler/lib/nft_media_handler.ex b/apps/nft_media_handler/lib/nft_media_handler.ex index c284da6d4e52..bebf45ed2824 100644 --- a/apps/nft_media_handler/lib/nft_media_handler.ex +++ b/apps/nft_media_handler/lib/nft_media_handler.ex @@ -167,8 +167,7 @@ defmodule NFTMediaHandler do "/" <> resource_id <- path do resource_id else - _ -> - if is_nil(path), do: host, else: host <> path + _ -> build_ipfs_resource_id(host, path) end {TokenMetadataRetriever.ipfs_link(resource_id), TokenMetadataRetriever.ipfs_headers()} @@ -198,4 +197,8 @@ defmodule NFTMediaHandler do end end end + + defp build_ipfs_resource_id(host, path) do + if is_nil(path), do: host, else: host <> path + end end diff --git a/mix.lock b/mix.lock index e926e3c6adb5..347f936279ce 100644 --- a/mix.lock +++ b/mix.lock @@ -32,7 +32,7 @@ "cowboy": {:hex, :cowboy, "2.14.2", "4008be1df6ade45e4f2a4e9e2d22b36d0b5aba4e20b0a0d7049e28d124e34847", [:make, :rebar3], [{:cowlib, ">= 2.16.0 and < 3.0.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, ">= 1.8.0 and < 3.0.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "569081da046e7b41b5df36aa359be71a0c8874e5b9cff6f747073fc57baf1ab9"}, "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", 
optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"}, "cowlib": {:hex, :cowlib, "2.16.0", "54592074ebbbb92ee4746c8a8846e5605052f29309d3a873468d76cdf932076f", [:make, :rebar3], [], "hexpm", "7f478d80d66b747344f0ea7708c187645cfcc08b11aa424632f78e25bf05db51"}, - "credo": {:hex, :credo, "1.7.15", "283da72eeb2fd3ccf7248f4941a0527efb97afa224bcdef30b4b580bc8258e1c", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "291e8645ea3fea7481829f1e1eb0881b8395db212821338e577a90bf225c5607"}, + "credo": {:hex, :credo, "1.7.17", "f92b6aa5b26301eaa5a35e4d48ebf5aa1e7094ac00ae38f87086c562caf8a22f", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "1eb5645c835f0b6c9b5410f94b5a185057bcf6d62a9c2b476da971cde8749645"}, "csv": {:hex, :csv, "2.5.0", "c47b5a5221bf2e56d6e8eb79e77884046d7fd516280dc7d9b674251e0ae46246", [:mix], [{:parallel_stream, "~> 1.0.4 or ~> 1.1.0", [hex: :parallel_stream, repo: "hexpm", optional: false]}], "hexpm", "e821f541487045c7591a1963eeb42afff0dfa99bdcdbeb3410795a2f59c77d34"}, "dataloader": {:hex, :dataloader, "2.0.2", "c45075e0692e68638a315e14f747bd8d7065fb5f38705cf980f62d4cd344401f", [:mix], [{:ecto, ">= 3.4.3 and < 4.0.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:opentelemetry_process_propagator, "~> 0.2.1 or ~> 0.3", [hex: :opentelemetry_process_propagator, repo: "hexpm", optional: true]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "4c6cabc0b55e96e7de74d14bf37f4a5786f0ab69aa06764a1f39dda40079b098"}, 
"db_connection": {:hex, :db_connection, "2.9.0", "a6a97c5c958a2d7091a58a9be40caf41ab496b0701d21e1d1abff3fa27a7f371", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "17d502eacaf61829db98facf6f20808ed33da6ccf495354a41e64fe42f9c509c"}, From 43ef7e242be886b03e3ffa01c74c9592b9a632f5 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Tue, 10 Mar 2026 12:05:39 +0300 Subject: [PATCH 04/42] fix: Allow disabling contract creation internal transaction association (#14097) --- .../controllers/api/v2/address_controller.ex | 49 +++++++++------ apps/explorer/lib/explorer/chain.ex | 61 ++++++++++++++++--- 2 files changed, 84 insertions(+), 26 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex index 5a1b0502999c..77c2a4838eb9 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/address_controller.ex @@ -154,10 +154,23 @@ defmodule BlockScoutWeb.API.V2.AddressController do } ] + @spec include_internal_transaction_association?() :: boolean() + defp include_internal_transaction_association? 
do + !Application.get_env(:explorer, :api_disable_contract_creation_internal_transaction_association, false) + end + + @spec hash_to_address_options(keyword()) :: keyword() + defp hash_to_address_options(options) do + Keyword.put( + options, + :include_internal_transaction_association?, + include_internal_transaction_association?() + ) + end + @spec contract_address_preloads() :: [keyword()] defp contract_address_preloads do - include_internal_tx = - !Application.get_env(:explorer, :api_disable_contract_creation_internal_transaction_association, false) + include_internal_tx = include_internal_transaction_association?() chain_type_associations = case chain_type() do @@ -194,7 +207,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do ip = AccessHelper.conn_to_ip_string(conn) with {:ok, address_hash} <- validate_address_hash(address_hash_string, params) do - case Chain.hash_to_address(address_hash, @address_options) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)) do {:ok, address} -> %Address{} = fully_preloaded_address = @@ -262,7 +275,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do def counters(conn, %{address_hash_param: address_hash_string} = params) do with {:ok, address_hash} <- validate_address_hash(address_hash_string, params) do # TODO: check if @address_options is needed here - case Chain.hash_to_address(address_hash, @address_options) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)) do {:ok, address} -> {validation_count} = Counters.address_counters(address, @api_true) @@ -325,7 +338,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do ip = AccessHelper.conn_to_ip_string(conn) with {:ok, address_hash} <- validate_address_hash(address_hash_string, params) do - case Chain.hash_to_address(address_hash, @address_options) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)) do {:ok, _address} -> token_balances = address_hash @@ 
-406,7 +419,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do @spec transactions(Plug.Conn.t(), map()) :: {:format, :error} | {:restricted_access, true} | Plug.Conn.t() def transactions(conn, %{address_hash_param: address_hash_string} = params) do with {:ok, address_hash} <- validate_address_hash(address_hash_string, params) do - case Chain.hash_to_address(address_hash, @address_options) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)) do {:ok, _address} -> options = @transaction_necessity_by_association @@ -501,7 +514,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do with {:ok, address_hash} <- validate_address_hash(address_hash_string, params), {:ok, token_address_hash} <- validate_optional_address_hash(params[:token], params), token_address_exists <- (token_address_hash && Token.check_token_exists(token_address_hash)) || :ok do - case {Chain.hash_to_address(address_hash, @address_options), token_address_exists} do + case {Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)), token_address_exists} do {{:ok, _address}, :ok} -> paging_options = paging_options(params) @@ -587,7 +600,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do @spec internal_transactions(Plug.Conn.t(), map()) :: {:format, :error} | {:restricted_access, true} | Plug.Conn.t() def internal_transactions(conn, %{address_hash_param: address_hash_string} = params) do with {:ok, address_hash} <- validate_address_hash(address_hash_string, params) do - case Chain.hash_to_address(address_hash, @address_options) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)) do {:ok, _address} -> full_options = [ @@ -664,7 +677,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do def logs(conn, %{address_hash_param: address_hash_string} = params) do with {:ok, address_hash} <- validate_address_hash(address_hash_string, params), {:ok, topic} <- validate_optional_topic(params[:topic]) do - case 
Chain.hash_to_address(address_hash, @api_true) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@api_true)) do {:ok, _address} -> options = params @@ -737,7 +750,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do @spec blocks_validated(Plug.Conn.t(), map()) :: {:format, :error} | {:restricted_access, true} | Plug.Conn.t() def blocks_validated(conn, %{address_hash_param: address_hash_string} = params) do with {:ok, address_hash} <- validate_address_hash(address_hash_string, params) do - case Chain.hash_to_address(address_hash, @address_options) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)) do {:ok, _address} -> full_options = [ @@ -804,7 +817,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do @spec coin_balance_history(Plug.Conn.t(), map()) :: {:format, :error} | {:restricted_access, true} | Plug.Conn.t() def coin_balance_history(conn, %{address_hash_param: address_hash_string} = params) do with {:ok, address_hash} <- validate_address_hash(address_hash_string, params) do - case Chain.hash_to_address(address_hash, @address_options) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)) do {:ok, address} -> full_options = params |> paging_options() |> Keyword.merge(@api_true) @@ -866,7 +879,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do {:format, :error} | {:restricted_access, true} | Plug.Conn.t() def coin_balance_history_by_day(conn, %{address_hash_param: address_hash_string} = params) do with {:ok, address_hash} <- validate_address_hash(address_hash_string, params) do - case Chain.hash_to_address(address_hash, @address_options) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)) do {:ok, _address} -> balances_by_day = address_hash @@ -927,7 +940,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do ip = AccessHelper.conn_to_ip_string(conn) with {:ok, address_hash} <- 
validate_address_hash(address_hash_string, params) do - case Chain.hash_to_address(address_hash, @address_options) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)) do {:ok, _address} -> results_plus_one = address_hash @@ -999,7 +1012,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do @spec withdrawals(Plug.Conn.t(), map()) :: {:format, :error} | {:restricted_access, true} | Plug.Conn.t() def withdrawals(conn, %{address_hash_param: address_hash_string} = params) do with {:ok, address_hash} <- validate_address_hash(address_hash_string, params) do - case Chain.hash_to_address(address_hash, @address_options) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)) do {:ok, _address} -> options = @api_true |> Keyword.merge(paging_options(params)) withdrawals_plus_one = address_hash |> Chain.address_hash_to_withdrawals(options) @@ -1147,7 +1160,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do beacon_deposits: :beacon_deposits_count } - case Chain.hash_to_address(address_hash, @address_options) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)) do {:ok, _address} -> counters_json = address_hash @@ -1224,7 +1237,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do @spec nft_list(Plug.Conn.t(), map()) :: {:format, :error} | {:restricted_access, true} | Plug.Conn.t() def nft_list(conn, %{address_hash_param: address_hash_string} = params) do with {:ok, address_hash} <- validate_address_hash(address_hash_string, params) do - case Chain.hash_to_address(address_hash, @address_options) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)) do {:ok, _address} -> results_plus_one = Instance.nft_list( @@ -1300,7 +1313,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do @spec nft_collections(Plug.Conn.t(), map()) :: {:format, :error} | {:restricted_access, true} | Plug.Conn.t() def nft_collections(conn, 
%{address_hash_param: address_hash_string} = params) do with {:ok, address_hash} <- validate_address_hash(address_hash_string, params) do - case Chain.hash_to_address(address_hash, @address_options) do + case Chain.hash_to_address(address_hash, hash_to_address_options(@address_options)) do {:ok, _address} -> results_plus_one = Instance.nft_collections( @@ -1366,7 +1379,7 @@ defmodule BlockScoutWeb.API.V2.AddressController do @spec celo_election_rewards(Plug.Conn.t(), map()) :: {:format, :error} | {:restricted_access, true} | Plug.Conn.t() def celo_election_rewards(conn, %{address_hash_param: address_hash_string} = params) do with {:ok, address_hash} <- validate_address_hash(address_hash_string, params), - {:ok, _address} <- Chain.hash_to_address(address_hash, api?: true) do + {:ok, _address} <- Chain.hash_to_address(address_hash, hash_to_address_options(api?: true)) do full_options = @celo_election_rewards_options |> Keyword.put( diff --git a/apps/explorer/lib/explorer/chain.ex b/apps/explorer/lib/explorer/chain.ex index 20c5fb311833..4b91dbdbc57a 100644 --- a/apps/explorer/lib/explorer/chain.ex +++ b/apps/explorer/lib/explorer/chain.ex @@ -136,6 +136,8 @@ defmodule Explorer.Chain do @type paging_options :: {:paging_options, PagingOptions.t()} @typep balance_by_day :: %{date: String.t(), value: Wei.t()} @type api? :: {:api?, true | false} + @type include_internal_transaction_association? :: + {:include_internal_transaction_association?, true | false} @type ip :: {:ip, String.t()} @type show_scam_tokens? :: {:show_scam_tokens?, true | false} @@ -666,20 +668,24 @@ defmodule Explorer.Chain do then the `t:Explorer.Chain.Address.t/0` will not be included in the list. """ - @spec hash_to_address(Hash.Address.t() | binary(), [necessity_by_association_option | api?]) :: + @spec hash_to_address( + Hash.Address.t() | binary(), + [necessity_by_association_option | api? | include_internal_transaction_association?] 
+ ) :: {:ok, Address.t()} | {:error, :not_found} def hash_to_address( hash, options \\ [ - necessity_by_association: %{ - :names => :optional, - :smart_contract => :optional, - :token => :optional, - Address.contract_creation_transaction_associations() => :optional - } + necessity_by_association: default_hash_to_address_necessity_by_association() ] ) do - necessity_by_association = Keyword.get(options, :necessity_by_association, %{}) + include_internal_transaction_association? = + Keyword.get(options, :include_internal_transaction_association?, true) + + necessity_by_association = + options + |> Keyword.get(:necessity_by_association, default_hash_to_address_necessity_by_association()) + |> maybe_remove_internal_transaction_association(include_internal_transaction_association?) query = Address.address_query(hash) @@ -693,6 +699,45 @@ defmodule Explorer.Chain do end end + defp default_hash_to_address_necessity_by_association do + %{ + :names => :optional, + :smart_contract => :optional, + :token => :optional, + Address.contract_creation_transaction_associations() => :optional + } + end + + defp maybe_remove_internal_transaction_association(necessity_by_association, true), + do: necessity_by_association + + defp maybe_remove_internal_transaction_association(necessity_by_association, false) do + necessity_by_association + |> replace_association_key( + Address.contract_creation_transaction_associations(), + Address.contract_creation_transaction_associations(false) + ) + |> replace_association_key( + Address.contract_creation_transaction_with_from_address_associations(), + Address.contract_creation_transaction_with_from_address_associations(false) + ) + |> Map.delete(:contract_creation_internal_transaction) + |> Map.delete(Address.contract_creation_internal_transaction_association()) + |> Map.delete(Address.contract_creation_internal_transaction_with_from_address_association()) + end + + defp replace_association_key(necessity_by_association, old_key, new_key) do + case 
Map.fetch(necessity_by_association, old_key) do + {:ok, value} -> + necessity_by_association + |> Map.delete(old_key) + |> Map.put(new_key, value) + + :error -> + necessity_by_association + end + end + @doc """ Converts `t:Explorer.Chain.Address.t/0` `hash` to the `t:Explorer.Chain.Address.t/0` with that `hash`. From 1520639a6000ed4f45b82da976c937207581b2f4 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Tue, 10 Mar 2026 12:18:37 +0300 Subject: [PATCH 05/42] Update CHANGELOG --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4b6affa1b5ad..f66556fd846e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,7 @@ ### ⚙️ Miscellaneous Tasks -- Allow disabling contract creation internal transaction association ([#14090](https://github.com/blockscout/blockscout/issues/14090)) +- Allow disabling contract creation internal transaction association ([#14090](https://github.com/blockscout/blockscout/issues/14090), [#14097](https://github.com/blockscout/blockscout/pull/14097)) ## 10.0.2 @@ -141,7 +141,7 @@ ### ⚙️ Miscellaneous Tasks -- Allow disabling contract creation internal transaction association ([#14090](https://github.com/blockscout/blockscout/issues/14090)) +- Allow disabling contract creation internal transaction association ([#14090](https://github.com/blockscout/blockscout/issues/14090), [#14097](https://github.com/blockscout/blockscout/pull/14097)) ## 9.3.6 From f6e81f901a03876c1f655b4c36326c2d836b77f0 Mon Sep 17 00:00:00 2001 From: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> Date: Tue, 10 Mar 2026 14:42:21 +0300 Subject: [PATCH 06/42] fix: `confirm_otp` after `OpenApiSpex` integration (#14098) --- .../account/api/v2/authenticate_controller.ex | 50 +++-- .../api/v2/authenticate_controller_test.exs | 172 ++++++++++++++++++ 2 files changed, 195 insertions(+), 27 deletions(-) diff --git 
a/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/authenticate_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/authenticate_controller.ex index aa6edf230b22..2dd89dfb0464 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/authenticate_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/authenticate_controller.ex @@ -157,42 +157,38 @@ defmodule BlockScoutWeb.Account.API.V2.AuthenticateController do } @doc """ - Confirms a one-time password (OTP) for a given email and updates the session. + Confirms a one-time password (OTP) for the email in the request body and establishes a user session on success. - This function verifies the OTP provided for a specific email address. If the - OTP is valid, it retrieves the authentication information and updates the - user's session accordingly. - - The function performs the following steps: - 1. Confirms the OTP with Auth0 and retrieves the authentication information. - 2. If successful, updates the session with the new authentication data. + Verifies the OTP via `Auth0.confirm_otp_and_get_auth/3`. On successful + verification, retrieves or creates the user's identity and + updates the session via `put_auth_to_session/2`. ## Parameters - - `conn`: The `Plug.Conn` struct representing the current connection. - - `params`: A map containing: - - `"email"`: The email address associated with the OTP. - - `"otp"`: The one-time password to be confirmed. + - `conn`: The `Plug.Conn` struct. The request body must + contain `:email` and `:otp` fields. + - `_params`: Unused. Email and OTP are read from + `conn.body_params`. ## Returns - - `:error`: If there's an unexpected error during the process. - - `{:error, any()}`: If there's a specific error during OTP confirmation or - session update. The error details are included. 
- - `Conn.t()`: A modified connection struct with updated session information - if the OTP is successfully confirmed. + - `Conn.t()` with a 200 status and rendered user info on + successful OTP confirmation. + - `{:enabled, false}` if Auth0 authentication is not enabled. + - `{:error, any()}` if OTP verification or session creation + fails. + - `:error` if an unexpected error occurs during OTP + verification or session creation. ## Notes - - Errors are handled later in `BlockScoutWeb.Account.API.V2.FallbackController`. - - This function relies on the Auth0 service to confirm the OTP and retrieve - the authentication information. - - The function handles both existing and newly created users. - - For newly created users, it may create a new authentication record if the - user is not immediately found in the search after OTP confirmation. - - The session update is handled by the `put_auth_to_session/2` function, which - perform additional operations such as setting cookies or rendering user - information. + - Errors are handled by + `BlockScoutWeb.Account.API.V2.FallbackController`. + - The client's IP address is forwarded to Auth0 for rate + limiting. 
""" @spec confirm_otp(Conn.t(), map()) :: :error | {:error, any()} | {:enabled, false} | Conn.t() - def confirm_otp(conn, %{email: email, otp: otp}) do + def confirm_otp(conn, _params) do + email = Map.get(conn.body_params, :email) + otp = Map.get(conn.body_params, :otp) + with {:enabled, true} <- {:enabled, Auth0.enabled?()}, {:ok, auth} <- Auth0.confirm_otp_and_get_auth(email, otp, AccessHelper.conn_to_ip_string(conn)) do put_auth_to_session(conn, auth) diff --git a/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/authenticate_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/authenticate_controller_test.exs index 59950f5dbd1e..d7a5c5e61437 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/authenticate_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/authenticate_controller_test.exs @@ -275,6 +275,171 @@ defmodule BlockScoutWeb.Account.API.V2.AuthenticateControllerTest do end end + describe "POST api/account/v2/confirm_otp" do + setup do + initial_config = Application.get_env(:ueberauth, Ueberauth.Strategy.Auth0.OAuth) + + Application.put_env( + :ueberauth, + Ueberauth.Strategy.Auth0.OAuth, + Keyword.put(initial_config, :auth0_application_id, "test_app") + ) + + on_exit(fn -> + Application.put_env(:ueberauth, Ueberauth.Strategy.Auth0.OAuth, initial_config) + end) + + :ok + end + + # Regression test: after OpenApiSpex integration, confirm_otp must read + # email and otp from conn.body_params instead of the action's params argument. + # See commit dbf589ae25. 
+ test "confirm OTP successfully", %{conn: conn} do + id_token = build_test_jwt(%{"sub" => "email|123", "email" => "test@example.com"}) + + user_json = + JSON.encode!(%{ + "user_id" => "email|123", + "email" => "test@example.com", + "email_verified" => true, + "name" => "Test User", + "nickname" => "test", + "picture" => "https://example.com/avatar.png", + "user_metadata" => %{ + "test_app" => %{ + "user_id" => "email|123", + "name" => "Test User", + "nickname" => "test", + "picture" => "https://example.com/avatar.png" + } + } + }) + + Tesla.Test.expect_tesla_call( + times: 3, + returns: fn + # OTP confirmation via OAuth2.Client + %{ + method: :post, + url: "https://example.com/oauth/token", + headers: [ + {"accept", "application/json"}, + {"auth0-forwarded-for", _ip}, + {"content-type", "application/json"} + ] + }, + _opts -> + {:ok, + %Tesla.Env{ + status: 200, + body: ~s({"access_token":"test_access","id_token":"#{id_token}","token_type":"Bearer"}) + }} + + # M2M JWT via HttpClient + %{ + method: :post, + url: "https://example.com/oauth/token", + query: [], + headers: [{"Content-type", "application/json"}] + }, + _opts -> + {:ok, + %Tesla.Env{ + status: 200, + body: ~s({"access_token": "test_token", "expires_in": 86400}) + }} + + # Get user by ID via OAuth2.Client + %Tesla.Env{ + method: :get, + url: "https://example.com/api/v2/users/" <> _, + headers: [{"accept", "application/json"}, {"authorization", "Bearer test_token"}], + body: "" + }, + _opts -> + {:ok, + %Tesla.Env{ + status: 200, + body: user_json + }} + end + ) + + response = + conn + |> put_req_header("content-type", "application/json") + |> post("/api/account/v2/confirm_otp", JSON.encode!(%{"email" => "test@example.com", "otp" => "123456"})) + |> json_response(200) + + assert response["email"] == "test@example.com" + assert response["name"] == "Test User" + end + + test "return error for wrong verification code", %{conn: conn} do + Tesla.Test.expect_tesla_call( + times: 1, + returns: fn + %{ + method: 
:post, + url: "https://example.com/oauth/token", + headers: [ + {"accept", "application/json"}, + {"auth0-forwarded-for", _ip}, + {"content-type", "application/json"} + ] + }, + _opts -> + {:ok, + %Tesla.Env{ + status: 403, + body: ~s({"error":"invalid_grant","error_description":"Wrong email or verification code."}) + }} + end + ) + + response = + conn + |> put_req_header("content-type", "application/json") + |> post("/api/account/v2/confirm_otp", JSON.encode!(%{"email" => "test@example.com", "otp" => "000000"})) + |> json_response(500) + + assert response == %{"message" => "Wrong verification code."} + end + + test "return error when max attempts reached", %{conn: conn} do + Tesla.Test.expect_tesla_call( + times: 1, + returns: fn + %{ + method: :post, + url: "https://example.com/oauth/token", + headers: [ + {"accept", "application/json"}, + {"auth0-forwarded-for", _ip}, + {"content-type", "application/json"} + ] + }, + _opts -> + {:ok, + %Tesla.Env{ + status: 403, + body: + ~s({"error":"invalid_grant","error_description":"You've reached the maximum number of attempts. Please try to login again."}) + }} + end + ) + + response = + conn + |> put_req_header("content-type", "application/json") + |> post("/api/account/v2/confirm_otp", JSON.encode!(%{"email" => "test@example.com", "otp" => "000000"})) + |> json_response(500) + + assert response == %{"message" => "Max attempts reached. 
Please resend code."} + end + end + describe "GET api/account/v2/siwe_message" do test "get SIWE message successfully", %{conn: conn} do address = build(:address) @@ -494,4 +659,11 @@ defmodule BlockScoutWeb.Account.API.V2.AuthenticateControllerTest do assert response == %{"message" => "Dynamic integration is disabled"} end end + + defp build_test_jwt(claims) do + header = Base.url_encode64(JSON.encode!(%{"alg" => "HS256", "typ" => "JWT"}), padding: false) + payload = Base.url_encode64(JSON.encode!(claims), padding: false) + signature = Base.url_encode64("test_signature", padding: false) + "#{header}.#{payload}.#{signature}" + end end From 0007494e10cc9cd56e3efaee514381be27807be6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Mar 2026 15:41:16 +0300 Subject: [PATCH 07/42] chore(deps): bump phoenix_live_view from 1.1.24 to 1.1.26 (#14095) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix.lock b/mix.lock index 347f936279ce..54ba00cedec6 100644 --- a/mix.lock +++ b/mix.lock @@ -135,7 +135,7 @@ "phoenix_html": {:hex, :phoenix_html, "4.2.1", "35279e2a39140068fc03f8874408d58eef734e488fc142153f055c5454fd1c08", [:mix], [], "hexpm", "cff108100ae2715dd959ae8f2a8cef8e20b593f8dfd031c9cba92702cf23e053"}, "phoenix_html_helpers": {:hex, :phoenix_html_helpers, "1.0.1", "7eed85c52eff80a179391036931791ee5d2f713d76a81d0d2c6ebafe1e11e5ec", [:mix], [{:phoenix_html, "~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "cffd2385d1fa4f78b04432df69ab8da63dc5cf63e07b713a4dcf36a3740e3090"}, "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.6.2", "b18b0773a1ba77f28c52decbb0f10fd1ac4d3ae5b8632399bbf6986e3b665f62", [:mix], [{:file_system, "~> 0.2.10 or ~> 1.0", [hex: :file_system, repo: 
"hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "d1f89c18114c50d394721365ffb428cce24f1c13de0467ffa773e2ff4a30d5b9"}, - "phoenix_live_view": {:hex, :phoenix_live_view, "1.1.24", "1a000a048d5971b61a9efe29a3c4144ca955afd42224998d841c5011a5354838", [:mix], [{:igniter, ">= 0.6.16 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:lazy_html, "~> 0.1.0", [hex: :lazy_html, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6.15 or ~> 1.7.0 or ~> 1.8.0-rc", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.3 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.15", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "0c724e6c65f197841cac49d73be4e0f9b93a7711eaa52d2d4d1b9f859c329267"}, + "phoenix_live_view": {:hex, :phoenix_live_view, "1.1.26", "306af67d6557cc01f880107cc459f1fa0acbaab60bc8c027a368ba16b3544473", [:mix], [{:igniter, ">= 0.6.16 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:lazy_html, "~> 0.1.0", [hex: :lazy_html, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6.15 or ~> 1.7.0 or ~> 1.8.0-rc", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.3 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.15", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", 
optional: false]}], "hexpm", "0ec34b24c69aa70c4f25a8901effe3462bee6c8ca80a9a4a7685215e3a0ac34e"}, "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.2.0", "ff3a5616e1bed6804de7773b92cbccfc0b0f473faf1f63d7daf1206c7aeaaa6f", [:mix], [], "hexpm", "adc313a5bf7136039f63cfd9668fde73bba0765e0614cba80c06ac9460ff3e96"}, "phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"}, "phoenix_view": {:hex, :phoenix_view, "2.0.4", "b45c9d9cf15b3a1af5fb555c674b525391b6a1fe975f040fb4d913397b31abf4", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "4e992022ce14f31fe57335db27a28154afcc94e9983266835bb3040243eb620b"}, From ee00026ee17ba91e1cddea13764c7f9fd9287df2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Mar 2026 16:24:17 +0300 Subject: [PATCH 08/42] chore(deps): bump telemetry from 1.3.0 to 1.4.1 (#14092) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- apps/explorer/mix.exs | 2 +- mix.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/explorer/mix.exs b/apps/explorer/mix.exs index 1566dd6bbd2a..a2276ff26d5c 100644 --- a/apps/explorer/mix.exs +++ b/apps/explorer/mix.exs @@ -111,7 +111,7 @@ defmodule Explorer.Mixfile do # `:spandex` tracing of `:ecto` {:spandex_ecto, "~> 0.7.0"}, # Attach `:prometheus_ecto` to `:ecto` - {:telemetry, "~> 1.3.0"}, + {:telemetry, "~> 1.4.1"}, # `Timex.Duration` for `Explorer.Chain.Cache.Counters.AverageBlockTime.average_block_time/0` {:timex, "~> 3.7.1"}, {:con_cache, "~> 1.0"}, diff 
--git a/mix.lock b/mix.lock index 54ba00cedec6..8f4122d08283 100644 --- a/mix.lock +++ b/mix.lock @@ -169,7 +169,7 @@ "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"}, "statistex": {:hex, :statistex, "1.1.0", "7fec1eb2f580a0d2c1a05ed27396a084ab064a40cfc84246dbfb0c72a5c761e5", [:mix], [], "hexpm", "f5950ea26ad43246ba2cce54324ac394a4e7408fdcf98b8e230f503a0cba9cf5"}, "sweet_xml": {:hex, :sweet_xml, "0.7.5", "803a563113981aaac202a1dbd39771562d0ad31004ddbfc9b5090bdcd5605277", [:mix], [], "hexpm", "193b28a9b12891cae351d81a0cead165ffe67df1b73fe5866d10629f4faefb12"}, - "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"}, + "telemetry": {:hex, :telemetry, "1.4.1", "ab6de178e2b29b58e8256b92b382ea3f590a47152ca3651ea857a6cae05ac423", [:rebar3], [], "hexpm", "2172e05a27531d3d31dd9782841065c50dd5c3c7699d95266b2edd54c2dafa1c"}, "tesla": {:hex, :tesla, "1.16.0", "de77d083aea08ebd1982600693ff5d779d68a4bb835d136a0394b08f69714660", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.13", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, ">= 1.0.0", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.21", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.2", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:mox, "~> 1.0", [hex: :mox, repo: "hexpm", 
optional: true]}, {:msgpax, "~> 2.3", [hex: :msgpax, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "eb3bdfc0c6c8a23b4e3d86558e812e3577acff1cb4acb6cfe2da1985a1035b89"}, "timex": {:hex, :timex, "3.7.13", "0688ce11950f5b65e154e42b47bf67b15d3bc0e0c3def62199991b8a8079a1e2", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.26", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.1", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "09588e0522669328e973b8b4fd8741246321b3f0d32735b589f78b136e6d4c54"}, "ton": {:hex, :ton, "0.5.1", "79745434a93e5f7de3572fdcf04feb048620f0edab9794fc412a73528672927d", [:mix], [{:cafezinho, "~> 0.4.4", [hex: :cafezinho, repo: "hexpm", optional: false]}, {:evil_crc32c, "~> 0.2.9", [hex: :evil_crc32c, repo: "hexpm", optional: false]}, {:ex_pbkdf2, "~> 0.8.4", [hex: :ex_pbkdf2, repo: "hexpm", optional: false]}, {:mnemoniac, "~> 0.1.4", [hex: :mnemoniac, repo: "hexpm", optional: false]}], "hexpm", "916f656c870902a61690347da9500c5ce27f04c02e02441363bac7b128030f07"}, From bb591e6d2c5ce321ae9ce8636bd513387c3e7136 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Mar 2026 16:24:36 +0300 Subject: [PATCH 09/42] chore(deps): bump ex_cldr from 2.47.0 to 2.47.1 (#14096) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix.lock b/mix.lock index 8f4122d08283..31bc2bce0938 100644 --- a/mix.lock +++ b/mix.lock @@ -55,7 +55,7 @@ "ex_aws": {:hex, :ex_aws, "2.6.1", "194582c7b09455de8a5ab18a0182e6dd937d53df82be2e63c619d01bddaccdfa", [:mix], [{:configparser_ex, "~> 5.0", [hex: :configparser_ex, repo: "hexpm", optional: true]}, {:hackney, "~> 
1.16", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: true]}, {:jsx, "~> 2.8 or ~> 3.0", [hex: :jsx, repo: "hexpm", optional: true]}, {:mime, "~> 1.2 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:req, "~> 0.5.10 or ~> 0.6 or ~> 1.0", [hex: :req, repo: "hexpm", optional: true]}, {:sweet_xml, "~> 0.7", [hex: :sweet_xml, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "67842a08c90a1d9a09dbe4ac05754175c7ca253abe4912987c759395d4bd9d26"}, "ex_aws_s3": {:hex, :ex_aws_s3, "2.5.9", "862b7792f2e60d7010e2920d79964e3fab289bc0fd951b0ba8457a3f7f9d1199", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm", "a480d2bb2da64610014021629800e1e9457ca5e4a62f6775bffd963360c2bf90"}, "ex_brotli": {:hex, :ex_brotli, "0.5.0", "573645db5201317b6176b8858b668ea4ca89dc5c21852e84b9867579d483c220", [:mix], [{:phoenix, ">= 0.0.0", [hex: :phoenix, repo: "hexpm", optional: true]}, {:rustler, "~> 0.29", [hex: :rustler, repo: "hexpm", optional: true]}, {:rustler_precompiled, "~> 0.6", [hex: :rustler_precompiled, repo: "hexpm", optional: false]}], "hexpm", "8447d98d51f8f312629fd38619d4f564507dcf3a03d175c3f8f4ddf98e46dd92"}, - "ex_cldr": {:hex, :ex_cldr, "2.47.0", "350cab41e7deac2ab65cedf71e21e055a52927543dc84570abd8c686ac00cb4d", [:mix], [{:cldr_utils, "~> 2.28", [hex: :cldr_utils, repo: "hexpm", optional: false]}, {:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:gettext, "~> 0.19 or ~> 1.0", [hex: :gettext, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: true]}], "hexpm", "3e454cbe4354f042795ae0562686e5137d4cfb953f3bc54c87077ac24c17be09"}, + "ex_cldr": {:hex, :ex_cldr, 
"2.47.1", "2dd2f0da2d5720bf413e0320cfd0ea7f0259a888c33e727c5f0db6bab3380252", [:mix], [{:cldr_utils, "~> 2.28", [hex: :cldr_utils, repo: "hexpm", optional: false]}, {:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:gettext, "~> 0.19 or ~> 1.0", [hex: :gettext, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: true]}], "hexpm", "2555d6599d16311a096d8cb2d02e9dc3011ca02abbae446817d4f445a286c758"}, "ex_cldr_currencies": {:hex, :ex_cldr_currencies, "2.17.0", "c38d76339dbee413f7dd1aba4cdf05758bd4c0bbfe9c3b1c8602f96082c2890a", [:mix], [{:ex_cldr, "~> 2.38", [hex: :ex_cldr, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "9af59bd29407dcca59fa39ded8c1649ae1cf6ec29fd0611576dcad0279bce0db"}, "ex_cldr_lists": {:hex, :ex_cldr_lists, "2.11.1", "ad18f861d7c5ca82aac6d173469c6a2339645c96790172ab0aa255b64fb7303b", [:mix], [{:ex_cldr_numbers, "~> 2.25", [hex: :ex_cldr_numbers, repo: "hexpm", optional: false]}, {:ex_doc, "~> 0.18", [hex: :ex_doc, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "00161c04510ccb3f18b19a6b8562e50c21f1e9c15b8ff4c934bea5aad0b4ade2"}, "ex_cldr_numbers": {:hex, :ex_cldr_numbers, "2.38.0", "b5564b57d3769c85e16689472a9bb65804f71ccd3484144e31998398fda25ad1", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:digital_token, "~> 0.3 or ~> 1.0", [hex: :digital_token, repo: "hexpm", optional: false]}, {:ex_cldr, "~> 2.45", [hex: :ex_cldr, repo: "hexpm", optional: false]}, {:ex_cldr_currencies, "~> 2.17", [hex: :ex_cldr_currencies, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "b29e4d723c69db5d0a3f3bcef7583a0bc87dda1cd642187c589fec4bfc59a703"}, From 
32e27d18c556e80418ff58e46aae173445a98d45 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Mar 2026 16:53:59 +0300 Subject: [PATCH 10/42] chore(deps): bump ecto_sql from 3.13.4 to 3.13.5 (#14093) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix.lock b/mix.lock index 31bc2bce0938..871df9d7416b 100644 --- a/mix.lock +++ b/mix.lock @@ -45,7 +45,7 @@ "dns": {:hex, :dns, "2.4.0", "44790a0375b28bdc7b59fc894460bfcb03ffeec4c5984e2c3e8b0797b1518327", [:mix], [], "hexpm", "e178e353c469820d02ba889d6a80d01c8c27b47dfcda4016a9cbc6218e3eed64"}, "earmark_parser": {:hex, :earmark_parser, "1.4.44", "f20830dd6b5c77afe2b063777ddbbff09f9759396500cdbe7523efd58d7a339c", [:mix], [], "hexpm", "4778ac752b4701a5599215f7030989c989ffdc4f6df457c5f36938cc2d2a2750"}, "ecto": {:hex, :ecto, "3.13.5", "9d4a69700183f33bf97208294768e561f5c7f1ecf417e0fa1006e4a91713a834", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "df9efebf70cf94142739ba357499661ef5dbb559ef902b68ea1f3c1fabce36de"}, - "ecto_sql": {:hex, :ecto_sql, "3.13.4", "b6e9d07557ddba62508a9ce4a484989a5bb5e9a048ae0e695f6d93f095c25d60", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.13.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.19 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", 
"2b38cf0749ca4d1c5a8bcbff79bbe15446861ca12a61f9fba604486cb6b62a14"}, + "ecto_sql": {:hex, :ecto_sql, "3.13.5", "2f8282b2ad97bf0f0d3217ea0a6fff320ead9e2f8770f810141189d182dc304e", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.13.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.19 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "aa36751f4e6a2b56ae79efb0e088042e010ff4935fc8684e74c23b1f49e25fdc"}, "elixir_make": {:hex, :elixir_make, "0.9.0", "6484b3cd8c0cee58f09f05ecaf1a140a8c97670671a6a0e7ab4dc326c3109726", [:mix], [], "hexpm", "db23d4fd8b757462ad02f8aa73431a426fe6671c80b200d9710caf3d1dd0ffdb"}, "erlex": {:hex, :erlex, "0.2.8", "cd8116f20f3c0afe376d1e8d1f0ae2452337729f68be016ea544a72f767d9c12", [:mix], [], "hexpm", "9d66ff9fedf69e49dc3fd12831e12a8a37b76f8651dd21cd45fcf5561a8a7590"}, "eternal": {:hex, :eternal, "1.2.2", "d1641c86368de99375b98d183042dd6c2b234262b8d08dfd72b9eeaafc2a1abd", [:mix], [], "hexpm", "2c9fe32b9c3726703ba5e1d43a1d255a4f3f2d8f8f9bc19f094c7cb1a7a9e782"}, From dd36bdbba38453b4eecbc631de3c44c20d41ffd1 Mon Sep 17 00:00:00 2001 From: l0gun0v <98526047+l0gun0v@users.noreply.github.com> Date: Wed, 11 Mar 2026 09:12:49 +0100 Subject: [PATCH 11/42] feat: FHE operations and tags (#13742) Co-authored-by: Maksym Zub --- .../api/v2/transaction_controller.ex | 47 ++ .../lib/block_scout_web/routers/api_router.ex | 2 + .../schemas/api/v2/fhe_operation.ex | 131 +++ .../schemas/api/v2/transaction.ex | 10 +- .../views/api/v2/transaction_view.ex | 97 ++- .../api/v2/transaction_controller_test.exs | 180 +++- .../chain/fhe/fhe_contract_checker.ex | 160 ++++ .../lib/explorer/chain/fhe/fhe_operation.ex | 104 +++ 
.../explorer/chain/fhe/fhe_operator_prices.ex | 582 +++++++++++++ .../explorer/lib/explorer/chain/fhe/parser.ex | 476 +++++++++++ .../chain/import/runner/fhe_operations.ex | 155 ++++ .../stage/block_transaction_referencing.ex | 3 +- .../lib/explorer/chain/transaction.ex | 1 + .../explorer/lib/explorer/tags/address_tag.ex | 2 +- .../explorer/tags/address_tag_cataloger.ex | 2 +- .../20251214200315_create_fhe_operations.exs | 46 + .../chain/fhe/fhe_contract_checker_test.exs | 166 ++++ .../explorer/chain/fhe/fhe_operation_test.exs | 141 ++++ .../chain/fhe/fhe_operator_prices_test.exs | 84 ++ .../test/explorer/chain/fhe/parser_test.exs | 798 ++++++++++++++++++ .../import/runner/fhe_operations_test.exs | 227 +++++ apps/explorer/test/support/factory.ex | 27 + apps/indexer/lib/indexer/block/fetcher.ex | 5 +- .../lib/indexer/transform/fhe_operations.ex | 155 ++++ config/runtime.exs | 3 + cspell.json | 6 + 26 files changed, 3595 insertions(+), 15 deletions(-) create mode 100644 apps/block_scout_web/lib/block_scout_web/schemas/api/v2/fhe_operation.ex create mode 100644 apps/explorer/lib/explorer/chain/fhe/fhe_contract_checker.ex create mode 100644 apps/explorer/lib/explorer/chain/fhe/fhe_operation.ex create mode 100644 apps/explorer/lib/explorer/chain/fhe/fhe_operator_prices.ex create mode 100644 apps/explorer/lib/explorer/chain/fhe/parser.ex create mode 100644 apps/explorer/lib/explorer/chain/import/runner/fhe_operations.ex create mode 100644 apps/explorer/priv/repo/migrations/20251214200315_create_fhe_operations.exs create mode 100644 apps/explorer/test/explorer/chain/fhe/fhe_contract_checker_test.exs create mode 100644 apps/explorer/test/explorer/chain/fhe/fhe_operation_test.exs create mode 100644 apps/explorer/test/explorer/chain/fhe/fhe_operator_prices_test.exs create mode 100644 apps/explorer/test/explorer/chain/fhe/parser_test.exs create mode 100644 apps/explorer/test/explorer/chain/import/runner/fhe_operations_test.exs create mode 100644 
apps/indexer/lib/indexer/transform/fhe_operations.ex diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex index 1c1d26c6ee71..3e21278a72ea 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/transaction_controller.ex @@ -55,6 +55,7 @@ defmodule BlockScoutWeb.API.V2.TransactionController do alias Explorer.Chain.Beacon.Deposit, as: BeaconDeposit alias Explorer.Chain.Beacon.Reader, as: BeaconReader alias Explorer.Chain.Cache.Counters.{NewPendingTransactionsCount, Transactions24hCount} + alias Explorer.Chain.FheOperation alias Explorer.Chain.{Hash, Transaction} alias Explorer.Chain.Optimism.TransactionBatch, as: OptimismTransactionBatch alias Explorer.Chain.PolygonZkevm.Reader, as: PolygonZkevmReader @@ -851,6 +852,52 @@ defmodule BlockScoutWeb.API.V2.TransactionController do unprocessable_entity: JsonErrorResponse.response() ] + operation :fhe_operations, + summary: "List FHE operations for a specific transaction", + description: + "Retrieves Fully Homomorphic Encryption (FHE) operations parsed from transaction logs. Includes operation details, HCU (Homomorphic Compute Unit) costs, operation types, and related metadata.", + parameters: [transaction_hash_param() | base_params()], + responses: [ + ok: + {"FHE operations for the specified transaction with transaction-level metrics.", "application/json", + Schemas.FheOperationsResponse}, + not_found: NotFoundResponse.response(), + unprocessable_entity: JsonErrorResponse.response() + ] + + @doc """ + Lists FHE operations for a specific transaction. + + Retrieves Fully Homomorphic Encryption (FHE) operations parsed from transaction + logs. 
Returns operation details, HCU (Homomorphic Compute Unit) costs, operation + types, and transaction-level metrics (total HCU, max depth, operation count). + + ## Parameters + - `conn` - The Plug.Conn. + - `params` - Map containing `:transaction_hash_param` (transaction hash string). + + ## Returns + - `Plug.Conn.t()` with 200 and JSON body on success. + - `{atom(), any()}` error tuple on validation failure (e.g. invalid hash). + """ + @spec fhe_operations(Plug.Conn.t(), map()) :: Plug.Conn.t() | {atom(), any()} + def fhe_operations(conn, %{transaction_hash_param: transaction_hash_string} = params) do + with {:ok, _transaction, transaction_hash} <- validate_transaction(transaction_hash_string, params) do + # Fetch pre-parsed FHE operations from database + operations = FheOperation.by_transaction_hash(transaction_hash) + metrics = FheOperation.transaction_metrics(transaction_hash) + + conn + |> put_status(200) + |> render(:fhe_operations, + operations: operations, + total_hcu: metrics.total_hcu, + max_depth_hcu: metrics.max_depth_hcu, + operation_count: metrics.operation_count + ) + end + end + @doc """ Function to handle GET requests to `/api/v2/transactions/:transaction_hash_param/state-changes` endpoint. 
""" diff --git a/apps/block_scout_web/lib/block_scout_web/routers/api_router.ex b/apps/block_scout_web/lib/block_scout_web/routers/api_router.ex index 24f67d4cf2c0..b2e210211dc5 100644 --- a/apps/block_scout_web/lib/block_scout_web/routers/api_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/routers/api_router.ex @@ -186,6 +186,8 @@ defmodule BlockScoutWeb.Routers.ApiRouter do get("/:transaction_hash_param/token-transfers", V2.TransactionController, :token_transfers) get("/:transaction_hash_param/internal-transactions", V2.TransactionController, :internal_transactions) get("/:transaction_hash_param/logs", V2.TransactionController, :logs) + get("/:transaction_hash_param/fhe-operations", V2.TransactionController, :fhe_operations) + get("/:transaction_hash_param/raw-trace", V2.TransactionController, :raw_trace) get("/:transaction_hash_param/state-changes", V2.TransactionController, :state_changes) get("/:transaction_hash_param/summary", V2.TransactionController, :summary) diff --git a/apps/block_scout_web/lib/block_scout_web/schemas/api/v2/fhe_operation.ex b/apps/block_scout_web/lib/block_scout_web/schemas/api/v2/fhe_operation.ex new file mode 100644 index 000000000000..8dbee7d3d500 --- /dev/null +++ b/apps/block_scout_web/lib/block_scout_web/schemas/api/v2/fhe_operation.ex @@ -0,0 +1,131 @@ +defmodule BlockScoutWeb.Schemas.API.V2.FheOperation do + @moduledoc """ + This module defines the schema for the FHE Operation struct. 
+ """ + require OpenApiSpex + + alias BlockScoutWeb.Schemas.API.V2.{Address, General} + alias OpenApiSpex.Schema + + @fhe_operation_type_enum [ + "arithmetic", + "bitwise", + "comparison", + "unary", + "control", + "encryption", + "random" + ] + + @fhe_type_enum [ + "Bool", + "Uint8", + "Uint16", + "Uint32", + "Uint64", + "Uint128", + "Uint160", + "Uint256", + "Bytes64", + "Bytes128", + "Bytes256" + ] + + @fhe_operation_inputs_schema %Schema{ + type: :object, + properties: %{ + lhs: %Schema{type: :string, nullable: true}, + rhs: %Schema{type: :string, nullable: true}, + ct: %Schema{type: :string, nullable: true}, + control: %Schema{type: :string, nullable: true}, + if_true: %Schema{type: :string, nullable: true}, + if_false: %Schema{type: :string, nullable: true}, + plaintext: %Schema{type: :number, nullable: true} + }, + additionalProperties: false + } + + OpenApiSpex.schema(%{ + type: :object, + properties: %{ + log_index: %Schema{type: :integer, nullable: false}, + operation: %Schema{type: :string, nullable: false, example: "FheAdd"}, + type: %Schema{ + type: :string, + enum: @fhe_operation_type_enum, + nullable: false, + example: "arithmetic" + }, + fhe_type: %Schema{ + type: :string, + enum: @fhe_type_enum, + nullable: false, + example: "Uint8" + }, + is_scalar: %Schema{type: :boolean, nullable: false, example: false}, + hcu_cost: %Schema{type: :integer, nullable: false, example: 100, minimum: 0}, + hcu_depth: %Schema{type: :integer, nullable: false, example: 1, minimum: 0}, + caller: %Schema{allOf: [Address], nullable: true}, + inputs: @fhe_operation_inputs_schema, + result: General.HexString, + block_number: %Schema{type: :integer, nullable: false, example: 12_345_678} + }, + required: [ + :log_index, + :operation, + :type, + :fhe_type, + :is_scalar, + :hcu_cost, + :hcu_depth, + :inputs, + :result, + :block_number + ], + additionalProperties: false + }) +end + +defmodule BlockScoutWeb.Schemas.API.V2.FheOperationsResponse do + @moduledoc """ + This module 
defines the schema for the FHE Operations response from /api/v2/transactions/:transaction_hash_param/fhe-operations. + """ + require OpenApiSpex + + alias BlockScoutWeb.Schemas.API.V2.FheOperation + alias OpenApiSpex.Schema + + OpenApiSpex.schema(%{ + type: :object, + properties: %{ + items: %Schema{ + type: :array, + items: %Schema{allOf: [FheOperation], nullable: false}, + nullable: false + }, + total_hcu: %Schema{ + type: :integer, + nullable: false, + description: "Total HCU (Homomorphic Compute Units) cost for all operations in the transaction", + example: 500, + minimum: 0 + }, + max_depth_hcu: %Schema{ + type: :integer, + nullable: false, + description: "Maximum HCU depth across all operations in the transaction", + example: 3, + minimum: 0 + }, + operation_count: %Schema{ + type: :integer, + nullable: false, + description: "Total number of FHE operations in the transaction", + example: 5, + minimum: 0 + } + }, + required: [:items, :total_hcu, :max_depth_hcu, :operation_count], + additionalProperties: false + }) +end diff --git a/apps/block_scout_web/lib/block_scout_web/schemas/api/v2/transaction.ex b/apps/block_scout_web/lib/block_scout_web/schemas/api/v2/transaction.ex index cc11fed2369d..f1e089835910 100644 --- a/apps/block_scout_web/lib/block_scout_web/schemas/api/v2/transaction.ex +++ b/apps/block_scout_web/lib/block_scout_web/schemas/api/v2/transaction.ex @@ -475,7 +475,12 @@ defmodule BlockScoutWeb.Schemas.API.V2.Transaction do }, has_error_in_internal_transactions: %Schema{type: :boolean, nullable: true}, authorization_list: %Schema{type: :array, items: SignedAuthorization, nullable: true}, - is_pending_update: %Schema{type: :boolean, nullable: true} + is_pending_update: %Schema{type: :boolean, nullable: true}, + fhe_operations_count: %Schema{ + type: :integer, + description: "Number of FHE (Fully Homomorphic Encryption) operations in the transaction", + nullable: false + } }, required: [ :hash, @@ -514,7 +519,8 @@ defmodule 
BlockScoutWeb.Schemas.API.V2.Transaction do :transaction_tag, :has_error_in_internal_transactions, :authorization_list, - :is_pending_update + :is_pending_update, + :fhe_operations_count ], additionalProperties: false } diff --git a/apps/block_scout_web/lib/block_scout_web/views/api/v2/transaction_view.ex b/apps/block_scout_web/lib/block_scout_web/views/api/v2/transaction_view.ex index 647872f7a85f..f086e7ee5e74 100644 --- a/apps/block_scout_web/lib/block_scout_web/views/api/v2/transaction_view.ex +++ b/apps/block_scout_web/lib/block_scout_web/views/api/v2/transaction_view.ex @@ -86,7 +86,7 @@ defmodule BlockScoutWeb.API.V2.TransactionView do |> with_chain_type_transformations() |> Enum.zip(decoded_transactions) |> Enum.map(fn {transaction, decoded_input} -> - prepare_transaction(transaction, conn, false, block_height, decoded_input) + prepare_transaction(transaction, conn, false, block_height, nil, decoded_input) end), "next_page_params" => next_page_params } @@ -106,7 +106,7 @@ defmodule BlockScoutWeb.API.V2.TransactionView do |> with_chain_type_transformations() |> Enum.zip(decoded_transactions) |> Enum.map(fn {transaction, decoded_input} -> - prepare_transaction(transaction, conn, false, block_height, decoded_input) + prepare_transaction(transaction, conn, false, block_height, nil, decoded_input) end) end @@ -116,7 +116,7 @@ defmodule BlockScoutWeb.API.V2.TransactionView do transaction |> with_chain_type_transformations() - |> prepare_transaction(conn, true, block_height, decoded_input) + |> prepare_transaction(conn, true, block_height, nil, decoded_input) end def render("raw_trace.json", %{raw_traces: raw_traces}) do @@ -238,6 +238,34 @@ defmodule BlockScoutWeb.API.V2.TransactionView do |> Enum.map(&prepare_signed_authorization/1) end + @doc """ + Renders FHE operations for a transaction as JSON. + + Returns a map with items (list of operation objects), total_hcu, max_depth_hcu, + and operation_count. 
Each item includes log_index, operation, type, fhe_type, + is_scalar, hcu_cost, hcu_depth, caller, inputs, result, and block_number. + + ## Parameters + - `assigns` - Map with `:operations` (list of FheOperation.t()), `:total_hcu`, + `:max_depth_hcu`, and `:operation_count`. + + ## Returns + - Map with "items", "total_hcu", "max_depth_hcu", "operation_count" keys. + """ + def render("fhe_operations.json", %{ + operations: operations, + total_hcu: total_hcu, + max_depth_hcu: max_depth_hcu, + operation_count: operation_count + }) do + %{ + "items" => Enum.map(operations, &prepare_fhe_operation/1), + "total_hcu" => total_hcu, + "max_depth_hcu" => max_depth_hcu, + "operation_count" => operation_count + } + end + @doc """ Returns the ABI of a smart contract or an empty list if the smart contract is nil """ @@ -417,7 +445,14 @@ defmodule BlockScoutWeb.API.V2.TransactionView do end end - defp prepare_transaction(transaction, conn, single_transaction?, block_height, watchlist_names \\ nil, decoded_input) + defp prepare_transaction( + transaction, + conn, + single_transaction?, + block_height, + watchlist_names, + decoded_input + ) defp prepare_transaction( {%Reward{} = emission_reward, %Reward{} = validator_reward}, @@ -456,7 +491,7 @@ defmodule BlockScoutWeb.API.V2.TransactionView do watchlist_names, decoded_input ) do - base_fee_per_gas = transaction.block && transaction.block.base_fee_per_gas + base_fee_per_gas = base_fee_per_gas(transaction) max_priority_fee_per_gas = transaction.max_priority_fee_per_gas max_fee_per_gas = transaction.max_fee_per_gas @@ -504,14 +539,14 @@ defmodule BlockScoutWeb.API.V2.TransactionView do "confirmation_duration" => processing_time_duration(transaction), "value" => transaction.value, "fee" => transaction |> Transaction.fee(:wei) |> format_fee(), - "gas_price" => transaction.gas_price || Transaction.effective_gas_price(transaction), + "gas_price" => gas_price_for_display(transaction), "type" => transaction.type, "gas_used" => 
transaction.gas_used, "gas_limit" => transaction.gas, "max_fee_per_gas" => transaction.max_fee_per_gas, "max_priority_fee_per_gas" => transaction.max_priority_fee_per_gas, "base_fee_per_gas" => base_fee_per_gas, - "priority_fee" => priority_fee_per_gas && Wei.mult(priority_fee_per_gas, transaction.gas_used), + "priority_fee" => priority_fee_display(priority_fee_per_gas, transaction), "transaction_burnt_fee" => burnt_fees(transaction, base_fee_per_gas), "nonce" => transaction.nonce, "position" => transaction.index, @@ -529,13 +564,34 @@ defmodule BlockScoutWeb.API.V2.TransactionView do GetTransactionTags.get_transaction_tags(transaction.hash, current_user(single_transaction? && conn)), "has_error_in_internal_transactions" => transaction.has_error_in_internal_transactions, "authorization_list" => authorization_list(transaction.signed_authorizations), - "is_pending_update" => transaction.block && transaction.block.refetch_needed + "is_pending_update" => transaction_pending_update?(transaction), + "fhe_operations_count" => fhe_operations_count_display(transaction) } result |> with_chain_type_fields(transaction, single_transaction?, conn, watchlist_names) end + defp base_fee_per_gas(transaction) do + transaction.block && transaction.block.base_fee_per_gas + end + + defp gas_price_for_display(transaction) do + transaction.gas_price || Transaction.effective_gas_price(transaction) + end + + defp priority_fee_display(priority_fee_per_gas, transaction) do + priority_fee_per_gas && Wei.mult(priority_fee_per_gas, transaction.gas_used) + end + + defp transaction_pending_update?(transaction) do + transaction.block && transaction.block.refetch_needed + end + + defp fhe_operations_count_display(transaction) do + transaction.fhe_operations_count || 0 + end + # Calculates burnt fees for a transaction. 
# # ## Parameters @@ -1027,4 +1083,29 @@ defmodule BlockScoutWeb.API.V2.TransactionView do ) do result end + + defp prepare_fhe_operation(operation) do + caller_info = + if operation.caller do + # operation.caller is an Explorer.Chain.Hash struct + address_hash = operation.caller + Helper.address_with_info(nil, %{hash: address_hash}, address_hash, false) + else + nil + end + + %{ + "log_index" => operation.log_index, + "operation" => operation.operation, + "type" => operation.operation_type, + "fhe_type" => operation.fhe_type, + "is_scalar" => operation.is_scalar, + "hcu_cost" => operation.hcu_cost, + "hcu_depth" => operation.hcu_depth, + "caller" => caller_info, + "inputs" => operation.input_handles, + "result" => "0x" <> Base.encode16(operation.result_handle, case: :lower), + "block_number" => operation.block_number + } + end end diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/transaction_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/transaction_controller_test.exs index b77b3850530f..e4bd0655e889 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/transaction_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/transaction_controller_test.exs @@ -9,7 +9,7 @@ defmodule BlockScoutWeb.API.V2.TransactionControllerTest do import Mox alias Explorer.Account.{Identity, WatchlistAddress} - alias Explorer.Chain.{Address, InternalTransaction, Log, Token, TokenTransfer, Transaction, Wei} + alias Explorer.Chain.{Address, FheOperation, InternalTransaction, Log, Token, TokenTransfer, Transaction, Wei} alias Explorer.Chain.Beacon.Deposit, as: BeaconDeposit alias Explorer.{Repo, TestHelper} @@ -1369,6 +1369,184 @@ defmodule BlockScoutWeb.API.V2.TransactionControllerTest do end end + describe "/transactions/{transaction_hash}/fhe-operations" do + test "return 404 on non existing transaction", %{conn: conn} do + transaction = build(:transaction) + request = 
get(conn, "/api/v2/transactions/#{to_string(transaction.hash)}/fhe-operations") + + assert %{"message" => "Not found"} = json_response(request, 404) + end + + test "return 422 on invalid transaction hash", %{conn: conn} do + request = get(conn, "/api/v2/transactions/0x/fhe-operations") + + assert %{ + "errors" => [ + %{ + "detail" => "Invalid format. Expected ~r/^0x([A-Fa-f0-9]{64})$/", + "source" => %{"pointer" => "/transaction_hash_param"}, + "title" => "Invalid value" + } + ] + } = json_response(request, 422) + end + + test "return empty list when no FHE operations", %{conn: conn} do + transaction = + :transaction + |> insert() + |> with_block() + + request = get(conn, "/api/v2/transactions/#{to_string(transaction.hash)}/fhe-operations") + + assert response = json_response(request, 200) + assert response["items"] == [] + assert response["total_hcu"] == 0 + assert response["max_depth_hcu"] == 0 + assert response["operation_count"] == 0 + end + + test "return FHE operations for transaction", %{conn: conn} do + transaction = + :transaction + |> insert() + |> with_block() + + caller = insert(:address) + + fhe_operation_1 = + insert(:fhe_operation, + transaction_hash: transaction.hash, + log_index: 1, + block_hash: transaction.block.hash, + block_number: transaction.block_number, + caller: caller.hash, + hcu_cost: 100, + hcu_depth: 1 + ) + + fhe_operation_2 = + insert(:fhe_operation, + transaction_hash: transaction.hash, + log_index: 2, + block_hash: transaction.block.hash, + block_number: transaction.block_number, + caller: caller.hash, + hcu_cost: 200, + hcu_depth: 2 + ) + + # Create another transaction with FHE operations to ensure filtering works + transaction_2 = + :transaction + |> insert() + |> with_block() + + insert(:fhe_operation, + transaction_hash: transaction_2.hash, + log_index: 1, + block_hash: transaction_2.block.hash, + block_number: transaction_2.block_number + ) + + request = get(conn, 
"/api/v2/transactions/#{to_string(transaction.hash)}/fhe-operations") + + assert response = json_response(request, 200) + assert Enum.count(response["items"]) == 2 + assert response["total_hcu"] == 300 + assert response["max_depth_hcu"] == 2 + assert response["operation_count"] == 2 + + # Check first operation + operation_1 = Enum.at(response["items"], 0) + assert operation_1["log_index"] == fhe_operation_1.log_index + assert operation_1["operation"] == fhe_operation_1.operation + assert operation_1["type"] == fhe_operation_1.operation_type + assert operation_1["fhe_type"] == fhe_operation_1.fhe_type + assert operation_1["is_scalar"] == fhe_operation_1.is_scalar + assert operation_1["hcu_cost"] == fhe_operation_1.hcu_cost + assert operation_1["hcu_depth"] == fhe_operation_1.hcu_depth + assert operation_1["block_number"] == fhe_operation_1.block_number + assert operation_1["caller"] != nil + assert operation_1["caller"]["hash"] == Address.checksum(caller.hash) + assert operation_1["result"] == "0x" <> Base.encode16(fhe_operation_1.result_handle, case: :lower) + assert operation_1["inputs"] == fhe_operation_1.input_handles + + # Check second operation + operation_2 = Enum.at(response["items"], 1) + assert operation_2["log_index"] == fhe_operation_2.log_index + assert operation_2["hcu_cost"] == fhe_operation_2.hcu_cost + assert operation_2["hcu_depth"] == fhe_operation_2.hcu_depth + end + + test "return FHE operations without caller", %{conn: conn} do + transaction = + :transaction + |> insert() + |> with_block() + + fhe_operation = + insert(:fhe_operation, + transaction_hash: transaction.hash, + log_index: 1, + block_hash: transaction.block.hash, + block_number: transaction.block_number, + caller: nil + ) + + request = get(conn, "/api/v2/transactions/#{to_string(transaction.hash)}/fhe-operations") + + assert response = json_response(request, 200) + assert Enum.count(response["items"]) == 1 + + operation = Enum.at(response["items"], 0) + assert operation["caller"] == 
nil + assert operation["log_index"] == fhe_operation.log_index + end + + test "return FHE operations ordered by log_index", %{conn: conn} do + transaction = + :transaction + |> insert() + |> with_block() + + # Insert operations in non-sequential order + fhe_operation_3 = + insert(:fhe_operation, + transaction_hash: transaction.hash, + log_index: 3, + block_hash: transaction.block.hash, + block_number: transaction.block_number + ) + + fhe_operation_1 = + insert(:fhe_operation, + transaction_hash: transaction.hash, + log_index: 1, + block_hash: transaction.block.hash, + block_number: transaction.block_number + ) + + fhe_operation_2 = + insert(:fhe_operation, + transaction_hash: transaction.hash, + log_index: 2, + block_hash: transaction.block.hash, + block_number: transaction.block_number + ) + + request = get(conn, "/api/v2/transactions/#{to_string(transaction.hash)}/fhe-operations") + + assert response = json_response(request, 200) + assert Enum.count(response["items"]) == 3 + + # Verify ordering + assert Enum.at(response["items"], 0)["log_index"] == fhe_operation_1.log_index + assert Enum.at(response["items"], 1)["log_index"] == fhe_operation_2.log_index + assert Enum.at(response["items"], 2)["log_index"] == fhe_operation_3.log_index + end + end + describe "/transactions/{transaction_hash}/state-changes" do test "return 404 on non existing transaction", %{conn: conn} do transaction = build(:transaction) diff --git a/apps/explorer/lib/explorer/chain/fhe/fhe_contract_checker.ex b/apps/explorer/lib/explorer/chain/fhe/fhe_contract_checker.ex new file mode 100644 index 000000000000..8ab87592d1fe --- /dev/null +++ b/apps/explorer/lib/explorer/chain/fhe/fhe_contract_checker.ex @@ -0,0 +1,160 @@ +defmodule Explorer.Chain.FheContractChecker do + @moduledoc """ + Helper module to check if a contract is a Confidential/FHE contract. + Uses the confidentialProtocolId() function to determine if a contract uses FHE. 
+ """ + require Logger + + import Ecto.Query, only: [from: 2] + import Explorer.Chain, only: [select_repo: 1] + import EthereumJSONRPC, only: [json_rpc: 2] + + alias EthereumJSONRPC.Contract + alias Explorer.Chain.{Address, Hash} + alias Explorer.Repo + alias Explorer.Tags.{AddressTag, AddressToTag} + + @confidential_protocol_id_selector "0x8927b030" + @fhe_tag_label "fhe" + @fhe_tag_display_name "FHE" + + @doc """ + Checks if a contract is FHE and saves the result as a tag in the database. + """ + @spec check_and_save_fhe_status(Hash.Address.t() | nil, Keyword.t()) :: :ok | :empty | :error | :already_checked + def check_and_save_fhe_status(address_hash, options \\ []) + + def check_and_save_fhe_status(address_hash, options) when not is_nil(address_hash) do + address = Address.get(address_hash, options) + + cond do + not Address.smart_contract?(address) or is_nil(address.contract_code) -> :empty + already_checked?(address_hash, options) -> :already_checked + true -> do_check_and_save_fhe_status(address_hash, options) + end + end + + def check_and_save_fhe_status(_, _), do: :empty + + defp do_check_and_save_fhe_status(address_hash, options) do + case fhe_contract?(address_hash) do + {:ok, true} -> save_fhe_tag(address_hash, options) + {:ok, false} -> :ok + end + end + + @doc """ + Checks if a contract is a Confidential/FHE contract by calling confidentialProtocolId() + """ + @spec fhe_contract?(Hash.Address.t()) :: {:ok, boolean()} + def fhe_contract?(%Hash{byte_count: 20} = address_hash) do + address_string = Hash.to_string(address_hash) + + request = Contract.eth_call_request(@confidential_protocol_id_selector, address_string, 0, nil, nil) + json_rpc_named_arguments = Application.get_env(:explorer, :json_rpc_named_arguments) + + case json_rpc(request, json_rpc_named_arguments) do + {:ok, result} when is_binary(result) -> + is_fhe = decode_uint256(result) != 0 + {:ok, is_fhe} + + {:ok, [%{result: result}]} when is_binary(result) -> + is_fhe = decode_uint256(result) 
!= 0 + {:ok, is_fhe} + + {:ok, _other} -> + {:ok, false} + + {:error, _reason} -> + # Treat RPC error as false to avoid crashing/retrying loop, or return error? + {:ok, false} + end + end + + @spec fhe_contract?(binary()) :: {:ok, boolean()} | {:error, :invalid_hash} + def fhe_contract?(address_hash_string) when is_binary(address_hash_string) do + case Hash.Address.cast(address_hash_string) do + {:ok, address_hash} -> + fhe_contract?(address_hash) + + _ -> + {:error, :invalid_hash} + end + end + + @doc """ + Checks if an address has already been tagged as FHE. + """ + @spec already_checked?(Hash.Address.t(), Keyword.t()) :: boolean() + def already_checked?(address_hash, options) do + tag_id = AddressTag.get_id_by_label(@fhe_tag_label) + + if tag_id do + repo = select_repo(options) + str_hash = Hash.to_string(address_hash) + + query = + from(att in AddressToTag, + where: att.tag_id == ^tag_id and att.address_hash == ^str_hash + ) + + repo.exists?(query) + else + false + end + end + + defp save_fhe_tag(address_hash, _options) do + ensure_fhe_tag_exists() + + case AddressTag.get_id_by_label(@fhe_tag_label) do + nil -> :error + tag_id -> insert_tag_mapping(address_hash, tag_id) + end + end + + defp insert_tag_mapping(address_hash, tag_id) do + params = %{ + address_hash: address_hash, + tag_id: tag_id, + inserted_at: DateTime.utc_now(), + updated_at: DateTime.utc_now() + } + + # We use Repo.insert_all with on_conflict: :nothing to be safe and idempotent + Repo.insert_all(AddressToTag, [params], on_conflict: :nothing, conflict_target: [:address_hash, :tag_id]) + :ok + rescue + e -> + Logger.error("Failed to insert FHE tag mapping: #{inspect(e)}") + :error + end + + defp ensure_fhe_tag_exists do + case AddressTag.set(@fhe_tag_label, @fhe_tag_display_name) do + {:ok, _} -> + :ok + + {:error, + %Ecto.Changeset{errors: [label: {_, [constraint: :unique, constraint_name: "address_tags_label_index"]}]}} -> + :ok + + _ -> + :error + end + end + + defp decode_uint256("0x" <> 
hex), do: decode_hex(hex) + defp decode_uint256(hex) when is_binary(hex), do: decode_hex(hex) + + defp decode_hex(""), do: 0 + + defp decode_hex(hex) do + hex + |> String.trim_leading("0") + |> case do + "" -> 0 + val -> String.to_integer(val, 16) + end + end +end diff --git a/apps/explorer/lib/explorer/chain/fhe/fhe_operation.ex b/apps/explorer/lib/explorer/chain/fhe/fhe_operation.ex new file mode 100644 index 000000000000..ff8ddd571e68 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/fhe/fhe_operation.ex @@ -0,0 +1,104 @@ +defmodule Explorer.Chain.FheOperation do + @moduledoc """ + Represents a single FHE (Fully Homomorphic Encryption) operation within a transaction. + FHE operations are parsed from transaction logs during block indexing. + """ + + use Explorer.Schema + + import Ecto.Query, only: [from: 2] + + alias Explorer.Chain.{Block, Hash, Transaction} + alias Explorer.Repo + + @primary_key false + typed_schema "fhe_operations" do + # Composite primary key + field(:log_index, :integer, primary_key: true, null: false) + + # Foreign keys + belongs_to(:transaction, Transaction, + foreign_key: :transaction_hash, + primary_key: true, + references: :hash, + type: Hash.Full, + null: false + ) + + belongs_to(:block, Block, + foreign_key: :block_hash, + references: :hash, + type: Hash.Full, + null: false + ) + + # Operation details + field(:operation, :string, null: false) + field(:operation_type, :string, null: false) + field(:fhe_type, :string, null: false) + field(:is_scalar, :boolean, null: false) + + # HCU metrics + field(:hcu_cost, :integer, null: false) + field(:hcu_depth, :integer, null: false) + + # Addresses and handles + field(:caller, Hash.Address) + field(:result_handle, :binary, null: false) + field(:input_handles, :map) + + # Metadata + field(:block_number, :integer, null: false) + + timestamps() + end + + @required_attrs ~w(transaction_hash log_index block_hash block_number operation operation_type fhe_type is_scalar hcu_cost hcu_depth 
result_handle)a + @optional_attrs ~w(caller input_handles)a + + @doc false + def changeset(fhe_operation, attrs) do + fhe_operation + |> cast(attrs, @required_attrs ++ @optional_attrs) + |> validate_required(@required_attrs) + end + + @doc """ + Returns all FHE operations for a given transaction hash, ordered by log_index. + """ + @spec by_transaction_hash(Hash.Full.t()) :: [t()] + def by_transaction_hash(transaction_hash) do + query = + from( + op in __MODULE__, + where: op.transaction_hash == ^transaction_hash, + order_by: [asc: op.log_index] + ) + + Repo.all(query) + end + + @doc """ + Returns transaction-level metrics for a given transaction. + Returns a map with operation_count, total_hcu, and max_depth_hcu. + """ + @spec transaction_metrics(Hash.Full.t()) :: %{ + operation_count: non_neg_integer(), + total_hcu: non_neg_integer(), + max_depth_hcu: non_neg_integer() + } + def transaction_metrics(transaction_hash) do + query = + from( + op in __MODULE__, + where: op.transaction_hash == ^transaction_hash, + select: %{ + operation_count: count(op.log_index), + total_hcu: coalesce(sum(op.hcu_cost), 0), + max_depth_hcu: coalesce(max(op.hcu_depth), 0) + } + ) + + Repo.one(query) || %{operation_count: 0, total_hcu: 0, max_depth_hcu: 0} + end +end diff --git a/apps/explorer/lib/explorer/chain/fhe/fhe_operator_prices.ex b/apps/explorer/lib/explorer/chain/fhe/fhe_operator_prices.ex new file mode 100644 index 000000000000..affa8d25f5c0 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/fhe/fhe_operator_prices.ex @@ -0,0 +1,582 @@ +defmodule Explorer.Chain.FheOperatorPrices do + @moduledoc """ + HCU (Homomorphic Computation Units) price data for FHE operations. 
+ Ported from https://github.com/zama-ai/fhevm/blob/main/library-solidity/codegen/src/operatorsPrices.ts + """ + + @operator_prices %{ + "fheAdd" => %{ + scalar: %{ + "Uint8" => 84_000, + "Uint16" => 93_000, + "Uint32" => 95_000, + "Uint64" => 133_000, + "Uint128" => 172_000 + }, + non_scalar: %{ + "Uint8" => 88_000, + "Uint16" => 93_000, + "Uint32" => 125_000, + "Uint64" => 162_000, + "Uint128" => 259_000 + } + }, + "fheSub" => %{ + scalar: %{ + "Uint8" => 84_000, + "Uint16" => 93_000, + "Uint32" => 95_000, + "Uint64" => 133_000, + "Uint128" => 172_000 + }, + non_scalar: %{ + "Uint8" => 91_000, + "Uint16" => 93_000, + "Uint32" => 125_000, + "Uint64" => 162_000, + "Uint128" => 260_000 + } + }, + "fheMul" => %{ + scalar: %{ + "Uint8" => 122_000, + "Uint16" => 193_000, + "Uint32" => 265_000, + "Uint64" => 365_000, + "Uint128" => 696_000 + }, + non_scalar: %{ + "Uint8" => 150_000, + "Uint16" => 222_000, + "Uint32" => 328_000, + "Uint64" => 596_000, + "Uint128" => 1_686_000 + } + }, + "fheDiv" => %{ + scalar: %{ + "Uint8" => 210_000, + "Uint16" => 302_000, + "Uint32" => 438_000, + "Uint64" => 715_000, + "Uint128" => 1_225_000 + } + }, + "fheRem" => %{ + scalar: %{ + "Uint8" => 440_000, + "Uint16" => 580_000, + "Uint32" => 792_000, + "Uint64" => 1_153_000, + "Uint128" => 1_943_000 + } + }, + "fheBitAnd" => %{ + scalar: %{ + "Bool" => 22_000, + "Uint8" => 31_000, + "Uint16" => 31_000, + "Uint32" => 32_000, + "Uint64" => 34_000, + "Uint128" => 37_000, + "Uint256" => 38_000 + }, + non_scalar: %{ + "Bool" => 25_000, + "Uint8" => 31_000, + "Uint16" => 31_000, + "Uint32" => 32_000, + "Uint64" => 34_000, + "Uint128" => 37_000, + "Uint256" => 38_000 + } + }, + "fheBitOr" => %{ + scalar: %{ + "Bool" => 22_000, + "Uint8" => 30_000, + "Uint16" => 30_000, + "Uint32" => 32_000, + "Uint64" => 34_000, + "Uint128" => 37_000, + "Uint256" => 38_000 + }, + non_scalar: %{ + "Bool" => 24_000, + "Uint8" => 30_000, + "Uint16" => 31_000, + "Uint32" => 32_000, + "Uint64" => 34_000, + "Uint128" 
=> 37_000, + "Uint256" => 38_000 + } + }, + "fheBitXor" => %{ + scalar: %{ + "Bool" => 22_000, + "Uint8" => 31_000, + "Uint16" => 31_000, + "Uint32" => 32_000, + "Uint64" => 34_000, + "Uint128" => 37_000, + "Uint256" => 39_000 + }, + non_scalar: %{ + "Bool" => 22_000, + "Uint8" => 31_000, + "Uint16" => 31_000, + "Uint32" => 32_000, + "Uint64" => 34_000, + "Uint128" => 37_000, + "Uint256" => 39_000 + } + }, + "fheShl" => %{ + scalar: %{ + "Uint8" => 32_000, + "Uint16" => 32_000, + "Uint32" => 32_000, + "Uint64" => 34_000, + "Uint128" => 37_000, + "Uint256" => 39_000 + }, + non_scalar: %{ + "Uint8" => 92_000, + "Uint16" => 125_000, + "Uint32" => 162_000, + "Uint64" => 208_000, + "Uint128" => 272_000, + "Uint256" => 378_000 + } + }, + "fheShr" => %{ + scalar: %{ + "Uint8" => 32_000, + "Uint16" => 32_000, + "Uint32" => 32_000, + "Uint64" => 34_000, + "Uint128" => 37_000, + "Uint256" => 38_000 + }, + non_scalar: %{ + "Uint8" => 91_000, + "Uint16" => 123_000, + "Uint32" => 163_000, + "Uint64" => 209_000, + "Uint128" => 272_000, + "Uint256" => 369_000 + } + }, + "fheRotl" => %{ + scalar: %{ + "Uint8" => 31_000, + "Uint16" => 31_000, + "Uint32" => 32_000, + "Uint64" => 34_000, + "Uint128" => 37_000, + "Uint256" => 38_000 + }, + non_scalar: %{ + "Uint8" => 91_000, + "Uint16" => 125_000, + "Uint32" => 163_000, + "Uint64" => 209_000, + "Uint128" => 278_000, + "Uint256" => 378_000 + } + }, + "fheRotr" => %{ + scalar: %{ + "Uint8" => 31_000, + "Uint16" => 31_000, + "Uint32" => 32_000, + "Uint64" => 34_000, + "Uint128" => 37_000, + "Uint256" => 40_000 + }, + non_scalar: %{ + "Uint8" => 93_000, + "Uint16" => 125_000, + "Uint32" => 160_000, + "Uint64" => 209_000, + "Uint128" => 283_000, + "Uint256" => 375_000 + } + }, + "fheEq" => %{ + scalar: %{ + "Bool" => 25_000, + "Uint8" => 55_000, + "Uint16" => 55_000, + "Uint32" => 82_000, + "Uint64" => 83_000, + "Uint128" => 117_000, + "Uint160" => 117_000, + "Uint256" => 118_000 + }, + non_scalar: %{ + "Bool" => 26_000, + "Uint8" => 
55_000, + "Uint16" => 83_000, + "Uint32" => 86_000, + "Uint64" => 120_000, + "Uint128" => 122_000, + "Uint160" => 137_000, + "Uint256" => 152_000 + } + }, + "fheNe" => %{ + scalar: %{ + "Bool" => 23_000, + "Uint8" => 55_000, + "Uint16" => 55_000, + "Uint32" => 83_000, + "Uint64" => 84_000, + "Uint128" => 117_000, + "Uint160" => 117_000, + "Uint256" => 117_000 + }, + non_scalar: %{ + "Bool" => 23_000, + "Uint8" => 55_000, + "Uint16" => 83_000, + "Uint32" => 85_000, + "Uint64" => 118_000, + "Uint128" => 122_000, + "Uint160" => 136_000, + "Uint256" => 150_000 + } + }, + "fheGe" => %{ + scalar: %{ + "Uint8" => 52_000, + "Uint16" => 55_000, + "Uint32" => 84_000, + "Uint64" => 116_000, + "Uint128" => 149_000 + }, + non_scalar: %{ + "Uint8" => 63_000, + "Uint16" => 84_000, + "Uint32" => 118_000, + "Uint64" => 152_000, + "Uint128" => 210_000 + } + }, + "fheGt" => %{ + scalar: %{ + "Uint8" => 52_000, + "Uint16" => 55_000, + "Uint32" => 84_000, + "Uint64" => 117_000, + "Uint128" => 150_000 + }, + non_scalar: %{ + "Uint8" => 59_000, + "Uint16" => 84_000, + "Uint32" => 118_000, + "Uint64" => 152_000, + "Uint128" => 218_000 + } + }, + "fheLe" => %{ + scalar: %{ + "Uint8" => 58_000, + "Uint16" => 58_000, + "Uint32" => 84_000, + "Uint64" => 119_000, + "Uint128" => 150_000 + }, + non_scalar: %{ + "Uint8" => 58_000, + "Uint16" => 83_000, + "Uint32" => 117_000, + "Uint64" => 149_000, + "Uint128" => 218_000 + } + }, + "fheLt" => %{ + scalar: %{ + "Uint8" => 52_000, + "Uint16" => 58_000, + "Uint32" => 83_000, + "Uint64" => 118_000, + "Uint128" => 149_000 + }, + non_scalar: %{ + "Uint8" => 59_000, + "Uint16" => 84_000, + "Uint32" => 117_000, + "Uint64" => 146_000, + "Uint128" => 215_000 + } + }, + "fheMin" => %{ + scalar: %{ + "Uint8" => 84_000, + "Uint16" => 88_000, + "Uint32" => 117_000, + "Uint64" => 150_000, + "Uint128" => 186_000 + }, + non_scalar: %{ + "Uint8" => 119_000, + "Uint16" => 146_000, + "Uint32" => 182_000, + "Uint64" => 219_000, + "Uint128" => 289_000 + } + }, + 
"fheMax" => %{ + scalar: %{ + "Uint8" => 89_000, + "Uint16" => 89_000, + "Uint32" => 117_000, + "Uint64" => 149_000, + "Uint128" => 180_000 + }, + non_scalar: %{ + "Uint8" => 121_000, + "Uint16" => 145_000, + "Uint32" => 180_000, + "Uint64" => 218_000, + "Uint128" => 290_000 + } + }, + "fheNeg" => %{ + types: %{ + "Uint8" => 79_000, + "Uint16" => 93_000, + "Uint32" => 95_000, + "Uint64" => 131_000, + "Uint128" => 168_000, + "Uint256" => 269_000 + } + }, + "fheNot" => %{ + types: %{ + "Bool" => 2, + "Uint8" => 9, + "Uint16" => 16, + "Uint32" => 32, + "Uint64" => 63, + "Uint128" => 130, + "Uint256" => 130 + } + }, + "cast" => %{ + types: %{ + "Bool" => 32, + "Uint8" => 32, + "Uint16" => 32, + "Uint32" => 32, + "Uint64" => 32, + "Uint128" => 32, + "Uint256" => 32 + } + }, + "trivialEncrypt" => %{ + types: %{ + "Bool" => 32, + "Uint8" => 32, + "Uint16" => 32, + "Uint32" => 32, + "Uint64" => 32, + "Uint128" => 32, + "Uint160" => 32, + "Uint256" => 32 + } + }, + "ifThenElse" => %{ + types: %{ + "Bool" => 55_000, + "Uint8" => 55_000, + "Uint16" => 55_000, + "Uint32" => 55_000, + "Uint64" => 55_000, + "Uint128" => 57_000, + "Uint160" => 83_000, + "Uint256" => 108_000 + } + }, + "fheRand" => %{ + types: %{ + "Bool" => 19_000, + "Uint8" => 23_000, + "Uint16" => 23_000, + "Uint32" => 24_000, + "Uint64" => 24_000, + "Uint128" => 25_000, + "Uint256" => 30_000 + } + }, + "fheRandBounded" => %{ + types: %{ + "Uint8" => 23_000, + "Uint16" => 23_000, + "Uint32" => 24_000, + "Uint64" => 24_000, + "Uint128" => 25_000, + "Uint256" => 30_000 + } + } + } + + @type_mapping %{ + # Mapping matches TypeScript ALL_FHE_TYPE_INFOS from fhevm/library-solidity/codegen/src/fheTypeInfos.ts + 0 => "Bool", + 1 => "Uint4", + 2 => "Uint8", + 3 => "Uint16", + 4 => "Uint32", + 5 => "Uint64", + 6 => "Uint128", + 7 => "Uint160", + 8 => "Uint256", + 9 => "Uint512", + 10 => "Uint1024", + 11 => "Uint2048", + 12 => "Uint2", + 13 => "Uint6", + 14 => "Uint10", + 15 => "Uint12", + 16 => "Uint14", + 17 => "Int2", 
+ 18 => "Int4", + 19 => "Int6", + 20 => "Int8", + 21 => "Int10", + 22 => "Int12", + 23 => "Int14", + 24 => "Int16", + 25 => "Int32", + 26 => "Int64", + 27 => "Int128", + 28 => "Int160", + 29 => "Int256", + 30 => "AsciiString", + 31 => "Int512", + 32 => "Int1024", + 33 => "Int2048", + 34 => "Uint24", + 35 => "Uint40", + 36 => "Uint48", + 37 => "Uint56", + 38 => "Uint72", + 39 => "Uint80", + 40 => "Uint88", + 41 => "Uint96", + 42 => "Uint104", + 43 => "Uint112", + 44 => "Uint120", + 45 => "Uint136", + 46 => "Uint144", + 47 => "Uint152", + 48 => "Uint168", + 49 => "Uint176", + 50 => "Uint184", + 51 => "Uint192", + 52 => "Uint200", + 53 => "Uint208", + 54 => "Uint216", + 55 => "Uint224", + 56 => "Uint232", + 57 => "Uint240", + 58 => "Uint248", + 59 => "Int24", + 60 => "Int40", + 61 => "Int48", + 62 => "Int56", + 63 => "Int72", + 64 => "Int80", + 65 => "Int88", + 66 => "Int96", + 67 => "Int104", + 68 => "Int112", + 69 => "Int120", + 70 => "Int136", + 71 => "Int144", + 72 => "Int152", + 73 => "Int168", + 74 => "Int176", + 75 => "Int184", + 76 => "Int192", + 77 => "Int200", + 78 => "Int208", + 79 => "Int216", + 80 => "Int224", + 81 => "Int232", + 82 => "Int240", + 83 => "Int248" + } + + @doc """ + Returns the HCU price for an FHE operation. + + Looks up the operation in the price table by operation name, FHE type, and + scalar flag. Returns 0 if not found. + + ## Parameters + - `operation_name` - Operation name (e.g. "fheAdd", "fheSub"). + - `fhe_type` - FHE type string (e.g. "Uint8", "Uint16"). + - `is_scalar` - Whether the operation is scalar (default false). + + ## Returns + - `non_neg_integer()` - HCU cost, or 0 if unknown. 
+ """ + @spec get_price(String.t() | atom(), String.t() | atom(), boolean()) :: non_neg_integer() + def get_price(operation_name, fhe_type, is_scalar \\ false) do + case @operator_prices[operation_name] do + %{scalar: scalar_prices, non_scalar: non_scalar_prices} -> + prices = if is_scalar, do: scalar_prices, else: non_scalar_prices + Map.get(prices, fhe_type, 0) + + %{scalar: scalar_prices} -> + Map.get(scalar_prices, fhe_type, 0) + + %{types: type_prices} -> + Map.get(type_prices, fhe_type, 0) + + _ -> + 0 + end + end + + @doc """ + Converts a type index to its FHE type name string. + + ## Parameters + - `type_index` - Integer type index (e.g. 1 for Uint8). + + ## Returns + - `String.t()` - Type name (e.g. "Uint8"), or "Unknown" if not found. + """ + @spec get_type_name(integer()) :: String.t() + def get_type_name(type_index) when is_integer(type_index) do + Map.get(@type_mapping, type_index, "Unknown") + end + + @doc """ + Returns the full operator price map. + + ## Parameters + - None. + + ## Returns + - `map()` - Map of operation names to price structures (scalar/non_scalar). + """ + @spec all_prices() :: map() + def all_prices, do: @operator_prices + + @doc """ + Returns the mapping from type index to type name. + + ## Parameters + - None. + + ## Returns + - `%{integer() => String.t()}` - Map of type index to type name string. + """ + @spec type_mapping() :: %{integer() => String.t()} + def type_mapping, do: @type_mapping +end diff --git a/apps/explorer/lib/explorer/chain/fhe/parser.ex b/apps/explorer/lib/explorer/chain/fhe/parser.ex new file mode 100644 index 000000000000..1be65042ea4d --- /dev/null +++ b/apps/explorer/lib/explorer/chain/fhe/parser.ex @@ -0,0 +1,476 @@ +defmodule Explorer.Chain.Fhe.Parser do + @moduledoc """ + Logic for parsing FHE operations from transaction logs. 
+ """ + + require Logger + + alias Explorer.Chain.{FheOperatorPrices, Hash, Log} + alias Explorer.Helper + + # FHE Event Signatures (Keccak-256 hashes) + @fhe_add_event "0xdb9050d65240431621d61d6f94b970e63f53a67a5766614ee6e5c5bbd41c8e2e" + @fhe_sub_event "0xeb6d37bd271abe1395b21d6d78f3487d6584862872c29ffd3f90736ee99b7393" + @fhe_mul_event "0x215346a4f9f975e6d5484e290bd4e53ca14453a9d282ebd3ccedb2a0f171753d" + @fhe_div_event "0x3bab2ee0e2f90f4690c6a87bf63cf1a6b626086e95f231860b152966e8dabbf7" + @fhe_rem_event "0x0e691cd0bf8c4e9308e4ced1bb9c964117dc5c5bb9b9ab5bdfebf2c9b13a897c" + @fhe_bit_and_event "0xe42486b0ccdbef81a2075c48c8e515c079aea73c8b82429997c72a2fe1bf4fef" + @fhe_bit_or_event "0x56df279bbfb03d9ed097bbe2f28d520ca0c1161206327926e98664d70d2c24c4" + @fhe_bit_xor_event "0x4d32284bd3193ecaa44e1ceca32f41c5d6c32803a92e07967dd3ee4229721582" + @fhe_shl_event "0xe84282aaebcca698443e39a2a948a345d0d2ebc654af5cb657a2d7e8053bf6cb" + @fhe_shr_event "0x324220bfc9cb158b492991c03c309cd86e5345cac45aacae2092ddabe31fa3d8" + @fhe_rotl_event "0xeb0e4f8dc74058194d0602425fe602f955c222200f7f10c6fe67992f7b24c7e9" + @fhe_rotr_event "0xc148675905d07ad5496f8ef4d8195c907503f3ec12fd10ed5f21240abc693634" + @fhe_eq_event "0xb3d5c664ec86575818e8d75ff25c5f867250df8954088549c41c848cd10e76cb" + @fhe_ne_event "0x6960c1e88f61c352dba34d1bbf6753e302795264d5d8ae82f7983c7004651e5d" + @fhe_ge_event "0x38c3a63c4230de5b741f494ffb54e3087104030279bc7bccee8ad9ad31712b21" + @fhe_gt_event "0xc9ff8f0d18a3f766ce5de3de216076050140e4fc2652f5e0e745f6fc836cda8b" + @fhe_le_event "0xdef2e704a077284a07f3d0b436db88f5d981b69f58ab7c1ae623252718a6de01" + @fhe_lt_event "0x0d483b100d8c73b208984ec697caa3091521ee5525ce69edcf97d7e395d3d059" + @fhe_min_event "0xc11d62b13c360a83082487064be1ec0878b2f0be4f012bf59f89e128063d47ff" + @fhe_max_event "0xfd7c9208f956bf0c6ab76a667f04361245ad3e0a2d0eff92eb827acfcca68ea9" + @fhe_neg_event "0x8c664d3c3ca583fc5803b8a91c49644bbd9550bfa87967c73ad1de83027768c0" + @fhe_not_event 
"0x55aff4cc7a3d160c83f1f15b818011ede841a0b4597fb14dcd3603df3a11e5e0" + @trivial_encrypt_event "0x063ccd1bba45151d91f6a418065047a3d048d058a922535747bb2b575a01d236" + @cast_event "0x31ccae6a2f8e3ced1692f77c8f668133e4afdaaa35afe844ff4659a6c27e627f" + @fhe_if_then_else_event "0x60be9d61aad849facc28c38b048cb5c4be3420b8fa2233e08cfa06be1b6d1c3e" + @fhe_rand_event "0x0c8aca6017003326051e19913ef02631f24b801125e1fa8a1d812e868319fda6" + @fhe_rand_bounded_event "0x5222d96b836727a1d6fe1ee9aef27f9bb507bd41794defa376ff6c648aaf8ff1" + + @binary_operations [ + @fhe_add_event, + @fhe_sub_event, + @fhe_mul_event, + @fhe_div_event, + @fhe_rem_event, + @fhe_bit_and_event, + @fhe_bit_or_event, + @fhe_bit_xor_event, + @fhe_shl_event, + @fhe_shr_event, + @fhe_rotl_event, + @fhe_rotr_event, + @fhe_eq_event, + @fhe_ne_event, + @fhe_ge_event, + @fhe_gt_event, + @fhe_le_event, + @fhe_lt_event, + @fhe_min_event, + @fhe_max_event + ] + + @unary_operations [@fhe_neg_event, @fhe_not_event] + + @all_fhe_events @binary_operations ++ + @unary_operations ++ + [ + @trivial_encrypt_event, + @cast_event, + @fhe_if_then_else_event, + @fhe_rand_event, + @fhe_rand_bounded_event + ] + + @event_names %{ + @fhe_add_event => "FheAdd", + @fhe_sub_event => "FheSub", + @fhe_mul_event => "FheMul", + @fhe_div_event => "FheDiv", + @fhe_rem_event => "FheRem", + @fhe_bit_and_event => "FheBitAnd", + @fhe_bit_or_event => "FheBitOr", + @fhe_bit_xor_event => "FheBitXor", + @fhe_shl_event => "FheShl", + @fhe_shr_event => "FheShr", + @fhe_rotl_event => "FheRotl", + @fhe_rotr_event => "FheRotr", + @fhe_eq_event => "FheEq", + @fhe_ne_event => "FheNe", + @fhe_ge_event => "FheGe", + @fhe_gt_event => "FheGt", + @fhe_le_event => "FheLe", + @fhe_lt_event => "FheLt", + @fhe_min_event => "FheMin", + @fhe_max_event => "FheMax", + @fhe_neg_event => "FheNeg", + @fhe_not_event => "FheNot", + @trivial_encrypt_event => "TrivialEncrypt", + @cast_event => "Cast", + @fhe_if_then_else_event => "FheIfThenElse", + @fhe_rand_event => "FheRand", 
+ @fhe_rand_bounded_event => "FheRandBounded" + } + + @doc """ + Returns the list of all FHE event topic signatures. + + ## Parameters + - None. + + ## Returns + - List of binary event topic hashes (Keccak-256 of event signatures). + """ + def all_fhe_events, do: @all_fhe_events + + @doc """ + Maps an event topic hash to its human-readable event name. + + ## Parameters + - `topic` - Event topic (binary or Hash struct). + + ## Returns + - `String.t()` - Event name (e.g. "FheAdd"), or "Unknown" if not found. + """ + def get_event_name(topic) do + normalized_topic = String.downcase(to_string(topic)) + Map.get(@event_names, normalized_topic, "Unknown") + end + + @doc """ + Extracts the caller address from an indexed topic (32-byte padded address). + + Parses the last 20 bytes of a 32-byte topic as an Ethereum address. Returns nil + for invalid or too-short input. + + ## Parameters + - `topic` - Topic value (binary, Hash struct, or string). + + ## Returns + - `String.t()` - "0x" prefixed hex address, or nil if extraction fails. 
+ """ + def extract_caller(nil), do: nil + + def extract_caller(topic) when is_binary(topic) and byte_size(topic) < 32, do: nil + + def extract_caller(topic) do + case topic do + %Hash{} = hash -> + if byte_size(hash.bytes) == 32 do + <<_::binary-size(12), address_bytes::binary-size(20)>> = hash.bytes + "0x" <> Base.encode16(address_bytes, case: :lower) + else + nil + end + + binary when is_binary(binary) -> + if byte_size(binary) >= 32 do + <<_::binary-size(12), address_bytes::binary-size(20)>> = binary + "0x" <> Base.encode16(address_bytes, case: :lower) + else + nil + end + + _ -> + extract_caller_from_topic_str(topic) + end + end + + defp extract_caller_from_topic_str(topic) do + topic_str = topic |> to_string() |> String.downcase() + + cond do + String.starts_with?(topic_str, "0x") -> "0x" <> String.slice(topic_str, -40, 40) + String.length(topic_str) >= 40 -> "0x" <> String.slice(topic_str, -40, 40) + true -> topic_str + end + end + + @doc """ + Decodes FHE event log data based on event type. + + Parses the `data` field of a log according to the event's ABI structure + (binary ops: lhs, rhs, scalar_byte, result; unary ops: ct, result; etc.). + + ## Parameters + - `log` - Log struct with `:data` field. + - `event_name` - Event name string (e.g. "FheAdd", "FheNeg"). + + ## Returns + - Map with decoded fields (e.g. `%{lhs: _, rhs: _, result: _}`). 
+ """ + def decode_event_data(%{data: data} = _log, event_name) + when event_name in [ + "FheAdd", + "FheSub", + "FheMul", + "FheDiv", + "FheRem", + "FheBitAnd", + "FheBitOr", + "FheBitXor", + "FheShl", + "FheShr", + "FheRotl", + "FheRotr", + "FheEq", + "FheNe", + "FheGe", + "FheGt", + "FheLe", + "FheLt", + "FheMin", + "FheMax" + ] do + # Binary operations: (bytes32 lhs, bytes32 rhs, bytes1 scalarByte, bytes32 result) + [lhs, rhs, scalar_byte, result] = Helper.decode_data(data, [{:bytes, 32}, {:bytes, 32}, {:bytes, 1}, {:bytes, 32}]) + + %{ + lhs: lhs, + rhs: rhs, + scalar_byte: scalar_byte, + result: result + } + end + + def decode_event_data(%{data: data} = _log, event_name) when event_name in ["FheNeg", "FheNot"] do + # Unary operations: (bytes32 ct, bytes32 result) + [ct, result] = Helper.decode_data(data, [{:bytes, 32}, {:bytes, 32}]) + + %{ + ct: ct, + result: result + } + end + + def decode_event_data(%{data: data} = _log, "TrivialEncrypt") do + # TrivialEncrypt(address indexed caller, uint256 pt, uint8 toType, bytes32 result) + [pt, to_type, result] = Helper.decode_data(data, [{:uint, 256}, {:uint, 8}, {:bytes, 32}]) + + %{ + plaintext: pt, + to_type: to_type, + result: result + } + end + + def decode_event_data(%{data: data} = _log, "Cast") do + # Cast(address indexed caller, bytes32 ct, uint8 toType, bytes32 result) + [ct, to_type, result] = Helper.decode_data(data, [{:bytes, 32}, {:uint, 8}, {:bytes, 32}]) + + %{ + ct: ct, + to_type: to_type, + result: result + } + end + + def decode_event_data(%{data: data} = _log, "FheIfThenElse") do + # FheIfThenElse(address indexed caller, bytes32 control, bytes32 ifTrue, bytes32 ifFalse, bytes32 result) + [control, if_true, if_false, result] = + Helper.decode_data(data, [{:bytes, 32}, {:bytes, 32}, {:bytes, 32}, {:bytes, 32}]) + + %{ + control: control, + if_true: if_true, + if_false: if_false, + result: result + } + end + + def decode_event_data(%{data: data} = _log, "FheRand") do + # FheRand(address indexed 
caller, uint8 randType, bytes16 seed, bytes32 result) + [rand_type, seed, result] = Helper.decode_data(data, [{:uint, 8}, {:bytes, 16}, {:bytes, 32}]) + + %{ + rand_type: rand_type, + seed: seed, + result: result + } + end + + def decode_event_data(%{data: data} = _log, "FheRandBounded") do + # FheRandBounded(address indexed caller, uint256 upperBound, uint8 randType, bytes16 seed, bytes32 result) + [upper_bound, rand_type, seed, result] = + Helper.decode_data(data, [{:uint, 256}, {:uint, 8}, {:bytes, 16}, {:bytes, 32}]) + + %{ + upper_bound: upper_bound, + rand_type: rand_type, + seed: seed, + result: result + } + end + + def decode_event_data(%Log{} = _log, _event_name) do + %{result: <<0::256>>} + end + + @doc """ + Type extraction rules (matching TypeScript implementation): + - FheAdd, FheSub, FheMul, FheDiv, FheRem, FheBitAnd, FheBitOr, FheBitXor, + FheShl, FheShr, FheRotl, FheRotr, FheMin, FheMax, FheIfThenElse: from result handle + - FheEq, FheNe, FheGe, FheGt, FheLe, FheLt: from LHS handle (not result!) 
+ - Cast, FheNot, FheNeg: from input handle (ct) + - TrivialEncrypt: from toType parameter + - FheRand: from randType parameter + - FheRandBounded: from randType parameter + """ + def extract_fhe_type(operation_data, event_name) do + case event_name do + event_name when event_name in ["FheEq", "FheNe", "FheGe", "FheGt", "FheLe", "FheLt"] -> + extract_fhe_type_from_handle(operation_data[:lhs], &extract_fhe_type_from_result/1) + + event_name when event_name in ["Cast", "FheNot", "FheNeg"] -> + extract_fhe_type_from_handle(operation_data[:ct], &extract_fhe_type_from_result/1) + + "TrivialEncrypt" -> + extract_fhe_type_from_param(operation_data[:to_type]) + + "FheRand" -> + extract_fhe_type_from_param(operation_data[:rand_type]) + + "FheRandBounded" -> + extract_fhe_type_from_param(operation_data[:rand_type]) + + _ -> + extract_fhe_type_from_handle(operation_data[:result], &extract_fhe_type_from_result/1) + end + end + + defp extract_fhe_type_from_handle(nil, _extractor), do: "Unknown" + + defp extract_fhe_type_from_handle(handle, extractor), do: extractor.(handle) + + defp extract_fhe_type_from_param(nil), do: "Unknown" + + defp extract_fhe_type_from_param(param), do: FheOperatorPrices.get_type_name(param) + + defp extract_fhe_type_from_result(result) when is_binary(result) do + result_size = byte_size(result) + + if result_size >= 32 do + # Extract byte 30 (0-indexed, second to last byte) + <<_prefix::binary-size(30), type_byte::8, _suffix::binary-size(1)>> = result + FheOperatorPrices.get_type_name(type_byte) + else + "Unknown" + end + end + + defp extract_fhe_type_from_result(_), do: "Unknown" + + @doc """ + Extract inputs based on operation type. 
+ """ + def extract_inputs(%{lhs: lhs, rhs: rhs}, _event_name) when not is_nil(lhs) do + %{ + lhs: Base.encode16(lhs, case: :lower), + rhs: Base.encode16(rhs, case: :lower) + } + end + + def extract_inputs(%{ct: ct}, _event_name) when not is_nil(ct) do + %{ct: Base.encode16(ct, case: :lower)} + end + + def extract_inputs(%{control: control, if_true: if_true, if_false: if_false}, "FheIfThenElse") do + %{ + control: Base.encode16(control, case: :lower), + if_true: Base.encode16(if_true, case: :lower), + if_false: Base.encode16(if_false, case: :lower) + } + end + + def extract_inputs(%{plaintext: pt}, "TrivialEncrypt") do + %{plaintext: pt} + end + + def extract_inputs(_, _), do: %{} + + @doc """ + Calculate HCU cost for an operation. + """ + def calculate_hcu_cost(event_name, fhe_type, is_scalar) do + operation_key = event_name_to_operation_key(event_name) + FheOperatorPrices.get_price(operation_key, fhe_type, is_scalar) + end + + def event_name_to_operation_key("FheAdd"), do: "fheAdd" + def event_name_to_operation_key("FheSub"), do: "fheSub" + def event_name_to_operation_key("FheMul"), do: "fheMul" + def event_name_to_operation_key("FheDiv"), do: "fheDiv" + def event_name_to_operation_key("FheRem"), do: "fheRem" + def event_name_to_operation_key("FheBitAnd"), do: "fheBitAnd" + def event_name_to_operation_key("FheBitOr"), do: "fheBitOr" + def event_name_to_operation_key("FheBitXor"), do: "fheBitXor" + def event_name_to_operation_key("FheShl"), do: "fheShl" + def event_name_to_operation_key("FheShr"), do: "fheShr" + def event_name_to_operation_key("FheRotl"), do: "fheRotl" + def event_name_to_operation_key("FheRotr"), do: "fheRotr" + def event_name_to_operation_key("FheEq"), do: "fheEq" + def event_name_to_operation_key("FheNe"), do: "fheNe" + def event_name_to_operation_key("FheGe"), do: "fheGe" + def event_name_to_operation_key("FheGt"), do: "fheGt" + def event_name_to_operation_key("FheLe"), do: "fheLe" + def event_name_to_operation_key("FheLt"), do: "fheLt" + def 
event_name_to_operation_key("FheMin"), do: "fheMin" + def event_name_to_operation_key("FheMax"), do: "fheMax" + def event_name_to_operation_key("FheNeg"), do: "fheNeg" + def event_name_to_operation_key("FheNot"), do: "fheNot" + def event_name_to_operation_key("TrivialEncrypt"), do: "trivialEncrypt" + def event_name_to_operation_key("Cast"), do: "cast" + def event_name_to_operation_key("FheIfThenElse"), do: "ifThenElse" + def event_name_to_operation_key("FheRand"), do: "fheRand" + def event_name_to_operation_key("FheRandBounded"), do: "fheRandBounded" + def event_name_to_operation_key(_), do: "unknown" + + @doc """ + Get operation category from event name. + """ + def get_operation_type(event_name) do + cond do + event_name in ["FheAdd", "FheSub", "FheMul", "FheDiv", "FheRem"] -> "arithmetic" + event_name in ["FheBitAnd", "FheBitOr", "FheBitXor", "FheShl", "FheShr", "FheRotl", "FheRotr"] -> "bitwise" + event_name in ["FheEq", "FheNe", "FheGe", "FheGt", "FheLe", "FheLt", "FheMin", "FheMax"] -> "comparison" + event_name in ["FheNeg", "FheNot"] -> "unary" + event_name in ["FheIfThenElse"] -> "control" + event_name in ["TrivialEncrypt", "Cast"] -> "encryption" + event_name in ["FheRand", "FheRandBounded"] -> "random" + true -> "other" + end + end + + @doc """ + Build HCU depth map tracking cumulative HCU for each handle. 
+ """ + def build_hcu_depth_map(operations) do + Enum.reduce(operations, %{}, fn op, acc -> + result_handle = if is_binary(op.result), do: Base.encode16(op.result, case: :lower), else: nil + + if is_nil(result_handle) do + acc + else + depth = calculate_op_depth(op, acc) + Map.put(acc, result_handle, depth) + end + end) + end + + defp calculate_op_depth(op, acc) do + case op.inputs do + %{lhs: lhs, rhs: rhs} -> + lhs_depth = Map.get(acc, lhs, 0) + + if Map.get(op, :is_scalar, false) do + lhs_depth + op.hcu_cost + else + rhs_depth = Map.get(acc, rhs, 0) + max(lhs_depth, rhs_depth) + op.hcu_cost + end + + %{control: control, if_true: if_true, if_false: if_false} -> + control_depth = Map.get(acc, control, 0) + true_depth = Map.get(acc, if_true, 0) + false_depth = Map.get(acc, if_false, 0) + max(control_depth, max(true_depth, false_depth)) + op.hcu_cost + + %{ct: ct} -> + ct_depth = Map.get(acc, ct, 0) + ct_depth + op.hcu_cost + + _ -> + op.hcu_cost + end + end +end diff --git a/apps/explorer/lib/explorer/chain/import/runner/fhe_operations.ex b/apps/explorer/lib/explorer/chain/import/runner/fhe_operations.ex new file mode 100644 index 000000000000..45d87f689004 --- /dev/null +++ b/apps/explorer/lib/explorer/chain/import/runner/fhe_operations.ex @@ -0,0 +1,155 @@ +defmodule Explorer.Chain.Import.Runner.FheOperations do + @moduledoc """ + Bulk imports FHE operations parsed from transaction logs. + + This runner handles the database insertion of FHE operations that were parsed + during block indexing. It follows the standard Blockscout import runner pattern + with proper conflict resolution and error handling. + + Also checks and sets FHE contract tags. 
+ """ + + require Ecto.Query + require Logger + + alias Ecto.{Changeset, Multi} + alias Explorer.Chain.FheContractChecker + alias Explorer.Chain.{FheOperation, Import, Transaction} + alias Explorer.Repo + + import Ecto.Query, only: [from: 2] + + @behaviour Import.Runner + + @timeout 60_000 + + @type imported :: [FheOperation.t()] + + # Required by Import.Runner behaviour + @impl Import.Runner + def ecto_schema_module, do: FheOperation + + @impl Import.Runner + def option_key, do: :fhe_operations + + @impl Import.Runner + def imported_table_row do + %{ + value_type: "[#{ecto_schema_module()}.t()]", + value_description: "List of `t:#{ecto_schema_module()}.t/0`s" + } + end + + @impl Import.Runner + def run(multi, changes_list, %{timestamps: timestamps} = options) do + insert_options = + options + |> Map.get(option_key(), %{}) + |> Map.put_new(:timeout, timeout()) + |> Map.put(:timestamps, timestamps) + + Multi.run(multi, :insert_fhe_operations, fn repo, _ -> + insert(repo, changes_list, insert_options) + end) + end + + @impl Import.Runner + def timeout, do: @timeout + + @spec insert(Ecto.Repo.t(), [map()], %{ + required(:timeout) => timeout(), + required(:timestamps) => Import.timestamps() + }) :: {:ok, [FheOperation.t()]} | {:error, [Changeset.t()]} + defp insert(repo, changes_list, %{timeout: timeout, timestamps: timestamps}) when is_list(changes_list) do + # Return early if no FHE operations to insert + if Enum.empty?(changes_list) do + {:ok, []} + else + # Order by transaction_hash and log_index for deterministic insertion + ordered_changes_list = + changes_list + |> Enum.sort_by(&{&1.transaction_hash, &1.log_index}) + + # Insert with conflict resolution + # If the same operation exists (same transaction_hash + log_index), replace it + case Import.insert_changes_list( + repo, + ordered_changes_list, + conflict_target: [:transaction_hash, :log_index], + on_conflict: :replace_all, + for: FheOperation, + returning: true, + timeout: timeout, + timestamps: timestamps + 
) do + {:ok, inserted} -> + tag_contracts_from_fhe_operations(ordered_changes_list) + update_transaction_fhe_counts(repo, ordered_changes_list) + {:ok, inserted} + end + end + end + + # Tags contracts that were called in transactions with FHE operations + defp tag_contracts_from_fhe_operations(fhe_operations) when is_list(fhe_operations) do + contract_addresses = get_all_contract_addresses_from_fhe_operations(fhe_operations) + + Enum.each(contract_addresses, fn address_hash -> + FheContractChecker.check_and_save_fhe_status(address_hash, []) + end) + + :ok + end + + # Gets all unique contract addresses from FHE operations: + # 1. Caller addresses from FHE operation logs (contracts that called FHE operations) + # 2. Transaction to_addresses (contracts that were called in transactions with FHE operations) + defp get_all_contract_addresses_from_fhe_operations(fhe_operations) do + # Get caller addresses from FHE operation logs + caller_addresses = + fhe_operations + |> Enum.map(&Map.get(&1, :caller)) + |> Enum.filter(&(not is_nil(&1))) + |> Enum.uniq() + + # Get transaction to_addresses from transactions with FHE operations + transaction_hashes = + fhe_operations + |> Enum.map(& &1.transaction_hash) + |> Enum.uniq() + + query = + from( + t in Transaction, + where: t.hash in ^transaction_hashes, + where: t.block_consensus == true, + where: not is_nil(t.to_address_hash), + select: t.to_address_hash, + distinct: true + ) + + to_addresses = Repo.all(query) + + # Combine and deduplicate + (caller_addresses ++ to_addresses) |> Enum.uniq() + end + + # Updates fhe_operations_count on transactions table for precomputed list API performance + defp update_transaction_fhe_counts(repo, fhe_operations) when is_list(fhe_operations) do + counts_by_hash = + fhe_operations + |> Enum.group_by(& &1.transaction_hash, & &1.log_index) + |> Enum.map(fn {hash, log_indices} -> {hash, length(log_indices)} end) + + if counts_by_hash != [] do + Enum.each(counts_by_hash, fn {transaction_hash, count} 
-> + repo.update_all( + from(t in Transaction, where: t.hash == ^transaction_hash), + set: [fhe_operations_count: count] + ) + end) + end + + :ok + end +end diff --git a/apps/explorer/lib/explorer/chain/import/stage/block_transaction_referencing.ex b/apps/explorer/lib/explorer/chain/import/stage/block_transaction_referencing.ex index 8c25a9a05495..e868a1d66316 100644 --- a/apps/explorer/lib/explorer/chain/import/stage/block_transaction_referencing.ex +++ b/apps/explorer/lib/explorer/chain/import/stage/block_transaction_referencing.ex @@ -14,7 +14,8 @@ defmodule Explorer.Chain.Import.Stage.BlockTransactionReferencing do Runner.Block.SecondDegreeRelations, Runner.TransactionActions, Runner.Withdrawals, - Runner.SignedAuthorizations + Runner.SignedAuthorizations, + Runner.FheOperations ] @impl Stage diff --git a/apps/explorer/lib/explorer/chain/transaction.ex b/apps/explorer/lib/explorer/chain/transaction.ex index 19abe45f0425..d22fd221eebc 100644 --- a/apps/explorer/lib/explorer/chain/transaction.ex +++ b/apps/explorer/lib/explorer/chain/transaction.ex @@ -251,6 +251,7 @@ defmodule Explorer.Chain.Transaction.Schema do field(:max_fee_per_gas, Wei) field(:type, :integer) field(:has_error_in_internal_transactions, :boolean) + field(:fhe_operations_count, :integer) field(:has_token_transfers, :boolean, virtual: true) # stability virtual fields diff --git a/apps/explorer/lib/explorer/tags/address_tag.ex b/apps/explorer/lib/explorer/tags/address_tag.ex index d6c20c2d2694..633b25618362 100644 --- a/apps/explorer/lib/explorer/tags/address_tag.ex +++ b/apps/explorer/lib/explorer/tags/address_tag.ex @@ -63,7 +63,7 @@ defmodule Explorer.Tags.AddressTag do @doc """ Fetches AddressTag.t() by label name from the DB """ - @spec get_id_by_label(String.t()) :: non_neg_integer() + @spec get_id_by_label(String.t() | nil) :: non_neg_integer() | nil def get_id_by_label(nil), do: nil def get_id_by_label(label) do diff --git a/apps/explorer/lib/explorer/tags/address_tag_cataloger.ex 
b/apps/explorer/lib/explorer/tags/address_tag_cataloger.ex index 5295a29d30f6..bc695bdab465 100644 --- a/apps/explorer/lib/explorer/tags/address_tag_cataloger.ex +++ b/apps/explorer/lib/explorer/tags/address_tag_cataloger.ex @@ -48,7 +48,7 @@ defmodule Explorer.Tags.AddressTag.Cataloger do all_tags |> Enum.each(fn %{label: tag_name} -> if tag_name !== "validator" && tag_name !== "amb bridge mediators" && tag_name !== "omni bridge" && - tag_name !== "l2" && !String.contains?(tag_name, "chainlink") do + tag_name !== "l2" && tag_name !== "fhe" && !String.contains?(tag_name, "chainlink") do env_var_name = "CUSTOM_CONTRACT_ADDRESSES_#{tag_name_to_env_var_part(tag_name)}" set_tag_for_env_var_multiple_addresses(env_var_name, tag_name) end diff --git a/apps/explorer/priv/repo/migrations/20251214200315_create_fhe_operations.exs b/apps/explorer/priv/repo/migrations/20251214200315_create_fhe_operations.exs new file mode 100644 index 000000000000..2b84197be38e --- /dev/null +++ b/apps/explorer/priv/repo/migrations/20251214200315_create_fhe_operations.exs @@ -0,0 +1,46 @@ +defmodule Explorer.Repo.Migrations.CreateFheOperations do + use Ecto.Migration + + def change do + create table(:fhe_operations, primary_key: false) do + add(:transaction_hash, references(:transactions, column: :hash, on_delete: :delete_all, type: :bytea), + null: false, + primary_key: true + ) + + add(:log_index, :integer, null: false, primary_key: true) + add(:block_hash, references(:blocks, column: :hash, on_delete: :delete_all, type: :bytea), null: false) + add(:block_number, :bigint, null: false) + + # Operation details + add(:operation, :string, size: 50, null: false) + add(:operation_type, :string, size: 20, null: false) + add(:fhe_type, :string, size: 10, null: false) + add(:is_scalar, :boolean, null: false) + + # HCU metrics + add(:hcu_cost, :integer, null: false) + add(:hcu_depth, :integer, null: false) + + # Addresses and handles + add(:caller, :bytea) + add(:result_handle, :bytea, null: false) + 
add(:input_handles, :map) + + timestamps(null: false, type: :utc_datetime_usec) + end + + # Indexes for efficient queries + create(index(:fhe_operations, [:transaction_hash])) + create(index(:fhe_operations, [:log_index])) + create(index(:fhe_operations, [:caller], where: "caller IS NOT NULL")) + create(index(:fhe_operations, [:operation_type])) + create(index(:fhe_operations, [:fhe_type])) + create(index(:fhe_operations, [:operation])) + + # Precomputed FHE operations count on transactions for list API performance + alter table(:transactions) do + add(:fhe_operations_count, :integer, default: 0, null: false) + end + end +end diff --git a/apps/explorer/test/explorer/chain/fhe/fhe_contract_checker_test.exs b/apps/explorer/test/explorer/chain/fhe/fhe_contract_checker_test.exs new file mode 100644 index 000000000000..69834a64902e --- /dev/null +++ b/apps/explorer/test/explorer/chain/fhe/fhe_contract_checker_test.exs @@ -0,0 +1,166 @@ +defmodule Explorer.Chain.FheContractCheckerTest do + use Explorer.DataCase + + import Mox + + alias Explorer.Chain.{Address, FheContractChecker, Hash} + alias Explorer.Tags.{AddressTag, AddressToTag} + alias Explorer.Repo + + setup :verify_on_exit! 
+ setup :set_mox_global + + describe "fhe_contract?/1" do + test "returns true for FHE contract with non-zero protocol ID" do + address_hash = build(:address).hash + + # Mock RPC response with non-zero protocol ID + EthereumJSONRPC.Mox + |> expect(:json_rpc, fn _request, _options -> + {:ok, "0x0000000000000000000000000000000000000000000000000000000000000001"} + end) + + assert {:ok, true} = FheContractChecker.fhe_contract?(address_hash) + end + + test "returns false for non-FHE contract with zero protocol ID" do + address_hash = build(:address).hash + + # Mock RPC response with zero protocol ID + EthereumJSONRPC.Mox + |> expect(:json_rpc, fn _request, _options -> + {:ok, "0x0000000000000000000000000000000000000000000000000000000000000000"} + end) + + assert {:ok, false} = FheContractChecker.fhe_contract?(address_hash) + end + + test "returns false for RPC error" do + address_hash = build(:address).hash + + EthereumJSONRPC.Mox + |> expect(:json_rpc, fn _request, _options -> + {:error, :timeout} + end) + + assert {:ok, false} = FheContractChecker.fhe_contract?(address_hash) + end + + test "returns false for invalid response format" do + address_hash = build(:address).hash + + EthereumJSONRPC.Mox + |> expect(:json_rpc, fn _request, _options -> + {:ok, []} + end) + + assert {:ok, false} = FheContractChecker.fhe_contract?(address_hash) + end + + test "handles string address hash" do + address_string = "0x" <> String.duplicate("1", 40) + + EthereumJSONRPC.Mox + |> expect(:json_rpc, fn _request, _options -> + {:ok, "0x0000000000000000000000000000000000000000000000000000000000000000"} + end) + + assert {:ok, false} = FheContractChecker.fhe_contract?(address_string) + end + + test "returns error for invalid address string" do + assert {:error, :invalid_hash} = FheContractChecker.fhe_contract?("invalid") + end + end + + describe "already_checked?/2" do + test "returns true when address is already tagged as FHE" do + address = insert(:address) + tag = insert(:address_tag, 
label: "fhe", display_name: "FHE") + insert(:address_to_tag, tag: tag, address: address) + + assert true == FheContractChecker.already_checked?(address.hash, []) + end + + test "returns false when address is not tagged" do + address = insert(:address) + _tag = insert(:address_tag, label: "fhe", display_name: "FHE") + + assert false == FheContractChecker.already_checked?(address.hash, []) + end + + test "returns false when FHE tag does not exist" do + address = insert(:address) + + assert false == FheContractChecker.already_checked?(address.hash, []) + end + end + + describe "check_and_save_fhe_status/2" do + test "saves FHE tag when contract is FHE" do + address = insert(:address, contract_code: "0x6080604052") + + EthereumJSONRPC.Mox + |> expect(:json_rpc, fn _request, _options -> + {:ok, "0x0000000000000000000000000000000000000000000000000000000000000001"} + end) + + assert :ok = FheContractChecker.check_and_save_fhe_status(address.hash, []) + + # Verify tag was created + tag = Repo.get_by(AddressTag, label: "fhe") + assert tag != nil + assert tag.display_name == "FHE" + + # Verify address is tagged + tag_id = AddressTag.get_id_by_label("fhe") + + assert Repo.exists?( + from(att in AddressToTag, + where: att.address_hash == ^Hash.to_string(address.hash) and att.tag_id == ^tag_id + ) + ) + end + + test "returns :ok when contract is not FHE" do + address = insert(:address, contract_code: "0x6080604052") + + EthereumJSONRPC.Mox + |> expect(:json_rpc, fn _request, _options -> + {:ok, "0x0000000000000000000000000000000000000000000000000000000000000000"} + end) + + assert :ok = FheContractChecker.check_and_save_fhe_status(address.hash, []) + end + + test "returns :already_checked when address is already tagged" do + address = insert(:address, contract_code: "0x6080604052") + tag = insert(:address_tag, label: "fhe", display_name: "FHE") + insert(:address_to_tag, tag: tag, address: address) + + assert :already_checked = 
FheContractChecker.check_and_save_fhe_status(address.hash, []) + end + + test "returns :empty when address is not a contract" do + address = insert(:address, contract_code: nil) + + assert :empty = FheContractChecker.check_and_save_fhe_status(address.hash, []) + end + + test "returns :empty when address_hash is nil" do + assert :empty = FheContractChecker.check_and_save_fhe_status(nil, []) + end + + test "returns :ok when RPC call fails (treats as non-FHE to avoid retry loops)" do + address = insert(:address, contract_code: "0x6080604052") + + EthereumJSONRPC.Mox + |> expect(:json_rpc, fn _request, _options -> + {:error, :timeout} + end) + + # The implementation treats RPC errors as "not FHE" (returns :ok) to avoid retry loops + assert :ok = FheContractChecker.check_and_save_fhe_status(address.hash, []) + end + end +end diff --git a/apps/explorer/test/explorer/chain/fhe/fhe_operation_test.exs b/apps/explorer/test/explorer/chain/fhe/fhe_operation_test.exs new file mode 100644 index 000000000000..12428f9e09ee --- /dev/null +++ b/apps/explorer/test/explorer/chain/fhe/fhe_operation_test.exs @@ -0,0 +1,141 @@ +defmodule Explorer.Chain.FheOperationTest do + use Explorer.DataCase + + alias Explorer.Chain.{Block, FheOperation, Hash, Transaction} + + describe "by_transaction_hash/1" do + test "returns empty list when no operations exist" do + transaction = insert(:transaction) |> with_block() + + assert [] == FheOperation.by_transaction_hash(transaction.hash) + end + + test "returns operations ordered by log_index" do + transaction = insert(:transaction) |> with_block() + block = transaction.block + + operation_3 = + insert(:fhe_operation, + transaction_hash: transaction.hash, + log_index: 3, + block_hash: block.hash, + block_number: block.number + ) + + operation_1 = + insert(:fhe_operation, + transaction_hash: transaction.hash, + log_index: 1, + block_hash: block.hash, + block_number: block.number + ) + + operation_2 = + insert(:fhe_operation, + transaction_hash: 
transaction.hash, + log_index: 2, + block_hash: block.hash, + block_number: block.number + ) + + operations = FheOperation.by_transaction_hash(transaction.hash) + + assert length(operations) == 3 + assert Enum.at(operations, 0).log_index == operation_1.log_index + assert Enum.at(operations, 1).log_index == operation_2.log_index + assert Enum.at(operations, 2).log_index == operation_3.log_index + end + + test "only returns operations for specified transaction" do + transaction_1 = insert(:transaction) |> with_block() + transaction_2 = insert(:transaction) |> with_block() + + insert(:fhe_operation, + transaction_hash: transaction_1.hash, + log_index: 1, + block_hash: transaction_1.block.hash, + block_number: transaction_1.block_number + ) + + insert(:fhe_operation, + transaction_hash: transaction_2.hash, + log_index: 1, + block_hash: transaction_2.block.hash, + block_number: transaction_2.block_number + ) + + operations = FheOperation.by_transaction_hash(transaction_1.hash) + + assert length(operations) == 1 + assert Enum.at(operations, 0).transaction_hash == transaction_1.hash + end + end + + describe "transaction_metrics/1" do + test "returns zero metrics when no operations exist" do + transaction = insert(:transaction) |> with_block() + + metrics = FheOperation.transaction_metrics(transaction.hash) + + assert metrics.operation_count == 0 + assert metrics.total_hcu == 0 + assert metrics.max_depth_hcu == 0 + end + + test "calculates correct metrics for single operation" do + transaction = insert(:transaction) |> with_block() + + insert(:fhe_operation, + transaction_hash: transaction.hash, + log_index: 1, + block_hash: transaction.block.hash, + block_number: transaction.block_number, + hcu_cost: 100, + hcu_depth: 1 + ) + + metrics = FheOperation.transaction_metrics(transaction.hash) + + assert metrics.operation_count == 1 + assert metrics.total_hcu == 100 + assert metrics.max_depth_hcu == 1 + end + + test "calculates correct metrics for multiple operations" do + 
transaction = insert(:transaction) |> with_block() + + insert(:fhe_operation, + transaction_hash: transaction.hash, + log_index: 1, + block_hash: transaction.block.hash, + block_number: transaction.block_number, + hcu_cost: 100, + hcu_depth: 1 + ) + + insert(:fhe_operation, + transaction_hash: transaction.hash, + log_index: 2, + block_hash: transaction.block.hash, + block_number: transaction.block_number, + hcu_cost: 200, + hcu_depth: 3 + ) + + insert(:fhe_operation, + transaction_hash: transaction.hash, + log_index: 3, + block_hash: transaction.block.hash, + block_number: transaction.block_number, + hcu_cost: 150, + hcu_depth: 2 + ) + + metrics = FheOperation.transaction_metrics(transaction.hash) + + assert metrics.operation_count == 3 + assert metrics.total_hcu == 450 + assert metrics.max_depth_hcu == 3 + end + end +end diff --git a/apps/explorer/test/explorer/chain/fhe/fhe_operator_prices_test.exs b/apps/explorer/test/explorer/chain/fhe/fhe_operator_prices_test.exs new file mode 100644 index 000000000000..5caf03dac5cc --- /dev/null +++ b/apps/explorer/test/explorer/chain/fhe/fhe_operator_prices_test.exs @@ -0,0 +1,84 @@ +defmodule Explorer.Chain.FheOperatorPricesTest do + use ExUnit.Case, async: true + + alias Explorer.Chain.FheOperatorPrices + + describe "get_price/3" do + test "returns correct price for scalar FheAdd operation" do + assert 84_000 == FheOperatorPrices.get_price("fheAdd", "Uint8", true) + assert 93_000 == FheOperatorPrices.get_price("fheAdd", "Uint16", true) + assert 95_000 == FheOperatorPrices.get_price("fheAdd", "Uint32", true) + end + + test "returns correct price for non-scalar FheAdd operation" do + assert 88_000 == FheOperatorPrices.get_price("fheAdd", "Uint8", false) + assert 93_000 == FheOperatorPrices.get_price("fheAdd", "Uint16", false) + assert 125_000 == FheOperatorPrices.get_price("fheAdd", "Uint32", false) + end + + test "returns correct price for FheMul operation" do + assert 122_000 == FheOperatorPrices.get_price("fheMul", 
"Uint8", true) + assert 150_000 == FheOperatorPrices.get_price("fheMul", "Uint8", false) + assert 1_686_000 == FheOperatorPrices.get_price("fheMul", "Uint128", false) + end + + test "returns correct price for FheDiv operation" do + assert 210_000 == FheOperatorPrices.get_price("fheDiv", "Uint8", true) + assert 1_225_000 == FheOperatorPrices.get_price("fheDiv", "Uint128", true) + end + + test "returns correct price for bitwise operations" do + assert 22_000 == FheOperatorPrices.get_price("fheBitAnd", "Bool", true) + assert 25_000 == FheOperatorPrices.get_price("fheBitAnd", "Bool", false) + assert 31_000 == FheOperatorPrices.get_price("fheBitAnd", "Uint8", true) + end + + test "returns 0 for unknown operation" do + assert 0 == FheOperatorPrices.get_price("unknown", "Uint8", true) + end + + test "returns 0 for unknown FHE type" do + assert 0 == FheOperatorPrices.get_price("fheAdd", "Unknown", true) + end + + test "handles operations that only have scalar prices" do + # Operations that only have scalar prices return scalar price regardless of is_scalar flag + assert 210_000 == FheOperatorPrices.get_price("fheDiv", "Uint8", true) + assert 210_000 == FheOperatorPrices.get_price("fheDiv", "Uint8", false) + assert 1_225_000 == FheOperatorPrices.get_price("fheDiv", "Uint128", false) + end + + test "handles operations with types structure (fheRand)" do + assert 19_000 == FheOperatorPrices.get_price("fheRand", "Bool", false) + assert 23_000 == FheOperatorPrices.get_price("fheRand", "Uint8", false) + assert 25_000 == FheOperatorPrices.get_price("fheRand", "Uint128", false) + end + + test "handles fheRandBounded operation" do + assert 23_000 == FheOperatorPrices.get_price("fheRandBounded", "Uint8", false) + assert 30_000 == FheOperatorPrices.get_price("fheRandBounded", "Uint256", false) + end + end + + describe "get_type_name/1" do + test "returns correct type name for type bytes" do + # Mapping matches fhevm ALL_FHE_TYPE_INFOS (fheTypeInfos.ts) + assert "Bool" == 
FheOperatorPrices.get_type_name(0) + assert "Uint4" == FheOperatorPrices.get_type_name(1) + assert "Uint8" == FheOperatorPrices.get_type_name(2) + assert "Uint16" == FheOperatorPrices.get_type_name(3) + assert "Uint32" == FheOperatorPrices.get_type_name(4) + assert "Uint64" == FheOperatorPrices.get_type_name(5) + assert "Uint128" == FheOperatorPrices.get_type_name(6) + assert "Uint160" == FheOperatorPrices.get_type_name(7) + assert "Uint256" == FheOperatorPrices.get_type_name(8) + assert "Uint512" == FheOperatorPrices.get_type_name(9) + assert "Uint1024" == FheOperatorPrices.get_type_name(10) + end + + test "returns Unknown for invalid type byte" do + assert "Unknown" == FheOperatorPrices.get_type_name(99) + assert "Unknown" == FheOperatorPrices.get_type_name(-1) + end + end +end diff --git a/apps/explorer/test/explorer/chain/fhe/parser_test.exs b/apps/explorer/test/explorer/chain/fhe/parser_test.exs new file mode 100644 index 000000000000..b3e83bd636ba --- /dev/null +++ b/apps/explorer/test/explorer/chain/fhe/parser_test.exs @@ -0,0 +1,798 @@ +defmodule Explorer.Chain.Fhe.ParserTest do + use Explorer.DataCase + + alias ABI.TypeEncoder + alias Explorer.Chain.{Fhe.Parser, Hash, Log} + alias Explorer.Helper + + describe "get_event_name/1" do + test "returns correct event name for FheAdd" do + assert "FheAdd" == Parser.get_event_name("0xdb9050d65240431621d61d6f94b970e63f53a67a5766614ee6e5c5bbd41c8e2e") + end + + test "returns correct event name for TrivialEncrypt" do + assert "TrivialEncrypt" == + Parser.get_event_name("0x063ccd1bba45151d91f6a418065047a3d048d058a922535747bb2b575a01d236") + end + + test "returns Unknown for invalid event signature" do + assert "Unknown" == Parser.get_event_name("0xinvalid") + end + + test "handles case-insensitive topic" do + assert "FheAdd" == Parser.get_event_name("0xDB9050D65240431621D61D6F94B970E63F53A67A5766614EE6E5C5BBD41C8E2E") + end + end + + describe "extract_caller/1" do + test "extracts caller from Hash struct" do + 
address_bytes = <<1::160>> + # Create a 32-byte hash (12 bytes padding + 20 bytes address) + full_hash_bytes = <<0::96, address_bytes::binary>> + {:ok, hash} = Hash.cast(Hash.Full, "0x" <> Base.encode16(full_hash_bytes, case: :lower)) + + result = Parser.extract_caller(hash) + expected = "0x" <> Base.encode16(address_bytes, case: :lower) + assert expected == result + end + + test "extracts caller from binary topic" do + address_bytes = <<1::160>> + topic = <<0::96, address_bytes::binary>> + + result = Parser.extract_caller(topic) + assert "0x" <> Base.encode16(address_bytes, case: :lower) == result + end + + test "extracts caller from 32-byte binary topic" do + # Test with a 32-byte binary (what a topic actually is) + address_bytes = + <<0x11::8, 0x11::8, 0x11::8, 0x11::8, 0x11::8, 0x11::8, 0x11::8, 0x11::8, 0x11::8, 0x11::8, 0x11::8, 0x11::8, + 0x11::8, 0x11::8, 0x11::8, 0x11::8, 0x11::8, 0x11::8, 0x11::8, 0x11::8>> + + topic = <<0::96, address_bytes::binary>> + + result = Parser.extract_caller(topic) + expected = "0x" <> Base.encode16(address_bytes, case: :lower) + assert expected == result + end + + test "returns nil for nil input" do + assert nil == Parser.extract_caller(nil) + end + end + + describe "decode_event_data/2" do + test "decodes binary operation (FheAdd)" do + lhs = <<1::256>> + rhs = <<2::256>> + scalar_byte = <<0>> + result = <<3::256>> + + data = + "0x" <> + Base.encode16( + TypeEncoder.encode([lhs, rhs, scalar_byte, result], [{:bytes, 32}, {:bytes, 32}, {:bytes, 1}, {:bytes, 32}]), + case: :lower + ) + + log = %Log{data: data} + + decoded = Parser.decode_event_data(log, "FheAdd") + + assert decoded.lhs == lhs + assert decoded.rhs == rhs + assert decoded.scalar_byte == scalar_byte + assert decoded.result == result + end + + test "decodes unary operation (FheNeg)" do + ct = <<1::256>> + result = <<2::256>> + + data = "0x" <> Base.encode16(TypeEncoder.encode([ct, result], [{:bytes, 32}, {:bytes, 32}]), case: :lower) + log = %Log{data: data} + + 
decoded = Parser.decode_event_data(log, "FheNeg") + + assert decoded.ct == ct + assert decoded.result == result + end + + test "decodes TrivialEncrypt operation" do + pt = 123 + to_type = 1 + result = <<3::256>> + + data = + "0x" <> + Base.encode16(TypeEncoder.encode([pt, to_type, result], [{:uint, 256}, {:uint, 8}, {:bytes, 32}]), + case: :lower + ) + + log = %Log{data: data} + + decoded = Parser.decode_event_data(log, "TrivialEncrypt") + + assert decoded.plaintext == pt + assert decoded.to_type == to_type + assert decoded.result == result + end + + test "decodes Cast operation" do + ct = <<1::256>> + to_type = 2 + result = <<3::256>> + + data = + "0x" <> + Base.encode16(TypeEncoder.encode([ct, to_type, result], [{:bytes, 32}, {:uint, 8}, {:bytes, 32}]), + case: :lower + ) + + log = %Log{data: data} + + decoded = Parser.decode_event_data(log, "Cast") + + assert decoded.ct == ct + assert decoded.to_type == to_type + assert decoded.result == result + end + + test "decodes FheIfThenElse operation" do + control = <<1::256>> + if_true = <<2::256>> + if_false = <<3::256>> + result = <<4::256>> + + data = + "0x" <> + Base.encode16( + TypeEncoder.encode([control, if_true, if_false, result], [ + {:bytes, 32}, + {:bytes, 32}, + {:bytes, 32}, + {:bytes, 32} + ]), + case: :lower + ) + + log = %Log{data: data} + + decoded = Parser.decode_event_data(log, "FheIfThenElse") + + assert decoded.control == control + assert decoded.if_true == if_true + assert decoded.if_false == if_false + assert decoded.result == result + end + + test "decodes FheRand operation" do + rand_type = 1 + seed = <<1::128>> + result = <<2::256>> + + data = + "0x" <> + Base.encode16(TypeEncoder.encode([rand_type, seed, result], [{:uint, 8}, {:bytes, 16}, {:bytes, 32}]), + case: :lower + ) + + log = %Log{data: data} + + decoded = Parser.decode_event_data(log, "FheRand") + + assert decoded.rand_type == rand_type + assert decoded.seed == seed + assert decoded.result == result + end + + test "decodes 
FheRandBounded operation" do + upper_bound = 100 + rand_type = 1 + seed = <<1::128>> + result = <<2::256>> + + data = + "0x" <> + Base.encode16( + TypeEncoder.encode([upper_bound, rand_type, seed, result], [ + {:uint, 256}, + {:uint, 8}, + {:bytes, 16}, + {:bytes, 32} + ]), + case: :lower + ) + + log = %Log{data: data} + + decoded = Parser.decode_event_data(log, "FheRandBounded") + + assert decoded.upper_bound == upper_bound + assert decoded.rand_type == rand_type + assert decoded.seed == seed + assert decoded.result == result + end + + test "returns default result for unknown event" do + log = %Log{data: <<>>} + decoded = Parser.decode_event_data(log, "UnknownEvent") + + assert decoded.result == <<0::256>> + end + end + + describe "extract_fhe_type/2" do + test "extracts type from TrivialEncrypt to_type" do + operation_data = %{to_type: 2, result: <<0::256>>} + assert "Uint8" == Parser.extract_fhe_type(operation_data, "TrivialEncrypt") + end + + test "extracts all types from TrivialEncrypt to_type" do + # Test all main types + test_cases = [ + {0, "Bool"}, + {1, "Uint4"}, + {2, "Uint8"}, + {3, "Uint16"}, + {4, "Uint32"}, + {5, "Uint64"}, + {6, "Uint128"}, + {7, "Uint160"}, + {8, "Uint256"} + ] + + Enum.each(test_cases, fn {type_index, expected_type} -> + operation_data = %{to_type: type_index, result: <<0::256>>} + + assert expected_type == Parser.extract_fhe_type(operation_data, "TrivialEncrypt"), + "Failed for type_index #{type_index}, expected #{expected_type}" + end) + end + + test "extracts type from Cast input handle (ct)" do + # Cast extracts type from input handle (ct), not to_type parameter + # Type byte at position 30 (0-indexed) in the ct handle + # ct handle with type 3 (Uint16) at byte 30 + ct = <<0::240, 3::8, 0::8>> + operation_data = %{ct: ct, to_type: 5, result: <<0::256>>} + assert "Uint16" == Parser.extract_fhe_type(operation_data, "Cast") + end + + test "extracts all types from Cast input handle" do + # Test all main types in Cast + test_cases 
= [ + {0, "Bool"}, + {1, "Uint4"}, + {2, "Uint8"}, + {3, "Uint16"}, + {4, "Uint32"}, + {5, "Uint64"}, + {6, "Uint128"}, + {7, "Uint160"}, + {8, "Uint256"} + ] + + Enum.each(test_cases, fn {type_index, expected_type} -> + ct = <<0::240, type_index::8, 0::8>> + operation_data = %{ct: ct, to_type: 0, result: <<0::256>>} + + assert expected_type == Parser.extract_fhe_type(operation_data, "Cast"), + "Failed for type_index #{type_index}, expected #{expected_type}" + end) + end + + test "extracts type from comparison operations LHS handle" do + # Comparison operations (FheEq, FheNe, etc.) extract type from LHS handle, not result + # LHS handle with type 2 (Uint8) at byte 30 + lhs = <<0::240, 2::8, 0::8>> + operation_data = %{lhs: lhs, rhs: <<0::256>>, result: <<0::240, 6::8, 0::8>>} + assert "Uint8" == Parser.extract_fhe_type(operation_data, "FheEq") + end + + test "extracts all types from comparison operations LHS handle" do + # Test all main types in comparison operations + test_cases = [ + {0, "Bool"}, + {1, "Uint4"}, + {2, "Uint8"}, + {3, "Uint16"}, + {4, "Uint32"}, + {5, "Uint64"}, + {6, "Uint128"}, + {7, "Uint160"}, + {8, "Uint256"} + ] + + Enum.each(test_cases, fn {type_index, expected_type} -> + lhs = <<0::240, type_index::8, 0::8>> + operation_data = %{lhs: lhs, rhs: <<0::256>>, result: <<0::256>>} + + assert expected_type == Parser.extract_fhe_type(operation_data, "FheEq"), + "Failed for type_index #{type_index}, expected #{expected_type}" + end) + end + + test "extracts type from result handle for most operations" do + # Most operations extract type from result handle + # Type byte at position 30 (0-indexed) + # Result handle with type 2 (Uint8) at byte 30 + result = <<0::240, 2::8, 0::8>> + operation_data = %{result: result} + assert "Uint8" == Parser.extract_fhe_type(operation_data, "FheAdd") + end + + test "extracts all types from result handle for binary operations" do + # Test all main types in result handle + test_cases = [ + {0, "Bool"}, + {1, "Uint4"}, 
+ {2, "Uint8"}, + {3, "Uint16"}, + {4, "Uint32"}, + {5, "Uint64"}, + {6, "Uint128"}, + {7, "Uint160"}, + {8, "Uint256"} + ] + + Enum.each(test_cases, fn {type_index, expected_type} -> + result = <<0::240, type_index::8, 0::8>> + operation_data = %{result: result} + + assert expected_type == Parser.extract_fhe_type(operation_data, "FheAdd"), + "Failed for type_index #{type_index}, expected #{expected_type}" + end) + end + + test "extracts type from unary operation input handle" do + # Unary operations (Cast, FheNot, FheNeg) extract type from input handle (ct) + # ct handle with type 4 (Uint32) at byte 30 + ct = <<0::240, 4::8, 0::8>> + operation_data = %{ct: ct, result: <<0::256>>} + assert "Uint32" == Parser.extract_fhe_type(operation_data, "FheNeg") + end + + test "extracts all types from unary operation input handle" do + # Test all main types in unary operations + test_cases = [ + {0, "Bool"}, + {1, "Uint4"}, + {2, "Uint8"}, + {3, "Uint16"}, + {4, "Uint32"}, + {5, "Uint64"}, + {6, "Uint128"}, + {7, "Uint160"}, + {8, "Uint256"} + ] + + Enum.each(test_cases, fn {type_index, expected_type} -> + ct = <<0::240, type_index::8, 0::8>> + operation_data = %{ct: ct, result: <<0::256>>} + + assert expected_type == Parser.extract_fhe_type(operation_data, "FheNeg"), + "Failed for type_index #{type_index}, expected #{expected_type}" + end) + end + + test "extracts type from FheRand randType parameter" do + # rand_type: 2 = Uint8 + operation_data = %{rand_type: 2, seed: <<0::128>>, result: <<0::256>>} + assert "Uint8" == Parser.extract_fhe_type(operation_data, "FheRand") + end + + test "extracts all types from FheRand randType parameter" do + # Test all main types in FheRand + test_cases = [ + {0, "Bool"}, + {1, "Uint4"}, + {2, "Uint8"}, + {3, "Uint16"}, + {4, "Uint32"}, + {5, "Uint64"}, + {6, "Uint128"}, + {7, "Uint160"}, + {8, "Uint256"} + ] + + Enum.each(test_cases, fn {type_index, expected_type} -> + operation_data = %{rand_type: type_index, seed: <<0::128>>, result: 
<<0::256>>} + + assert expected_type == Parser.extract_fhe_type(operation_data, "FheRand"), + "Failed for type_index #{type_index}, expected #{expected_type}" + end) + end + + test "extracts type from FheRandBounded randType parameter" do + # rand_type: 2 = Uint8 + operation_data = %{rand_type: 2, upper_bound: 100, seed: <<0::128>>, result: <<0::256>>} + assert "Uint8" == Parser.extract_fhe_type(operation_data, "FheRandBounded") + end + + test "returns Unknown for invalid result" do + operation_data = %{result: <<>>} + assert "Unknown" == Parser.extract_fhe_type(operation_data, "FheAdd") + end + + test "returns Unknown for missing required data" do + # TrivialEncrypt without to_type + assert "Unknown" == Parser.extract_fhe_type(%{}, "TrivialEncrypt") + + # Cast without ct + assert "Unknown" == Parser.extract_fhe_type(%{}, "Cast") + + # Comparison without lhs + assert "Unknown" == Parser.extract_fhe_type(%{}, "FheEq") + + # FheRand without rand_type + assert "Unknown" == Parser.extract_fhe_type(%{}, "FheRand") + end + end + + describe "extract_inputs/2" do + test "extracts inputs for binary operation" do + operation_data = %{lhs: <<1::256>>, rhs: <<2::256>>} + inputs = Parser.extract_inputs(operation_data, "FheAdd") + + assert inputs.lhs == Base.encode16(<<1::256>>, case: :lower) + assert inputs.rhs == Base.encode16(<<2::256>>, case: :lower) + end + + test "extracts inputs for unary operation" do + operation_data = %{ct: <<1::256>>} + inputs = Parser.extract_inputs(operation_data, "FheNeg") + + assert inputs.ct == Base.encode16(<<1::256>>, case: :lower) + end + + test "extracts inputs for FheIfThenElse" do + operation_data = %{control: <<1::256>>, if_true: <<2::256>>, if_false: <<3::256>>} + inputs = Parser.extract_inputs(operation_data, "FheIfThenElse") + + assert inputs.control == Base.encode16(<<1::256>>, case: :lower) + assert inputs.if_true == Base.encode16(<<2::256>>, case: :lower) + assert inputs.if_false == Base.encode16(<<3::256>>, case: :lower) + end + + 
test "extracts inputs for TrivialEncrypt" do + operation_data = %{plaintext: 123} + inputs = Parser.extract_inputs(operation_data, "TrivialEncrypt") + + assert inputs.plaintext == 123 + end + + test "returns empty map for unknown operation" do + operation_data = %{} + inputs = Parser.extract_inputs(operation_data, "Unknown") + + assert inputs == %{} + end + end + + describe "calculate_hcu_cost/3" do + test "calculates HCU cost for FheAdd operation" do + cost = Parser.calculate_hcu_cost("FheAdd", "Uint8", false) + assert is_integer(cost) + assert cost > 0 + end + + test "calculates different cost for scalar vs non-scalar" do + scalar_cost = Parser.calculate_hcu_cost("FheAdd", "Uint8", true) + non_scalar_cost = Parser.calculate_hcu_cost("FheAdd", "Uint8", false) + + assert scalar_cost != non_scalar_cost + end + + test "calculates HCU cost for all types in FheAdd" do + # Test all main types for FheAdd non-scalar + test_cases = [ + {"Uint8", 88_000}, + {"Uint16", 93_000}, + {"Uint32", 125_000}, + {"Uint64", 162_000}, + {"Uint128", 259_000} + ] + + Enum.each(test_cases, fn {type, expected_cost} -> + cost = Parser.calculate_hcu_cost("FheAdd", type, false) + + assert cost == expected_cost, + "FheAdd non-scalar #{type} should cost #{expected_cost}, got #{cost}" + end) + + # Test all main types for FheAdd scalar + scalar_cases = [ + {"Uint8", 84_000}, + {"Uint16", 93_000}, + {"Uint32", 95_000}, + {"Uint64", 133_000}, + {"Uint128", 172_000} + ] + + Enum.each(scalar_cases, fn {type, expected_cost} -> + cost = Parser.calculate_hcu_cost("FheAdd", type, true) + + assert cost == expected_cost, + "FheAdd scalar #{type} should cost #{expected_cost}, got #{cost}" + end) + end + + test "calculates HCU cost for FheGe all types" do + # Test FheGe non-scalar for all types + test_cases = [ + {"Uint8", 63_000}, + {"Uint16", 84_000}, + {"Uint32", 118_000}, + {"Uint64", 152_000}, + {"Uint128", 210_000} + ] + + Enum.each(test_cases, fn {type, expected_cost} -> + cost = 
Parser.calculate_hcu_cost("FheGe", type, false) + + assert cost == expected_cost, + "FheGe non-scalar #{type} should cost #{expected_cost}, got #{cost}" + end) + end + + test "calculates HCU cost for FheIfThenElse all types" do + # Test FheIfThenElse for all types + test_cases = [ + {"Bool", 55_000}, + {"Uint8", 55_000}, + {"Uint16", 55_000}, + {"Uint32", 55_000}, + {"Uint64", 55_000}, + {"Uint128", 57_000} + ] + + Enum.each(test_cases, fn {type, expected_cost} -> + cost = Parser.calculate_hcu_cost("FheIfThenElse", type, false) + + assert cost == expected_cost, + "FheIfThenElse #{type} should cost #{expected_cost}, got #{cost}" + end) + end + + test "calculates HCU cost for TrivialEncrypt all types" do + # Test TrivialEncrypt for all types + test_cases = [ + {"Bool", 32}, + {"Uint8", 32}, + {"Uint16", 32}, + {"Uint32", 32}, + {"Uint64", 32}, + {"Uint128", 32} + ] + + Enum.each(test_cases, fn {type, expected_cost} -> + cost = Parser.calculate_hcu_cost("TrivialEncrypt", type, false) + + assert cost == expected_cost, + "TrivialEncrypt #{type} should cost #{expected_cost}, got #{cost}" + end) + end + + test "returns 0 for unknown operation" do + cost = Parser.calculate_hcu_cost("UnknownOperation", "Uint8", false) + assert cost == 0 + end + + test "returns 0 for unknown type" do + cost = Parser.calculate_hcu_cost("FheAdd", "UnknownType", false) + assert cost == 0 + end + end + + describe "get_operation_type/1" do + test "returns arithmetic for arithmetic operations" do + assert "arithmetic" == Parser.get_operation_type("FheAdd") + assert "arithmetic" == Parser.get_operation_type("FheSub") + assert "arithmetic" == Parser.get_operation_type("FheMul") + assert "arithmetic" == Parser.get_operation_type("FheDiv") + assert "arithmetic" == Parser.get_operation_type("FheRem") + end + + test "returns bitwise for bitwise operations" do + assert "bitwise" == Parser.get_operation_type("FheBitAnd") + assert "bitwise" == Parser.get_operation_type("FheBitOr") + assert "bitwise" == 
Parser.get_operation_type("FheBitXor") + assert "bitwise" == Parser.get_operation_type("FheShl") + assert "bitwise" == Parser.get_operation_type("FheShr") + assert "bitwise" == Parser.get_operation_type("FheRotl") + assert "bitwise" == Parser.get_operation_type("FheRotr") + end + + test "returns comparison for comparison operations" do + assert "comparison" == Parser.get_operation_type("FheEq") + assert "comparison" == Parser.get_operation_type("FheNe") + assert "comparison" == Parser.get_operation_type("FheGe") + assert "comparison" == Parser.get_operation_type("FheGt") + assert "comparison" == Parser.get_operation_type("FheLe") + assert "comparison" == Parser.get_operation_type("FheLt") + assert "comparison" == Parser.get_operation_type("FheMin") + assert "comparison" == Parser.get_operation_type("FheMax") + end + + test "returns unary for unary operations" do + assert "unary" == Parser.get_operation_type("FheNeg") + assert "unary" == Parser.get_operation_type("FheNot") + end + + test "returns control for control operations" do + assert "control" == Parser.get_operation_type("FheIfThenElse") + end + + test "returns encryption for encryption operations" do + assert "encryption" == Parser.get_operation_type("TrivialEncrypt") + assert "encryption" == Parser.get_operation_type("Cast") + end + + test "returns random for random operations" do + assert "random" == Parser.get_operation_type("FheRand") + assert "random" == Parser.get_operation_type("FheRandBounded") + end + + test "returns other for unknown operations" do + assert "other" == Parser.get_operation_type("UnknownOperation") + end + end + + describe "FheOperatorPrices.get_type_name/1" do + test "returns correct type name for all main types" do + test_cases = [ + {0, "Bool"}, + {1, "Uint4"}, + {2, "Uint8"}, + {3, "Uint16"}, + {4, "Uint32"}, + {5, "Uint64"}, + {6, "Uint128"}, + {7, "Uint160"}, + {8, "Uint256"} + ] + + Enum.each(test_cases, fn {type_index, expected_type} -> + type_name = 
Explorer.Chain.FheOperatorPrices.get_type_name(type_index) + + assert type_name == expected_type, + "Type index #{type_index} should map to #{expected_type}, got #{type_name}" + end) + end + + test "returns Unknown for invalid type index" do + assert "Unknown" == Explorer.Chain.FheOperatorPrices.get_type_name(999) + assert "Unknown" == Explorer.Chain.FheOperatorPrices.get_type_name(-1) + end + end + + describe "build_hcu_depth_map/1" do + test "calculates depth for independent operations" do + operations = [ + %{result: <<1::256>>, inputs: %{lhs: "0x00", rhs: "0x00"}, hcu_cost: 100, is_scalar: false}, + %{result: <<2::256>>, inputs: %{lhs: "0x00", rhs: "0x00"}, hcu_cost: 200, is_scalar: false} + ] + + depth_map = Parser.build_hcu_depth_map(operations) + + result1 = Base.encode16(<<1::256>>, case: :lower) + result2 = Base.encode16(<<2::256>>, case: :lower) + + assert depth_map[result1] == 100 + assert depth_map[result2] == 200 + end + + test "calculates depth for dependent operations" do + result1 = Base.encode16(<<1::256>>, case: :lower) + result2 = Base.encode16(<<2::256>>, case: :lower) + + operations = [ + %{result: <<1::256>>, inputs: %{lhs: "0x00", rhs: "0x00"}, hcu_cost: 100, is_scalar: false}, + %{result: <<2::256>>, inputs: %{lhs: result1, rhs: "0x00"}, hcu_cost: 200, is_scalar: false} + ] + + depth_map = Parser.build_hcu_depth_map(operations) + + assert depth_map[result1] == 100 + # 100 (from result1) + 200 (current) + assert depth_map[result2] == 300 + end + + test "calculates depth for scalar operations (only LHS depth)" do + result1 = Base.encode16(<<1::256>>, case: :lower) + result2 = Base.encode16(<<2::256>>, case: :lower) + + operations = [ + %{result: <<1::256>>, inputs: %{lhs: "0x00", rhs: "0x00"}, hcu_cost: 100, is_scalar: false}, + # Scalar operation: RHS is plain value, so only use LHS depth + %{result: <<2::256>>, inputs: %{lhs: result1, rhs: "plain_value"}, hcu_cost: 200, is_scalar: true} + ] + + depth_map = 
Parser.build_hcu_depth_map(operations) + + assert depth_map[result1] == 100 + # Scalar: lhs_depth (100) + cost (200) = 300 (not max of lhs and rhs) + assert depth_map[result2] == 300 + end + + test "calculates depth for non-scalar operations (max of LHS and RHS)" do + result1 = Base.encode16(<<1::256>>, case: :lower) + result2 = Base.encode16(<<2::256>>, case: :lower) + result3 = Base.encode16(<<3::256>>, case: :lower) + + operations = [ + %{result: <<1::256>>, inputs: %{lhs: "0x00", rhs: "0x00"}, hcu_cost: 100, is_scalar: false}, + %{result: <<2::256>>, inputs: %{lhs: "0x00", rhs: "0x00"}, hcu_cost: 200, is_scalar: false}, + # Non-scalar: use max of both input depths + %{result: <<3::256>>, inputs: %{lhs: result1, rhs: result2}, hcu_cost: 300, is_scalar: false} + ] + + depth_map = Parser.build_hcu_depth_map(operations) + + assert depth_map[result1] == 100 + assert depth_map[result2] == 200 + # Non-scalar: max(100, 200) + 300 = 500 + assert depth_map[result3] == 500 + end + + test "calculates depth for unary operations" do + result1 = Base.encode16(<<1::256>>, case: :lower) + result2 = Base.encode16(<<2::256>>, case: :lower) + + operations = [ + %{result: <<1::256>>, inputs: %{}, hcu_cost: 100}, + %{result: <<2::256>>, inputs: %{ct: result1}, hcu_cost: 200} + ] + + depth_map = Parser.build_hcu_depth_map(operations) + + assert depth_map[result1] == 100 + # Unary: ct_depth (100) + cost (200) = 300 + assert depth_map[result2] == 300 + end + + test "calculates depth for FheIfThenElse" do + control = Base.encode16(<<1::256>>, case: :lower) + if_true = Base.encode16(<<2::256>>, case: :lower) + if_false = Base.encode16(<<3::256>>, case: :lower) + result = Base.encode16(<<4::256>>, case: :lower) + + operations = [ + %{result: <<1::256>>, inputs: %{}, hcu_cost: 50}, + %{result: <<2::256>>, inputs: %{}, hcu_cost: 100}, + %{result: <<3::256>>, inputs: %{}, hcu_cost: 150}, + %{result: <<4::256>>, inputs: %{control: control, if_true: if_true, if_false: if_false}, hcu_cost: 200} 
+ ] + + depth_map = Parser.build_hcu_depth_map(operations) + + # max(50, 100, 150) + 200 + assert depth_map[result] == 350 + end + + test "calculates depth for operations with no inputs" do + result1 = Base.encode16(<<1::256>>, case: :lower) + + operations = [ + %{result: <<1::256>>, inputs: %{}, hcu_cost: 100} + ] + + depth_map = Parser.build_hcu_depth_map(operations) + + # Operations with no inputs (TrivialEncrypt, FheRand, etc.) have depth = cost only + assert depth_map[result1] == 100 + end + end + + describe "all_fhe_events/0" do + test "returns list of all FHE event signatures" do + events = Parser.all_fhe_events() + + assert is_list(events) + assert length(events) > 0 + assert "0xdb9050d65240431621d61d6f94b970e63f53a67a5766614ee6e5c5bbd41c8e2e" in events + end + end +end diff --git a/apps/explorer/test/explorer/chain/import/runner/fhe_operations_test.exs b/apps/explorer/test/explorer/chain/import/runner/fhe_operations_test.exs new file mode 100644 index 000000000000..ac86ecd6666e --- /dev/null +++ b/apps/explorer/test/explorer/chain/import/runner/fhe_operations_test.exs @@ -0,0 +1,227 @@ +defmodule Explorer.Chain.Import.Runner.FheOperationsTest do + use Explorer.DataCase + + import Mox + + alias Ecto.Multi + alias Explorer.Chain.{Block, FheOperation, Hash, Transaction} + alias Explorer.Chain.Import.Runner.FheOperations + alias Explorer.Repo + + setup :verify_on_exit! 
+ setup :set_mox_global + + describe "run/3" do + test "inserts FHE operations successfully" do + transaction = insert(:transaction) |> with_block() + block = transaction.block + caller = insert(:address) + + changes_list = [ + %{ + transaction_hash: transaction.hash, + log_index: 1, + block_hash: block.hash, + block_number: block.number, + operation: "FheAdd", + operation_type: "arithmetic", + fhe_type: "Uint8", + is_scalar: false, + hcu_cost: 100, + hcu_depth: 1, + caller: caller.hash, + result_handle: <<1::256>>, + input_handles: %{"lhs" => "0x00", "rhs" => "0x00"} + } + ] + + timestamp = DateTime.utc_now() + options = %{timestamps: %{inserted_at: timestamp, updated_at: timestamp}} + + # Multi.run unwraps the {:ok, value} tuple + assert {:ok, %{insert_fhe_operations: [inserted]}} = + Multi.new() + |> FheOperations.run(changes_list, options) + |> Repo.transaction() + + assert inserted.transaction_hash == transaction.hash + assert inserted.log_index == 1 + assert inserted.operation == "FheAdd" + end + + test "handles empty changes list" do + timestamp = DateTime.utc_now() + options = %{timestamps: %{inserted_at: timestamp, updated_at: timestamp}} + + # Multi.run unwraps the {:ok, value} tuple, so we get just the value + assert {:ok, %{insert_fhe_operations: []}} = + Multi.new() + |> FheOperations.run([], options) + |> Repo.transaction() + end + + test "handles conflict resolution on duplicate operations" do + transaction = insert(:transaction) |> with_block() + block = transaction.block + + # Insert first operation + insert(:fhe_operation, + transaction_hash: transaction.hash, + log_index: 1, + block_hash: block.hash, + block_number: block.number, + operation: "FheAdd", + hcu_cost: 100 + ) + + # Try to insert same operation with different data + changes_list = [ + %{ + transaction_hash: transaction.hash, + log_index: 1, + block_hash: block.hash, + block_number: block.number, + operation: "FheMul", + operation_type: "arithmetic", + fhe_type: "Uint8", + is_scalar: 
false, + hcu_cost: 200, + hcu_depth: 1, + # Include caller field (nil) to avoid KeyError + caller: nil, + result_handle: <<1::256>>, + input_handles: %{} + } + ] + + timestamp = DateTime.utc_now() + options = %{timestamps: %{inserted_at: timestamp, updated_at: timestamp}} + + # Multi.run unwraps the {:ok, value} tuple + assert {:ok, %{insert_fhe_operations: [updated]}} = + Multi.new() + |> FheOperations.run(changes_list, options) + |> Repo.transaction() + + # Should replace the existing operation + assert updated.operation == "FheMul" + assert updated.hcu_cost == 200 + + # Verify only one operation exists + operations = FheOperation.by_transaction_hash(transaction.hash) + assert length(operations) == 1 + end + + test "orders operations by transaction_hash and log_index" do + transaction = insert(:transaction) |> with_block() + block = transaction.block + + changes_list = [ + %{ + transaction_hash: transaction.hash, + log_index: 3, + block_hash: block.hash, + block_number: block.number, + operation: "FheAdd", + operation_type: "arithmetic", + fhe_type: "Uint8", + is_scalar: false, + hcu_cost: 100, + hcu_depth: 1, + caller: nil, + result_handle: <<3::256>>, + input_handles: %{} + }, + %{ + transaction_hash: transaction.hash, + log_index: 1, + block_hash: block.hash, + block_number: block.number, + operation: "FheSub", + operation_type: "arithmetic", + fhe_type: "Uint8", + is_scalar: false, + hcu_cost: 100, + hcu_depth: 1, + caller: nil, + result_handle: <<1::256>>, + input_handles: %{} + }, + %{ + transaction_hash: transaction.hash, + log_index: 2, + block_hash: block.hash, + block_number: block.number, + operation: "FheMul", + operation_type: "arithmetic", + fhe_type: "Uint8", + is_scalar: false, + hcu_cost: 100, + hcu_depth: 1, + caller: nil, + result_handle: <<2::256>>, + input_handles: %{} + } + ] + + timestamp = DateTime.utc_now() + options = %{timestamps: %{inserted_at: timestamp, updated_at: timestamp}} + + # Multi.run unwraps the {:ok, value} tuple + assert 
{:ok, %{insert_fhe_operations: inserted}} = + Multi.new() + |> FheOperations.run(changes_list, options) + |> Repo.transaction() + + # Verify ordering + assert length(inserted) == 3 + assert Enum.at(inserted, 0).log_index == 1 + assert Enum.at(inserted, 1).log_index == 2 + assert Enum.at(inserted, 2).log_index == 3 + end + + test "tags contracts from FHE operations" do + transaction = insert(:transaction) |> with_block() + block = transaction.block + caller = insert(:address, contract_code: "0x6080604052") + to_address = insert(:address, contract_code: "0x6080604052") + + # Set transaction to_address + transaction + |> Transaction.changeset(%{to_address_hash: to_address.hash}) + |> Repo.update!() + + changes_list = [ + %{ + transaction_hash: transaction.hash, + log_index: 1, + block_hash: block.hash, + block_number: block.number, + operation: "FheAdd", + operation_type: "arithmetic", + fhe_type: "Uint8", + is_scalar: false, + hcu_cost: 100, + hcu_depth: 1, + caller: caller.hash, + result_handle: <<1::256>>, + input_handles: %{} + } + ] + + timestamp = DateTime.utc_now() + options = %{timestamps: %{inserted_at: timestamp, updated_at: timestamp}} + + # Mock RPC calls for FHE checks + EthereumJSONRPC.Mox + |> expect(:json_rpc, 2, fn _request, _options -> + {:ok, "0x0000000000000000000000000000000000000000000000000000000000000000"} + end) + + assert {:ok, _} = + Multi.new() + |> FheOperations.run(changes_list, options) + |> Repo.transaction() + end + end +end diff --git a/apps/explorer/test/support/factory.ex b/apps/explorer/test/support/factory.ex index 540b9380eb68..e2586b3e9c76 100644 --- a/apps/explorer/test/support/factory.ex +++ b/apps/explorer/test/support/factory.ex @@ -40,6 +40,7 @@ defmodule Explorer.Factory do Block, ContractMethod, Data, + FheOperation, Hash, InternalTransaction, InternalTransaction.DeleteQueue, @@ -1140,6 +1141,7 @@ defmodule Explorer.Factory do def transaction_factory do %Transaction{ from_address: build(:address), + fhe_operations_count: 
0, gas: Enum.random(21_000..100_000), gas_price: Enum.random(10..99) * 1_000_000_00, hash: transaction_hash(), @@ -1836,6 +1838,31 @@ defmodule Explorer.Factory do } end + def fhe_operation_factory do + transaction = insert(:transaction) |> with_block() + block = transaction.block + caller = insert(:address) + + %FheOperation{ + transaction_hash: transaction.hash, + log_index: sequence("fhe_operation_log_index", & &1), + block_hash: block.hash, + block_number: block.number, + operation: sequence("fhe_operation", fn i -> "FheAdd#{i}" end), + operation_type: "arithmetic", + fhe_type: "Uint8", + is_scalar: false, + hcu_cost: sequence("fhe_operation_hcu_cost", fn i -> Kernel.+(100, i) end), + hcu_depth: sequence("fhe_operation_hcu_depth", fn i -> Kernel.+(1, rem(i, 5)) end), + caller: caller.hash, + result_handle: sequence("fhe_operation_result_handle", &<<&1::256>>), + input_handles: %{ + "lhs" => "0x" <> Base.encode16(<<1::256>>, case: :lower), + "rhs" => "0x" <> Base.encode16(<<2::256>>, case: :lower) + } + } + end + def migration_status_factory do %MigrationStatus{ migration_name: sequence("migration_", &"migration_#{&1}"), diff --git a/apps/indexer/lib/indexer/block/fetcher.ex b/apps/indexer/lib/indexer/block/fetcher.ex index a69569cb8c21..4e3e01eff1df 100644 --- a/apps/indexer/lib/indexer/block/fetcher.ex +++ b/apps/indexer/lib/indexer/block/fetcher.ex @@ -55,6 +55,7 @@ defmodule Indexer.Block.Fetcher do AddressCoinBalances, Addresses, AddressTokenBalances, + FheOperations, MintTransfers, SignedAuthorizations, TokenInstances, @@ -195,6 +196,7 @@ defmodule Indexer.Block.Fetcher do celo_pending_account_operations = parse_celo_pending_account_operations(logs), tokens = Enum.uniq(tokens ++ celo_tokens), %{transaction_actions: transaction_actions} = TransactionActions.parse(logs), + %{fhe_operations: fhe_operations} = FheOperations.parse(logs), %{mint_transfers: mint_transfers} = MintTransfers.parse(logs), optimism_withdrawals = if(callback_module == 
Indexer.Block.Realtime.Fetcher, do: OptimismWithdrawals.parse(logs), else: []), @@ -266,7 +268,8 @@ defmodule Indexer.Block.Fetcher do transactions: %{params: transactions_with_receipts}, withdrawals: %{params: withdrawals_params}, token_instances: %{params: token_instances}, - signed_authorizations: %{params: SignedAuthorizations.parse(transactions_with_receipts)} + signed_authorizations: %{params: SignedAuthorizations.parse(transactions_with_receipts)}, + fhe_operations: %{params: fhe_operations} }, chain_type_import_options = %{ diff --git a/apps/indexer/lib/indexer/transform/fhe_operations.ex b/apps/indexer/lib/indexer/transform/fhe_operations.ex new file mode 100644 index 000000000000..fb9812f1f778 --- /dev/null +++ b/apps/indexer/lib/indexer/transform/fhe_operations.ex @@ -0,0 +1,155 @@ +defmodule Indexer.Transform.FheOperations do + @moduledoc """ + Parses FHE (Fully Homomorphic Encryption) operations from transaction logs during indexing. + + This module extracts FHE operations from logs, calculates HCU metrics, and prepares + data for database insertion. + """ + + alias Explorer.Chain.Fhe.Parser + + @doc """ + Parses FHE operations from a list of logs. + + Returns a map with :fhe_operations key containing a list of params ready for database insertion. 
+ """ + @spec parse(list()) :: %{fhe_operations: list()} + def parse(logs) do + if Application.get_env(:indexer, __MODULE__)[:enabled] do + filtered_logs = filter_fhe_logs(logs) + + fhe_operations = + filtered_logs + |> group_by_transaction() + |> parse_all_transactions() + |> List.flatten() + + %{fhe_operations: fhe_operations} + else + %{fhe_operations: []} + end + end + + defp filter_fhe_logs(logs) do + all_events = Parser.all_fhe_events() + + Enum.filter(logs, fn log -> + case log.first_topic do + nil -> + false + + "" -> + false + + topic -> + normalized = sanitize_first_topic(topic) + normalized in all_events + end + end) + end + + defp group_by_transaction(logs) do + Enum.group_by(logs, & &1.transaction_hash) + end + + defp parse_all_transactions(grouped_logs) do + Enum.map(grouped_logs, fn {_tx_hash, tx_logs} -> + parse_transaction_logs(tx_logs) + end) + end + + defp parse_transaction_logs(tx_logs) when is_list(tx_logs) and tx_logs != [] do + # Get first log to extract common fields + first_log = hd(tx_logs) + transaction_hash = first_log.transaction_hash + block_hash = first_log.block_hash + block_number = first_log.block_number + + # Sort logs by index to ensure correct HCU depth calculation order + sorted_logs = Enum.sort_by(tx_logs, & &1.index) + + # Parse operations using shared Parser logic + operations = Enum.map(sorted_logs, &parse_single_log/1) + + # Build HCU depth map + hcu_depth_map = Parser.build_hcu_depth_map(operations) + + # Convert to database params + Enum.map(operations, fn op -> + result_handle_key = if is_binary(op.result), do: Base.encode16(op.result, case: :lower), else: "unknown" + + %{ + transaction_hash: transaction_hash, + block_hash: block_hash, + log_index: op.log_index, + block_number: block_number, + operation: op.operation, + operation_type: op.type, + fhe_type: op.fhe_type, + is_scalar: op.is_scalar, + hcu_cost: op.hcu_cost, + hcu_depth: Map.get(hcu_depth_map, result_handle_key, op.hcu_cost), + caller: op.caller, + 
result_handle: op.result, + input_handles: op.inputs + } + end) + end + + defp parse_transaction_logs(_), do: [] + + defp parse_single_log(log) do + event_name = Parser.get_event_name(log.first_topic) + caller = extract_caller_binary(log.second_topic) + operation_data = Parser.decode_event_data(log, event_name) + + fhe_type = Parser.extract_fhe_type(operation_data, event_name) + is_scalar = Map.get(operation_data, :scalar_byte) == <<0x01>> + hcu_cost = Parser.calculate_hcu_cost(event_name, fhe_type, is_scalar) + + %{ + log_index: log.index, + operation: event_name, + type: Parser.get_operation_type(event_name), + fhe_type: fhe_type, + is_scalar: is_scalar, + hcu_cost: hcu_cost, + caller: caller, + inputs: Parser.extract_inputs(operation_data, event_name), + result: operation_data[:result] || <<0::256>> + } + end + + # Helper functions + + defp sanitize_first_topic(%Explorer.Chain.Data{bytes: bytes}), do: "0x" <> Base.encode16(bytes, case: :lower) + defp sanitize_first_topic(topic) when is_binary(topic), do: String.downcase(topic) + defp sanitize_first_topic(_), do: "" + + # We need specific binary extraction for caller here because Indexer might deal with raw binaries differently than Explorer + defp extract_caller_binary(nil), do: nil + + defp extract_caller_binary("0x" <> hex_data) when byte_size(hex_data) == 64 do + case Base.decode16(hex_data, case: :mixed) do + {:ok, bytes} -> extract_caller_binary(bytes) + _ -> nil + end + end + + defp extract_caller_binary(topic) when is_binary(topic) and byte_size(topic) == 32 do + <<_::binary-size(12), address::binary-size(20)>> = topic + address + end + + defp extract_caller_binary(%Explorer.Chain.Hash{bytes: bytes}) when byte_size(bytes) >= 32 do + <<_::binary-size(12), address::binary-size(20)>> = bytes + address + end + + defp extract_caller_binary(%Explorer.Chain.Data{bytes: bytes}) when byte_size(bytes) >= 32 do + <<_::binary-size(12), address::binary-size(20)>> = bytes + address + end + + defp 
extract_caller_binary(_), do: nil +end diff --git a/config/runtime.exs b/config/runtime.exs index 17d242e8f24a..b000138ab927 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -1026,6 +1026,9 @@ config :indexer, :arc, config :indexer, Indexer.Supervisor, enabled: !disable_indexer? +config :indexer, Indexer.Transform.FheOperations, + enabled: ConfigHelper.parse_bool_env_var("INDEXER_FHE_OPERATIONS_ENABLED", "false") + config :indexer, Indexer.Fetcher.TransactionAction.Supervisor, enabled: ConfigHelper.parse_bool_env_var("INDEXER_TX_ACTIONS_ENABLE") diff --git a/cspell.json b/cspell.json index 85bf05079860..f3cf38a0f65d 100644 --- a/cspell.json +++ b/cspell.json @@ -148,6 +148,7 @@ "clusterization", "codecv", "codeformat", + "codegen", "coef", "coeff", "coinprice", @@ -285,6 +286,7 @@ "fvdskvjglav", "fwrite", "fwupv", + "fhevm", "geas", "getabi", "getbalance", @@ -318,6 +320,7 @@ "hardfork", "haspopup", "Hazkne", + "Homomorphic", "healthcheck", "histoday", "hljs", @@ -579,6 +582,8 @@ "reuseaddr", "rollup", "rollups", + "Rotr", + "rotr", "RPC's", "RPCs", "rudimentaries", @@ -773,6 +778,7 @@ "Xerom", "xffff", "xlevel", + "xinvalid", "xlink", "xmark", "xmlhttprequest", From 24b47ed7bea909dd01bafbfc22271d428382403f Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Wed, 11 Mar 2026 11:14:37 +0300 Subject: [PATCH 12/42] Add INDEXER_FHE_OPERATIONS_ENABLED to common-blockscout.env --- docker-compose/envs/common-blockscout.env | 1 + 1 file changed, 1 insertion(+) diff --git a/docker-compose/envs/common-blockscout.env b/docker-compose/envs/common-blockscout.env index 1421a28bb55e..1fe4cf4c247c 100644 --- a/docker-compose/envs/common-blockscout.env +++ b/docker-compose/envs/common-blockscout.env @@ -221,6 +221,7 @@ API_V1_WRITE_METHODS_DISABLED=false # INDEXER_TOKEN_INSTANCE_REFETCH_BATCH_SIZE=10 # INDEXER_TOKEN_INSTANCE_REFETCH_CONCURRENCY= # INDEXER_SIGNED_AUTHORIZATION_STATUS_BATCH_SIZE= +# INDEXER_FHE_OPERATIONS_ENABLED=false # 
INDEXER_MULTICHAIN_SEARCH_DB_EXPORT_MAIN_QUEUE_BATCH_SIZE= # INDEXER_MULTICHAIN_SEARCH_DB_EXPORT_MAIN_QUEUE_CONCURRENCY= # INDEXER_MULTICHAIN_SEARCH_DB_EXPORT_MAIN_QUEUE_ENQUEUE_BUSY_WAITING_TIMEOUT= From 3f7283e3c0e9cf23543859558b88249ee601d19c Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Thu, 12 Mar 2026 11:30:58 +0300 Subject: [PATCH 13/42] Improved PR template description --- PULL_REQUEST_TEMPLATE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md index 91ea9121b82f..15ab63d31d24 100644 --- a/PULL_REQUEST_TEMPLATE.md +++ b/PULL_REQUEST_TEMPLATE.md @@ -31,6 +31,6 @@ _If you have any Incompatible Changes in the above Changelog, outline how users - [ ] General docs: submitted PR to [docs repository](https://github.com/blockscout/docs). - [ ] ENV vars: updated [env vars list](https://github.com/blockscout/docs/tree/main/setup/env-variables) and set version parameter to `master`. - [ ] Deprecated vars: added to [deprecated env vars list](https://github.com/blockscout/docs/tree/main/setup/env-variables/deprecated-env-variables). -- [ ] If I modified API endpoints, I updated the Swagger/OpenAPI schemas accordingly and checked that schemas are asserted in tests. +- [ ] If I modified API endpoints, I updated the Swagger/OpenAPI schemas accordingly and checked that schemas are asserted in tests, and highlighted the change in the PR description. - [ ] If I added new DB indices, I checked, that they are not redundant, with PGHero or other tools. - [ ] If I added/removed chain type, I modified the Github CI matrix and PR labels accordingly. 
From a8fc3e60ff1ae30ae9cfd5306f27308d64d819e6 Mon Sep 17 00:00:00 2001 From: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> Date: Thu, 12 Mar 2026 18:33:34 +0300 Subject: [PATCH 14/42] fix: state changes handle ERC-7984; nil tx.value (#14101) --- .../models/transaction_state_helper.ex | 83 ++++++---- .../api/v2/transaction_controller_test.exs | 143 ++++++++++++++++++ .../chain/transaction/state_change.ex | 15 +- 3 files changed, 206 insertions(+), 35 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/models/transaction_state_helper.ex b/apps/block_scout_web/lib/block_scout_web/models/transaction_state_helper.ex index 78c8cc7794e0..d9f1f0ac4497 100644 --- a/apps/block_scout_web/lib/block_scout_web/models/transaction_state_helper.ex +++ b/apps/block_scout_web/lib/block_scout_web/models/transaction_state_helper.ex @@ -9,7 +9,16 @@ defmodule BlockScoutWeb.Models.TransactionStateHelper do import Explorer.Chain.SmartContract.Proxy.Models.Implementation, only: [proxy_implementations_association: 0] alias Explorer.{Chain, PagingOptions, Repo} - alias Explorer.Chain.{Address.CoinBalance, BlockNumberHelper, InternalTransaction, Transaction, Wei} + + alias Explorer.Chain.{ + Address.CoinBalance, + BlockNumberHelper, + DenormalizationHelper, + InternalTransaction, + Transaction, + Wei + } + alias Explorer.Chain.Cache.StateChanges alias Explorer.Chain.Transaction.StateChange alias Indexer.Fetcher.OnDemand.CoinBalance, as: CoinBalanceOnDemand @@ -195,36 +204,48 @@ defmodule BlockScoutWeb.Models.TransactionStateHelper do end defp token_transfers_to_balances_reducer(transfer, balances, prev_block, options) do - from = transfer.from_address - to = transfer.to_address - token_hash = transfer.token_contract_address_hash - - balances - |> case do - # from address already in the map - %{^from => %{^token_hash => _}} = balances -> - balances - - # we need to add from address into the map - balances -> - put_in( - balances, - Enum.map([from, token_hash], 
&Access.key(&1, %{})), - token_balances(from.hash, transfer, prev_block, options) - ) - end - |> case do - # to address already in the map - %{^to => %{^token_hash => _}} = balances -> - balances - - # we need to add to address into the map - balances -> - put_in( - balances, - Enum.map([to, token_hash], &Access.key(&1, %{})), - token_balances(to.hash, transfer, prev_block, options) - ) + token_type = + if DenormalizationHelper.tt_denormalization_finished?() do + transfer.token_type + else + transfer.token && transfer.token.type + end + + # Skip ERC-7984 (confidential) transfers - we can't track encrypted balances + if token_type == "ERC-7984" do + balances + else + from = transfer.from_address + to = transfer.to_address + token_hash = transfer.token_contract_address_hash + + balances + |> case do + # from address already in the map + %{^from => %{^token_hash => _}} = balances -> + balances + + # we need to add from address into the map + balances -> + put_in( + balances, + Enum.map([from, token_hash], &Access.key(&1, %{})), + token_balances(from.hash, transfer, prev_block, options) + ) + end + |> case do + # to address already in the map + %{^to => %{^token_hash => _}} = balances -> + balances + + # we need to add to address into the map + balances -> + put_in( + balances, + Enum.map([to, token_hash], &Access.key(&1, %{})), + token_balances(to.hash, transfer, prev_block, options) + ) + end end end end diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/transaction_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/transaction_controller_test.exs index e4bd0655e889..a403bcdc8161 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/transaction_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/transaction_controller_test.exs @@ -1953,6 +1953,149 @@ defmodule BlockScoutWeb.API.V2.TransactionControllerTest do assert token_data["name"] == "Scam Token" assert 
token_data["address_hash"] == to_string(token.contract_address) end + + test "return state changes with null value internal transaction", %{conn: conn} do + block_before = insert(:block) + + transaction = + :transaction + |> insert() + |> with_block(status: :ok) + + internal_transaction = + insert(:internal_transaction, + call_type: :call, + transaction_hash: transaction.hash, + transaction: transaction, + index: 1, + block_number: transaction.block_number, + transaction_index: transaction.index, + block_hash: transaction.block_hash, + value: %Wei{value: Decimal.new(1000)} + ) + + insert(:internal_transaction, + call_type: :call, + transaction_hash: transaction.hash, + transaction: transaction, + index: 2, + block_number: transaction.block_number, + transaction_index: transaction.index, + block_hash: transaction.block_hash, + value: nil, + from_address_hash: internal_transaction.from_address_hash, + from_address: internal_transaction.from_address, + to_address_hash: internal_transaction.to_address_hash, + to_address: internal_transaction.to_address + ) + + insert(:address_coin_balance, + address: transaction.from_address, + address_hash: transaction.from_address_hash, + block_number: block_before.number, + value: %Wei{value: Decimal.new(1000)} + ) + + insert(:address_coin_balance, + address: transaction.to_address, + address_hash: transaction.to_address_hash, + block_number: block_before.number, + value: %Wei{value: Decimal.new(1000)} + ) + + insert(:address_coin_balance, + address: transaction.block.miner, + address_hash: transaction.block.miner_hash, + block_number: block_before.number, + value: %Wei{value: Decimal.new(1000)} + ) + + insert(:address_coin_balance, + address: internal_transaction.from_address, + address_hash: internal_transaction.from_address_hash, + block_number: block_before.number, + value: %Wei{value: Decimal.new(1000)} + ) + + insert(:address_coin_balance, + address: internal_transaction.to_address, + address_hash: 
internal_transaction.to_address_hash, + block_number: block_before.number, + value: %Wei{value: Decimal.new(1000)} + ) + + request = get(conn, "/api/v2/transactions/#{to_string(transaction.hash)}/state-changes") + + assert response = json_response(request, 200) + assert Enum.count(response["items"]) == 5 + end + + test "return state changes with ERC-7984 token transfer", %{conn: conn} do + block_before = insert(:block) + + transaction = + :transaction + |> insert() + |> with_block(status: :ok) + + confidential_token = insert(:token, type: "ERC-7984", symbol: "CT", name: "Confidential Token") + erc20_token = insert(:token, type: "ERC-20", symbol: "ERC20", name: "ERC20 Token") + + erc20_token_transfer = + insert(:token_transfer, + token_type: "ERC-20", + transaction: transaction, + transaction_hash: transaction.hash, + block: transaction.block, + block_number: transaction.block_number, + token_contract_address: erc20_token.contract_address, + amount: Decimal.new(100), + token_ids: nil + ) + + from_address = erc20_token_transfer.from_address + to_address = erc20_token_transfer.to_address + + # Create ERC-7984 token transfer - should be skipped in state changes + insert(:token_transfer, + token_type: "ERC-7984", + transaction: transaction, + transaction_hash: transaction.hash, + block: transaction.block, + block_number: transaction.block_number, + token_contract_address: confidential_token.contract_address, + from_address: from_address, + to_address: to_address, + amount: nil, + token_ids: nil + ) + + insert(:address_coin_balance, + address: transaction.from_address, + address_hash: transaction.from_address_hash, + block_number: block_before.number, + value: %Wei{value: Decimal.new(1000)} + ) + + insert(:address_coin_balance, + address: transaction.to_address, + address_hash: transaction.to_address_hash, + block_number: block_before.number, + value: %Wei{value: Decimal.new(1000)} + ) + + insert(:address_coin_balance, + address: transaction.block.miner, + address_hash: 
transaction.block.miner_hash, + block_number: block_before.number, + value: %Wei{value: Decimal.new(1000)} + ) + + request = get(conn, "/api/v2/transactions/#{to_string(transaction.hash)}/state-changes") + + assert response = json_response(request, 200) + assert Enum.count(response["items"]) == 5 + end end if @chain_identity == {:optimism, :celo} do diff --git a/apps/explorer/lib/explorer/chain/transaction/state_change.ex b/apps/explorer/lib/explorer/chain/transaction/state_change.ex index 1602a8038618..ea0533104ce2 100644 --- a/apps/explorer/lib/explorer/chain/transaction/state_change.ex +++ b/apps/explorer/lib/explorer/chain/transaction/state_change.ex @@ -112,8 +112,15 @@ defmodule Explorer.Chain.Transaction.StateChange do end defp token_transfers_balances_reducer(transfer, state_balances_map, include_transfers) do + token_type = + if DenormalizationHelper.tt_denormalization_finished?() do + transfer.token_type + else + transfer.token && transfer.token.type + end + # Skip ERC-7984 (confidential) transfers - we can't track encrypted balances - if transfer.token && transfer.token.type == "ERC-7984" do + if token_type == "ERC-7984" do state_balances_map else from = transfer.from_address @@ -201,7 +208,7 @@ defmodule Explorer.Chain.Transaction.StateChange do end def from_loss(%InternalTransaction{} = transaction) do - transaction.value + transaction.value || Wei.zero() end @doc """ @@ -211,14 +218,14 @@ defmodule Explorer.Chain.Transaction.StateChange do @spec to_profit(Transaction.t() | InternalTransaction.t()) :: Wei.t() def to_profit(%Transaction{} = transaction) do if error?(transaction) do - %Wei{value: 0} + Wei.zero() else transaction.value end end def to_profit(%InternalTransaction{} = transaction) do - transaction.value + transaction.value || Wei.zero() end # Calculates block miner profit for the given transaction. 
From 08bd9ea696417e43f69a556ee655f2226dddff4b Mon Sep 17 00:00:00 2001 From: Kirill Fedoseev Date: Thu, 12 Mar 2026 19:33:56 +0400 Subject: [PATCH 15/42] fix: add missing query params in user ops swagger spec (#14104) --- .../proxy/account_abstraction_controller.ex | 20 +++- .../block_scout_web/schemas/api/v2/general.ex | 98 +++++++++++++++++++ 2 files changed, 115 insertions(+), 3 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/proxy/account_abstraction_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/proxy/account_abstraction_controller.ex index 362518f54956..3a559f1d8ab3 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/proxy/account_abstraction_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/api/v2/proxy/account_abstraction_controller.ex @@ -259,7 +259,7 @@ defmodule BlockScoutWeb.API.V2.Proxy.AccountAbstractionController do operation :accounts, summary: "List of account abstraction wallets", description: "Retrieves a list of account abstraction wallets.", - parameters: base_params() ++ define_paging_params(["page_size", "page_token"]), + parameters: [factory_address_hash_param() | base_params()] ++ define_paging_params(["page_size", "page_token"]), responses: [ ok: {"List of account abstraction wallets with pagination.", "application/json", @@ -287,7 +287,10 @@ defmodule BlockScoutWeb.API.V2.Proxy.AccountAbstractionController do operation :bundles, summary: "List of recent bundles", description: "Retrieves a list of recent bundles.", - parameters: base_params() ++ define_paging_params(["page_size", "page_token"]), + parameters: + base_params() ++ + [bundler_address_hash_param(), entry_point_address_hash_param()] ++ + define_paging_params(["page_size", "page_token"]), responses: [ ok: {"List of bundles with pagination.", "application/json", @@ -315,7 +318,18 @@ defmodule BlockScoutWeb.API.V2.Proxy.AccountAbstractionController do operation 
:operations, summary: "List of recent user operations", description: "Retrieves a list of recent user operations.", - parameters: base_params() ++ define_paging_params(["page_size", "page_token"]), + parameters: + base_params() ++ + [ + sender_address_hash_param(), + bundler_address_hash_param(), + paymaster_address_hash_param(), + factory_address_hash_param(), + query_transaction_hash_param(), + entry_point_address_hash_param(), + bundle_index_param(), + query_block_number_param() + ] ++ define_paging_params(["page_size", "page_token"]), responses: [ ok: {"List of user operations with pagination.", "application/json", diff --git a/apps/block_scout_web/lib/block_scout_web/schemas/api/v2/general.ex b/apps/block_scout_web/lib/block_scout_web/schemas/api/v2/general.ex index 95d852edba7b..5751dfad7cfc 100644 --- a/apps/block_scout_web/lib/block_scout_web/schemas/api/v2/general.ex +++ b/apps/block_scout_web/lib/block_scout_web/schemas/api/v2/general.ex @@ -852,6 +852,104 @@ defmodule BlockScoutWeb.Schemas.API.V2.General do } end + @doc """ + Returns a parameter definition for a user operation sender address hash in the query. + """ + @spec sender_address_hash_param() :: Parameter.t() + def sender_address_hash_param do + %Parameter{ + name: :sender, + in: :query, + schema: AddressHash, + required: false, + description: "User operation sender address hash" + } + end + + @doc """ + Returns a parameter definition for a user operation bundler address hash in the query. + """ + @spec bundler_address_hash_param() :: Parameter.t() + def bundler_address_hash_param do + %Parameter{ + name: :bundler, + in: :query, + schema: AddressHash, + required: false, + description: "User operation bundler address hash" + } + end + + @doc """ + Returns a parameter definition for a user operation paymaster address hash in the query. 
+ """ + @spec paymaster_address_hash_param() :: Parameter.t() + def paymaster_address_hash_param do + %Parameter{ + name: :paymaster, + in: :query, + schema: AddressHash, + required: false, + description: "User operation paymaster address hash" + } + end + + @doc """ + Returns a parameter definition for a user operation factory address hash in the query. + """ + @spec factory_address_hash_param() :: Parameter.t() + def factory_address_hash_param do + %Parameter{ + name: :factory, + in: :query, + schema: AddressHash, + required: false, + description: "User operation factory address hash" + } + end + + @doc """ + Returns a parameter definition for a user operation entry point address hash in the query. + """ + @spec entry_point_address_hash_param() :: Parameter.t() + def entry_point_address_hash_param do + %Parameter{ + name: :entry_point, + in: :query, + schema: AddressHash, + required: false, + description: "User operation entry point address hash" + } + end + + @doc """ + Returns a parameter definition for a user operation bundle index in the query. + """ + @spec bundle_index_param() :: Parameter.t() + def bundle_index_param do + %Parameter{ + name: :bundle_index, + in: :query, + schema: %Schema{type: :integer, minimum: 0}, + required: false, + description: "User operation bundle index" + } + end + + @doc """ + Returns a parameter definition for a user operation block number in the query. + """ + @spec query_block_number_param() :: Parameter.t() + def query_block_number_param do + %Parameter{ + name: :block_number, + in: :query, + schema: %Schema{type: :integer, minimum: 0}, + required: false, + description: "User operation block number" + } + end + @doc """ Returns a list of base parameters (api_key and key). 
""" From e1ff98255c7a6832dc74907d8e4d922f05208265 Mon Sep 17 00:00:00 2001 From: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Date: Fri, 13 Mar 2026 18:24:44 +0400 Subject: [PATCH 16/42] fix: Add dependency between heavy internal transactions migrations (#14107) --- ...l_transactions_block_hash_transaction_index_index_index.ex | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/apps/explorer/lib/explorer/migrator/heavy_db_index_operation/drop_internal_transactions_block_hash_transaction_index_index_index.ex b/apps/explorer/lib/explorer/migrator/heavy_db_index_operation/drop_internal_transactions_block_hash_transaction_index_index_index.ex index 757617cad3b7..23a2995af7b6 100644 --- a/apps/explorer/lib/explorer/migrator/heavy_db_index_operation/drop_internal_transactions_block_hash_transaction_index_index_index.ex +++ b/apps/explorer/lib/explorer/migrator/heavy_db_index_operation/drop_internal_transactions_block_hash_transaction_index_index_index.ex @@ -11,6 +11,7 @@ defmodule Explorer.Migrator.HeavyDbIndexOperation.DropInternalTransactionsBlockH MigrationStatus } + alias Explorer.Migrator.HeavyDbIndexOperation.DropInternalTransactionsCreatedContractAddressHashPartialIndex alias Explorer.Migrator.HeavyDbIndexOperation.Helper, as: HeavyDbIndexOperationHelper @table_name :internal_transactions @@ -29,7 +30,8 @@ defmodule Explorer.Migrator.HeavyDbIndexOperation.DropInternalTransactionsBlockH @impl HeavyDbIndexOperation def dependent_from_migrations do [ - EmptyInternalTransactionsData.migration_name() + EmptyInternalTransactionsData.migration_name(), + DropInternalTransactionsCreatedContractAddressHashPartialIndex.migration_name() ] end From 732ec4eabbc3fcb2649ebe74d5710e864ad080a8 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Fri, 13 Mar 2026 17:51:28 +0300 Subject: [PATCH 17/42] Update CHANGELOG --- CHANGELOG.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 
f66556fd846e..9cfcaed2100d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,27 @@ # Changelog +## 10.0.6 + +### 🐛 Bug Fixes + +- Add dependency between heavy internal transactions migrations ([#14107](https://github.com/blockscout/blockscout/issues/14107)) + + +## 10.0.5 + +### 🐛 Bug Fixes + +- Add missing query params in user ops swagger spec ([#14104](https://github.com/blockscout/blockscout/issues/14104)) +- State changes handle ERC-7984; nil tx.value ([#14101](https://github.com/blockscout/blockscout/issues/14101)) + + +## 10.0.4 + +### 🐛 Bug Fixes + +- `confirm_otp` after `OpenApiSpex` integration ([#14098](https://github.com/blockscout/blockscout/issues/14098)) + + ## 10.0.3 ### ⚙️ Miscellaneous Tasks From 7337a80d8a8c216aae2e53027611d98bf7b07f36 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Fri, 13 Mar 2026 18:03:33 +0300 Subject: [PATCH 18/42] chore: Return automatic chromedriver version definition (#14108) --- bin/install_chrome_headless.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bin/install_chrome_headless.sh b/bin/install_chrome_headless.sh index fab11cd9b32e..62d48923bc1d 100755 --- a/bin/install_chrome_headless.sh +++ b/bin/install_chrome_headless.sh @@ -1,8 +1,7 @@ export DISPLAY=:99.0 sh -e /etc/init.d/xvfb start -#export CHROMEDRIVER_VERSION=$(curl -s "https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions.json" | jq -r '.channels' | jq -r '.Stable' | jq -r '.version') -export CHROMEDRIVER_VERSION=144.0.7559.133 +export CHROMEDRIVER_VERSION=$(curl -s "https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions.json" | jq -r '.channels' | jq -r '.Stable' | jq -r '.version') curl -L -O "https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing/${CHROMEDRIVER_VERSION}/linux64/chromedriver-linux64.zip" unzip -j chromedriver-linux64.zip sudo chmod +x chromedriver From e93798342aadfc2c688c4243c471a40790ae63f6 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Mon, 16 
Mar 2026 12:04:58 +0300 Subject: [PATCH 19/42] Bump version to 10.0.6 --- .github/workflows/generate-swagger.yml | 2 +- .github/workflows/pre-release-arbitrum.yml | 2 +- .github/workflows/pre-release-celo.yml | 2 +- .github/workflows/pre-release-eth.yml | 2 +- .github/workflows/pre-release-filecoin.yml | 2 +- .github/workflows/pre-release-fuse.yml | 2 +- .github/workflows/pre-release-gnosis.yml | 2 +- .github/workflows/pre-release-optimism.yml | 2 +- .github/workflows/pre-release-polygon-zkevm.yml | 2 +- .github/workflows/pre-release-rootstock.yml | 2 +- .github/workflows/pre-release-scroll.yml | 2 +- .github/workflows/pre-release-zilliqa.yml | 2 +- .github/workflows/pre-release-zksync.yml | 2 +- .github/workflows/pre-release.yml | 2 +- .github/workflows/publish-docker-image-custom-build.yml | 2 +- .github/workflows/publish-docker-image-every-push.yml | 2 +- .github/workflows/publish-docker-image-for-arbitrum.yml | 2 +- .github/workflows/publish-docker-image-for-celo.yml | 2 +- .github/workflows/publish-docker-image-for-core.yml | 2 +- .github/workflows/publish-docker-image-for-eth-sepolia.yml | 2 +- .github/workflows/publish-docker-image-for-eth.yml | 2 +- .github/workflows/publish-docker-image-for-filecoin.yml | 2 +- .github/workflows/publish-docker-image-for-fuse.yml | 2 +- .github/workflows/publish-docker-image-for-gnosis-chain.yml | 2 +- .github/workflows/publish-docker-image-for-l2-staging.yml | 2 +- .github/workflows/publish-docker-image-for-lukso.yml | 2 +- .../publish-docker-image-for-optimism-exeperimental.yml | 2 +- .../workflows/publish-docker-image-for-optimism-worldchain.yml | 2 +- .github/workflows/publish-docker-image-for-optimism.yml | 2 +- .github/workflows/publish-docker-image-for-rootstock.yml | 2 +- .github/workflows/publish-docker-image-for-scroll.yml | 2 +- .github/workflows/publish-docker-image-for-zetachain.yml | 2 +- .github/workflows/publish-docker-image-for-zilliqa.yml | 2 +- .github/workflows/publish-docker-image-for-zkevm.yml | 2 +- 
.github/workflows/publish-docker-image-for-zksync.yml | 2 +- .github/workflows/publish-docker-image-old-ui.yml | 2 +- .github/workflows/publish-docker-image-staging-on-demand.yml | 2 +- .github/workflows/publish-regular-docker-image-on-demand.yml | 2 +- .github/workflows/release-arbitrum.yml | 2 +- .github/workflows/release-celo.yml | 2 +- .github/workflows/release-default.yml | 2 +- .github/workflows/release-eth.yml | 2 +- .github/workflows/release-filecoin.yml | 2 +- .github/workflows/release-fuse.yml | 2 +- .github/workflows/release-gnosis.yml | 2 +- .github/workflows/release-optimism.yml | 2 +- .github/workflows/release-polygon-zkevm.yml | 2 +- .github/workflows/release-rootstock.yml | 2 +- .github/workflows/release-scroll.yml | 2 +- .github/workflows/release-zetachain.yml | 2 +- .github/workflows/release-zilliqa.yml | 2 +- .github/workflows/release-zksync.yml | 2 +- apps/block_scout_web/mix.exs | 2 +- apps/ethereum_jsonrpc/mix.exs | 2 +- apps/explorer/lib/explorer/token/metadata_retriever.ex | 2 +- apps/explorer/mix.exs | 2 +- apps/indexer/mix.exs | 2 +- apps/nft_media_handler/mix.exs | 2 +- apps/utils/mix.exs | 2 +- docker/Makefile | 2 +- mix.exs | 2 +- rel/config.exs | 2 +- 62 files changed, 62 insertions(+), 62 deletions(-) diff --git a/.github/workflows/generate-swagger.yml b/.github/workflows/generate-swagger.yml index 9044072c04c5..37d4d43d4a35 100644 --- a/.github/workflows/generate-swagger.yml +++ b/.github/workflows/generate-swagger.yml @@ -17,7 +17,7 @@ on: env: OTP_VERSION: '27.3.4.6' ELIXIR_VERSION: '1.19.4' - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 jobs: matrix-builder: diff --git a/.github/workflows/pre-release-arbitrum.yml b/.github/workflows/pre-release-arbitrum.yml index 1fb2d7fb21e7..9dc7a0389227 100644 --- a/.github/workflows/pre-release-arbitrum.yml +++ b/.github/workflows/pre-release-arbitrum.yml @@ -21,7 +21,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + 
RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/pre-release-celo.yml b/.github/workflows/pre-release-celo.yml index 1ec77ff2d62a..571a2d1d0eb0 100644 --- a/.github/workflows/pre-release-celo.yml +++ b/.github/workflows/pre-release-celo.yml @@ -21,7 +21,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 API_GRAPHQL_MAX_COMPLEXITY: 10400 steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/pre-release-eth.yml b/.github/workflows/pre-release-eth.yml index 907debaedf1c..3679b49b5279 100644 --- a/.github/workflows/pre-release-eth.yml +++ b/.github/workflows/pre-release-eth.yml @@ -21,7 +21,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/pre-release-filecoin.yml b/.github/workflows/pre-release-filecoin.yml index c9b7973c6cb1..4ab2a0b75267 100644 --- a/.github/workflows/pre-release-filecoin.yml +++ b/.github/workflows/pre-release-filecoin.yml @@ -21,7 +21,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/pre-release-fuse.yml b/.github/workflows/pre-release-fuse.yml index 11e8d3ad0e5a..5caa84c393ad 100644 --- a/.github/workflows/pre-release-fuse.yml +++ b/.github/workflows/pre-release-fuse.yml @@ -21,7 +21,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/pre-release-gnosis.yml b/.github/workflows/pre-release-gnosis.yml index 91535d53abb1..47c355537142 100644 --- a/.github/workflows/pre-release-gnosis.yml +++ 
b/.github/workflows/pre-release-gnosis.yml @@ -21,7 +21,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/pre-release-optimism.yml b/.github/workflows/pre-release-optimism.yml index f975db403cd5..96ed49741c2b 100644 --- a/.github/workflows/pre-release-optimism.yml +++ b/.github/workflows/pre-release-optimism.yml @@ -21,7 +21,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/pre-release-polygon-zkevm.yml b/.github/workflows/pre-release-polygon-zkevm.yml index b1f4a47994a9..277650b9806c 100644 --- a/.github/workflows/pre-release-polygon-zkevm.yml +++ b/.github/workflows/pre-release-polygon-zkevm.yml @@ -21,7 +21,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/pre-release-rootstock.yml b/.github/workflows/pre-release-rootstock.yml index d2b0401aa961..56a99faad6ef 100644 --- a/.github/workflows/pre-release-rootstock.yml +++ b/.github/workflows/pre-release-rootstock.yml @@ -21,7 +21,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/pre-release-scroll.yml b/.github/workflows/pre-release-scroll.yml index 29354e646883..d94c747ba6fc 100644 --- a/.github/workflows/pre-release-scroll.yml +++ b/.github/workflows/pre-release-scroll.yml @@ -21,7 +21,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: 
actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/pre-release-zilliqa.yml b/.github/workflows/pre-release-zilliqa.yml index fed22d853d68..9cae20671b6d 100644 --- a/.github/workflows/pre-release-zilliqa.yml +++ b/.github/workflows/pre-release-zilliqa.yml @@ -21,7 +21,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/pre-release-zksync.yml b/.github/workflows/pre-release-zksync.yml index 808bf2f40616..790a8e47f465 100644 --- a/.github/workflows/pre-release-zksync.yml +++ b/.github/workflows/pre-release-zksync.yml @@ -21,7 +21,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml index 82bc51181674..63b13dbf0943 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release.yml @@ -21,7 +21,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/publish-docker-image-custom-build.yml b/.github/workflows/publish-docker-image-custom-build.yml index f50b534a6bf4..b675d12d6092 100644 --- a/.github/workflows/publish-docker-image-custom-build.yml +++ b/.github/workflows/publish-docker-image-custom-build.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/publish-docker-image-every-push.yml b/.github/workflows/publish-docker-image-every-push.yml index 89919027b990..8d0c9f1b8aeb 100644 --- 
a/.github/workflows/publish-docker-image-every-push.yml +++ b/.github/workflows/publish-docker-image-every-push.yml @@ -11,7 +11,7 @@ on: env: OTP_VERSION: '27.3.4.6' ELIXIR_VERSION: '1.19.4' - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 permissions: contents: read diff --git a/.github/workflows/publish-docker-image-for-arbitrum.yml b/.github/workflows/publish-docker-image-for-arbitrum.yml index b3cd9d66fcf3..8a6eb25155d9 100644 --- a/.github/workflows/publish-docker-image-for-arbitrum.yml +++ b/.github/workflows/publish-docker-image-for-arbitrum.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: arbitrum steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-celo.yml b/.github/workflows/publish-docker-image-for-celo.yml index 94a860d4d418..a9088a007aea 100644 --- a/.github/workflows/publish-docker-image-for-celo.yml +++ b/.github/workflows/publish-docker-image-for-celo.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: optimism-celo API_GRAPHQL_MAX_COMPLEXITY: 10400 steps: diff --git a/.github/workflows/publish-docker-image-for-core.yml b/.github/workflows/publish-docker-image-for-core.yml index 8a389dd2519b..5dd8f4c894e2 100644 --- a/.github/workflows/publish-docker-image-for-core.yml +++ b/.github/workflows/publish-docker-image-for-core.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: poa steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-eth-sepolia.yml b/.github/workflows/publish-docker-image-for-eth-sepolia.yml index d18eef4d3f46..17baaeebfb29 100644 --- a/.github/workflows/publish-docker-image-for-eth-sepolia.yml +++ 
b/.github/workflows/publish-docker-image-for-eth-sepolia.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: eth-sepolia steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-eth.yml b/.github/workflows/publish-docker-image-for-eth.yml index 0532c162424b..e6bf2a024bea 100644 --- a/.github/workflows/publish-docker-image-for-eth.yml +++ b/.github/workflows/publish-docker-image-for-eth.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: ethereum steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-filecoin.yml b/.github/workflows/publish-docker-image-for-filecoin.yml index 4e1ccfa95026..f1fa572cccfe 100644 --- a/.github/workflows/publish-docker-image-for-filecoin.yml +++ b/.github/workflows/publish-docker-image-for-filecoin.yml @@ -13,7 +13,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: filecoin steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-fuse.yml b/.github/workflows/publish-docker-image-for-fuse.yml index 8a98a16fb023..ced2d5361739 100644 --- a/.github/workflows/publish-docker-image-for-fuse.yml +++ b/.github/workflows/publish-docker-image-for-fuse.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: fuse steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-gnosis-chain.yml b/.github/workflows/publish-docker-image-for-gnosis-chain.yml index 6142c7c1930c..90f7701ee2ef 100644 --- a/.github/workflows/publish-docker-image-for-gnosis-chain.yml +++ 
b/.github/workflows/publish-docker-image-for-gnosis-chain.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: xdai steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-l2-staging.yml b/.github/workflows/publish-docker-image-for-l2-staging.yml index d512cf477f85..f1615d2851d1 100644 --- a/.github/workflows/publish-docker-image-for-l2-staging.yml +++ b/.github/workflows/publish-docker-image-for-l2-staging.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: optimism-l2-advanced steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-lukso.yml b/.github/workflows/publish-docker-image-for-lukso.yml index 4493c1379961..19bd1b591fb5 100644 --- a/.github/workflows/publish-docker-image-for-lukso.yml +++ b/.github/workflows/publish-docker-image-for-lukso.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: lukso steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-optimism-exeperimental.yml b/.github/workflows/publish-docker-image-for-optimism-exeperimental.yml index 21c53b69fa82..69f795365fba 100644 --- a/.github/workflows/publish-docker-image-for-optimism-exeperimental.yml +++ b/.github/workflows/publish-docker-image-for-optimism-exeperimental.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: optimism steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-optimism-worldchain.yml b/.github/workflows/publish-docker-image-for-optimism-worldchain.yml index 
efd4ab07aca3..d6f53fb847e7 100644 --- a/.github/workflows/publish-docker-image-for-optimism-worldchain.yml +++ b/.github/workflows/publish-docker-image-for-optimism-worldchain.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: optimism steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-optimism.yml b/.github/workflows/publish-docker-image-for-optimism.yml index 5536edf43918..2dcf39af67bb 100644 --- a/.github/workflows/publish-docker-image-for-optimism.yml +++ b/.github/workflows/publish-docker-image-for-optimism.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: optimism steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-rootstock.yml b/.github/workflows/publish-docker-image-for-rootstock.yml index 381551bcf2c6..b025227cd68e 100644 --- a/.github/workflows/publish-docker-image-for-rootstock.yml +++ b/.github/workflows/publish-docker-image-for-rootstock.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: rsk steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-scroll.yml b/.github/workflows/publish-docker-image-for-scroll.yml index 6fbcfb7c09f0..f46d34619f15 100644 --- a/.github/workflows/publish-docker-image-for-scroll.yml +++ b/.github/workflows/publish-docker-image-for-scroll.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: scroll steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-zetachain.yml 
b/.github/workflows/publish-docker-image-for-zetachain.yml index 2a33c8e2a04a..4ef6c599e0d5 100644 --- a/.github/workflows/publish-docker-image-for-zetachain.yml +++ b/.github/workflows/publish-docker-image-for-zetachain.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: zetachain steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-zilliqa.yml b/.github/workflows/publish-docker-image-for-zilliqa.yml index 92fc5d58e03d..ae82a4af824b 100644 --- a/.github/workflows/publish-docker-image-for-zilliqa.yml +++ b/.github/workflows/publish-docker-image-for-zilliqa.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: zilliqa steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-zkevm.yml b/.github/workflows/publish-docker-image-for-zkevm.yml index 8a6d77230821..4b511f05f218 100644 --- a/.github/workflows/publish-docker-image-for-zkevm.yml +++ b/.github/workflows/publish-docker-image-for-zkevm.yml @@ -14,7 +14,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: zkevm steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-for-zksync.yml b/.github/workflows/publish-docker-image-for-zksync.yml index 86c0202562b7..6f3106efcb6c 100644 --- a/.github/workflows/publish-docker-image-for-zksync.yml +++ b/.github/workflows/publish-docker-image-for-zksync.yml @@ -13,7 +13,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 DOCKER_CHAIN_NAME: zksync steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/publish-docker-image-old-ui.yml 
b/.github/workflows/publish-docker-image-old-ui.yml index b2f1febb1adf..b31f20e21c3d 100644 --- a/.github/workflows/publish-docker-image-old-ui.yml +++ b/.github/workflows/publish-docker-image-old-ui.yml @@ -16,7 +16,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/publish-docker-image-staging-on-demand.yml b/.github/workflows/publish-docker-image-staging-on-demand.yml index b34742fce9c4..3d2117b51a98 100644 --- a/.github/workflows/publish-docker-image-staging-on-demand.yml +++ b/.github/workflows/publish-docker-image-staging-on-demand.yml @@ -12,7 +12,7 @@ on: env: OTP_VERSION: '27.3.4.6' ELIXIR_VERSION: '1.19.4' - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 permissions: contents: read diff --git a/.github/workflows/publish-regular-docker-image-on-demand.yml b/.github/workflows/publish-regular-docker-image-on-demand.yml index e0a3e276d3dd..83d54d0cbbab 100644 --- a/.github/workflows/publish-regular-docker-image-on-demand.yml +++ b/.github/workflows/publish-regular-docker-image-on-demand.yml @@ -5,7 +5,7 @@ on: env: OTP_VERSION: '27.3.4.6' ELIXIR_VERSION: '1.19.4' - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 permissions: contents: read diff --git a/.github/workflows/release-arbitrum.yml b/.github/workflows/release-arbitrum.yml index 4521e21296ed..38c22993d230 100644 --- a/.github/workflows/release-arbitrum.yml +++ b/.github/workflows/release-arbitrum.yml @@ -18,7 +18,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/release-celo.yml b/.github/workflows/release-celo.yml index 6ef0a15abc48..c60840590743 100644 --- a/.github/workflows/release-celo.yml +++ b/.github/workflows/release-celo.yml @@ -18,7 +18,7 @@ jobs: 
name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 API_GRAPHQL_MAX_COMPLEXITY: 10400 steps: - uses: actions/checkout@v5 diff --git a/.github/workflows/release-default.yml b/.github/workflows/release-default.yml index b3df43be0a04..d5fb327067e4 100644 --- a/.github/workflows/release-default.yml +++ b/.github/workflows/release-default.yml @@ -18,7 +18,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/release-eth.yml b/.github/workflows/release-eth.yml index 8bf64cf8ffb0..f3c72d1291e6 100644 --- a/.github/workflows/release-eth.yml +++ b/.github/workflows/release-eth.yml @@ -18,7 +18,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/release-filecoin.yml b/.github/workflows/release-filecoin.yml index 5cbdb94b4791..0a2f39ccaf5e 100644 --- a/.github/workflows/release-filecoin.yml +++ b/.github/workflows/release-filecoin.yml @@ -18,7 +18,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/release-fuse.yml b/.github/workflows/release-fuse.yml index 849a1a086cd9..79d805a5dded 100644 --- a/.github/workflows/release-fuse.yml +++ b/.github/workflows/release-fuse.yml @@ -18,7 +18,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/release-gnosis.yml b/.github/workflows/release-gnosis.yml index 3b87858b9e99..efefd174d5eb 100644 --- 
a/.github/workflows/release-gnosis.yml +++ b/.github/workflows/release-gnosis.yml @@ -18,7 +18,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/release-optimism.yml b/.github/workflows/release-optimism.yml index 92a0b4bc81cf..e3315377743d 100644 --- a/.github/workflows/release-optimism.yml +++ b/.github/workflows/release-optimism.yml @@ -18,7 +18,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/release-polygon-zkevm.yml b/.github/workflows/release-polygon-zkevm.yml index b70fbfbd31d1..4bff9d57948b 100644 --- a/.github/workflows/release-polygon-zkevm.yml +++ b/.github/workflows/release-polygon-zkevm.yml @@ -18,7 +18,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/release-rootstock.yml b/.github/workflows/release-rootstock.yml index eee23bbbf3d9..1fdb9a4c532e 100644 --- a/.github/workflows/release-rootstock.yml +++ b/.github/workflows/release-rootstock.yml @@ -18,7 +18,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/release-scroll.yml b/.github/workflows/release-scroll.yml index c3357d425a44..dc092e5b873b 100644 --- a/.github/workflows/release-scroll.yml +++ b/.github/workflows/release-scroll.yml @@ -18,7 +18,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup 
repo diff --git a/.github/workflows/release-zetachain.yml b/.github/workflows/release-zetachain.yml index 3760c2bfe736..dbca08a892ab 100644 --- a/.github/workflows/release-zetachain.yml +++ b/.github/workflows/release-zetachain.yml @@ -18,7 +18,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/release-zilliqa.yml b/.github/workflows/release-zilliqa.yml index c83bd7b2eaf2..aae7b10a88a2 100644 --- a/.github/workflows/release-zilliqa.yml +++ b/.github/workflows/release-zilliqa.yml @@ -18,7 +18,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/.github/workflows/release-zksync.yml b/.github/workflows/release-zksync.yml index babec7fbab9c..d8b42c123dd3 100644 --- a/.github/workflows/release-zksync.yml +++ b/.github/workflows/release-zksync.yml @@ -18,7 +18,7 @@ jobs: name: Push Docker image to GitHub Container Registry runs-on: build env: - RELEASE_VERSION: 10.0.1 + RELEASE_VERSION: 10.0.6 steps: - uses: actions/checkout@v5 - name: Setup repo diff --git a/apps/block_scout_web/mix.exs b/apps/block_scout_web/mix.exs index f4121ce84aa5..cf016ee524b6 100644 --- a/apps/block_scout_web/mix.exs +++ b/apps/block_scout_web/mix.exs @@ -19,7 +19,7 @@ defmodule BlockScoutWeb.Mixfile do lockfile: "../../mix.lock", package: package(), start_permanent: Mix.env() == :prod, - version: "10.0.1", + version: "10.0.6", xref: [ exclude: [ Explorer.Chain.PolygonZkevm.Reader, diff --git a/apps/ethereum_jsonrpc/mix.exs b/apps/ethereum_jsonrpc/mix.exs index c221d356da0c..80eaae6296bd 100644 --- a/apps/ethereum_jsonrpc/mix.exs +++ b/apps/ethereum_jsonrpc/mix.exs @@ -19,7 +19,7 @@ defmodule EthereumJSONRPC.MixProject do elixirc_paths: elixirc_paths(Mix.env()), lockfile: 
"../../mix.lock", start_permanent: Mix.env() == :prod, - version: "10.0.1" + version: "10.0.6" ] end diff --git a/apps/explorer/lib/explorer/token/metadata_retriever.ex b/apps/explorer/lib/explorer/token/metadata_retriever.ex index 28a0503c3e18..39d8a0eaf95d 100644 --- a/apps/explorer/lib/explorer/token/metadata_retriever.ex +++ b/apps/explorer/lib/explorer/token/metadata_retriever.ex @@ -13,7 +13,7 @@ defmodule Explorer.Token.MetadataRetriever do @no_uri_error "no uri" @vm_execution_error "VM execution error" @invalid_base64_data "invalid data:application/json;base64" - @default_headers [{"User-Agent", "blockscout-10.0.1"}] + @default_headers [{"User-Agent", "blockscout-10.0.6"}] # https://eips.ethereum.org/EIPS/eip-1155#metadata @erc1155_token_id_placeholder "{id}" diff --git a/apps/explorer/mix.exs b/apps/explorer/mix.exs index a2276ff26d5c..30d5de6c3283 100644 --- a/apps/explorer/mix.exs +++ b/apps/explorer/mix.exs @@ -20,7 +20,7 @@ defmodule Explorer.Mixfile do lockfile: "../../mix.lock", package: package(), start_permanent: Mix.env() == :prod, - version: "10.0.1", + version: "10.0.6", xref: [exclude: [BlockScoutWeb.Routers.WebRouter.Helpers, Indexer.Helper, Indexer.Fetcher.InternalTransaction]] ] end diff --git a/apps/indexer/mix.exs b/apps/indexer/mix.exs index d106e9cc532d..af4a6b21c54d 100644 --- a/apps/indexer/mix.exs +++ b/apps/indexer/mix.exs @@ -14,7 +14,7 @@ defmodule Indexer.MixProject do elixirc_paths: elixirc_paths(Mix.env()), lockfile: "../../mix.lock", start_permanent: Mix.env() == :prod, - version: "10.0.1", + version: "10.0.6", xref: [ exclude: [ Explorer.Chain.Optimism.Deposit, diff --git a/apps/nft_media_handler/mix.exs b/apps/nft_media_handler/mix.exs index 97bd4f5fa435..9dcea18315f6 100644 --- a/apps/nft_media_handler/mix.exs +++ b/apps/nft_media_handler/mix.exs @@ -4,7 +4,7 @@ defmodule NFTMediaHandler.MixProject do def project do [ app: :nft_media_handler, - version: "10.0.1", + version: "10.0.6", build_path: "../../_build", config_path: 
"../../config/config.exs", deps_path: "../../deps", diff --git a/apps/utils/mix.exs b/apps/utils/mix.exs index f83172919312..49f5c6fe83a9 100644 --- a/apps/utils/mix.exs +++ b/apps/utils/mix.exs @@ -4,7 +4,7 @@ defmodule Utils.MixProject do def project do [ app: :utils, - version: "10.0.1", + version: "10.0.6", build_path: "../../_build", # config_path: "../../config/config.exs", deps_path: "../../deps", diff --git a/docker/Makefile b/docker/Makefile index 629a1b971bbc..153075fbc0e5 100644 --- a/docker/Makefile +++ b/docker/Makefile @@ -10,7 +10,7 @@ STATS_CONTAINER_NAME := stats STATS_DB_CONTAINER_NAME := stats-db PROXY_CONTAINER_NAME := proxy PG_CONTAINER_NAME := postgres -RELEASE_VERSION ?= '10.0.1' +RELEASE_VERSION ?= '10.0.6' TAG := $(RELEASE_VERSION)-commit-$(shell git log -1 --pretty=format:"%h") STABLE_TAG := $(RELEASE_VERSION) diff --git a/mix.exs b/mix.exs index be579acbb908..931f059eda49 100644 --- a/mix.exs +++ b/mix.exs @@ -7,7 +7,7 @@ defmodule BlockScout.Mixfile do [ # app: :block_scout, # aliases: aliases(config_env()), - version: "10.0.1", + version: "10.0.6", apps_path: "apps", deps: deps(), dialyzer: dialyzer(), diff --git a/rel/config.exs b/rel/config.exs index 8f1658eff3b4..ead82dda7c34 100644 --- a/rel/config.exs +++ b/rel/config.exs @@ -71,7 +71,7 @@ end # will be used by default release :blockscout do - set version: "10.0.1" + set version: "10.0.6" set applications: [ :runtime_tools, block_scout_web: :permanent, From d62abc175ce089ab9968d3979aa67748c1b23945 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Mon, 16 Mar 2026 18:08:37 +0300 Subject: [PATCH 20/42] fix: Zetachain internal txs fetching error (#14122) --- .../explorer/chain/pending_transaction_operation.ex | 2 +- .../lib/indexer/fetcher/internal_transaction.ex | 6 +++--- .../indexer/fetcher/internal_transaction_test.exs | 13 +++++++++++++ 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/apps/explorer/lib/explorer/chain/pending_transaction_operation.ex 
b/apps/explorer/lib/explorer/chain/pending_transaction_operation.ex index dd84f1170830..0f7d085377c9 100644 --- a/apps/explorer/lib/explorer/chain/pending_transaction_operation.ex +++ b/apps/explorer/lib/explorer/chain/pending_transaction_operation.ex @@ -80,7 +80,7 @@ defmodule Explorer.Chain.PendingTransactionOperation do from( po in __MODULE__, join: t in assoc(po, :transaction), - select: %{block_number: t.block_number, hash: t.hash, index: t.index}, + select: %{block_number: t.block_number, hash: t.hash, index: t.index, type: t.type}, order_by: [{^direction, t.block_number}] ) diff --git a/apps/indexer/lib/indexer/fetcher/internal_transaction.ex b/apps/indexer/lib/indexer/fetcher/internal_transaction.ex index 277f2b5782e5..a806a1e40bec 100644 --- a/apps/indexer/lib/indexer/fetcher/internal_transaction.ex +++ b/apps/indexer/lib/indexer/fetcher/internal_transaction.ex @@ -70,7 +70,7 @@ defmodule Indexer.Fetcher.InternalTransaction do case queue_data_type(json_rpc_named_arguments) do :block_number -> block_numbers - :transaction_params -> Enum.map(transactions, &Map.take(&1, [:block_number, :hash, :index])) + :transaction_params -> Enum.map(transactions, &Map.take(&1, [:block_number, :hash, :index, :type])) end end @@ -279,10 +279,10 @@ defmodule Indexer.Fetcher.InternalTransaction do @doc """ Filters out transactions that are known to not have traceable internal transactions. 
""" - @spec filter_non_traceable_transactions([Transaction.t()]) :: [Transaction.t()] + @spec filter_non_traceable_transactions([Transaction.t() | map()]) :: [Transaction.t() | map()] def filter_non_traceable_transactions(transactions) do case Application.get_env(:explorer, :chain_type) do - :zetachain -> Enum.reject(transactions, &(&1.type == @zetachain_non_traceable_type)) + :zetachain -> Enum.reject(transactions, &(Map.get(&1, :type) == @zetachain_non_traceable_type)) :zilliqa -> Enum.reject(transactions, &ZilliqaHelper.scilla_transaction?/1) _ -> transactions end diff --git a/apps/indexer/test/indexer/fetcher/internal_transaction_test.exs b/apps/indexer/test/indexer/fetcher/internal_transaction_test.exs index 0855a4c3d1fd..de6575d02237 100644 --- a/apps/indexer/test/indexer/fetcher/internal_transaction_test.exs +++ b/apps/indexer/test/indexer/fetcher/internal_transaction_test.exs @@ -524,6 +524,19 @@ defmodule Indexer.Fetcher.InternalTransactionTest do assert %{block_number: ^block_number, block_hash: ^block_hash} = Repo.one(PendingBlockOperation) end + describe "filter_non_traceable_transactions/1" do + test "does not raise when transaction params do not include type on zetachain" do + chain_type = Application.get_env(:explorer, :chain_type) + Application.put_env(:explorer, :chain_type, :zetachain) + + on_exit(fn -> Application.put_env(:explorer, :chain_type, chain_type) end) + + transaction_params = %{block_number: 13_393_871, hash: "0x123", index: 427} + + assert [transaction_params] == InternalTransaction.filter_non_traceable_transactions([transaction_params]) + end + end + if Application.compile_env(:explorer, :chain_type) == :arbitrum do test "fetches internal transactions from Arbitrum", %{ json_rpc_named_arguments: json_rpc_named_arguments From ea819349051e1af32da6b96d115c2acd5a06a0ae Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Mar 2026 12:06:21 +0300 Subject: [PATCH 21/42] 
chore(deps): bump phoenix_live_view from 1.1.26 to 1.1.27 (#14129) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix.lock b/mix.lock index 871df9d7416b..e2871b30c1d9 100644 --- a/mix.lock +++ b/mix.lock @@ -135,7 +135,7 @@ "phoenix_html": {:hex, :phoenix_html, "4.2.1", "35279e2a39140068fc03f8874408d58eef734e488fc142153f055c5454fd1c08", [:mix], [], "hexpm", "cff108100ae2715dd959ae8f2a8cef8e20b593f8dfd031c9cba92702cf23e053"}, "phoenix_html_helpers": {:hex, :phoenix_html_helpers, "1.0.1", "7eed85c52eff80a179391036931791ee5d2f713d76a81d0d2c6ebafe1e11e5ec", [:mix], [{:phoenix_html, "~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "cffd2385d1fa4f78b04432df69ab8da63dc5cf63e07b713a4dcf36a3740e3090"}, "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.6.2", "b18b0773a1ba77f28c52decbb0f10fd1ac4d3ae5b8632399bbf6986e3b665f62", [:mix], [{:file_system, "~> 0.2.10 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "d1f89c18114c50d394721365ffb428cce24f1c13de0467ffa773e2ff4a30d5b9"}, - "phoenix_live_view": {:hex, :phoenix_live_view, "1.1.26", "306af67d6557cc01f880107cc459f1fa0acbaab60bc8c027a368ba16b3544473", [:mix], [{:igniter, ">= 0.6.16 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:lazy_html, "~> 0.1.0", [hex: :lazy_html, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6.15 or ~> 1.7.0 or ~> 1.8.0-rc", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.3 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: 
:phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.15", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "0ec34b24c69aa70c4f25a8901effe3462bee6c8ca80a9a4a7685215e3a0ac34e"}, + "phoenix_live_view": {:hex, :phoenix_live_view, "1.1.27", "9afcab28b0c82afdc51044e661bcd5b8de53d242593d34c964a37710b40a42af", [:mix], [{:igniter, ">= 0.6.16 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:lazy_html, "~> 0.1.0", [hex: :lazy_html, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6.15 or ~> 1.7.0 or ~> 1.8.0-rc", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.3 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.15", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "415735d0b2c612c9104108b35654e977626a0cb346711e1e4f1ed16e3c827ede"}, "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.2.0", "ff3a5616e1bed6804de7773b92cbccfc0b0f473faf1f63d7daf1206c7aeaaa6f", [:mix], [], "hexpm", "adc313a5bf7136039f63cfd9668fde73bba0765e0614cba80c06ac9460ff3e96"}, "phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"}, "phoenix_view": {:hex, :phoenix_view, "2.0.4", "b45c9d9cf15b3a1af5fb555c674b525391b6a1fe975f040fb4d913397b31abf4", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, 
{:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "4e992022ce14f31fe57335db27a28154afcc94e9983266835bb3040243eb620b"}, From efe5f6f320124a7d5b96e010834eb14cfe7a32e4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Mar 2026 12:07:14 +0300 Subject: [PATCH 22/42] chore(deps): bump ex_cldr_units from 3.20.1 to 3.20.2 (#14127) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/mix.lock b/mix.lock index e2871b30c1d9..f82fd904bb44 100644 --- a/mix.lock +++ b/mix.lock @@ -20,7 +20,7 @@ "cbor": {:hex, :cbor, "1.0.1", "39511158e8ea5a57c1fcb9639aaa7efde67129678fee49ebbda780f6f24959b0", [:mix], [], "hexpm", "5431acbe7a7908f17f6a9cd43311002836a34a8ab01876918d8cfb709cd8b6a2"}, "cc_precompiler": {:hex, :cc_precompiler, "0.1.11", "8c844d0b9fb98a3edea067f94f616b3f6b29b959b6b3bf25fee94ffe34364768", [:mix], [{:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "3427232caf0835f94680e5bcf082408a70b48ad68a5f5c0b02a3bea9f3a075b9"}, "certifi": {:hex, :certifi, "2.15.0", "0e6e882fcdaaa0a5a9f2b3db55b1394dba07e8d6d9bcad08318fb604c6839712", [:rebar3], [], "hexpm", "b147ed22ce71d72eafdad94f055165c1c182f61a2ff49df28bcc71d1d5b94a60"}, - "cldr_utils": {:hex, :cldr_utils, "2.29.4", "11437b0bf9a0d57db4eccdf751c49f675a04fa4261c5dae1e23552a0347e25c9", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.5", [hex: :certifi, repo: "hexpm", optional: true]}, {:decimal, "~> 1.9 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "e72a43e69a3f546979085cbdbeae7e9049998cd21cedfdd796cff9155998114e"}, + "cldr_utils": {:hex, :cldr_utils, "2.29.5", "f43161e04acb4016f5841b2320d69120d51827f5346babb2227893a2c5916dc8", [:mix], [{:castore, "~> 
0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.5", [hex: :certifi, repo: "hexpm", optional: true]}, {:decimal, "~> 1.9 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "962d3a2028b232ee0a5373941dc411028a9442f53444a4d5d2c354f687db1835"}, "cloak": {:hex, :cloak, "1.1.4", "aba387b22ea4d80d92d38ab1890cc528b06e0e7ef2a4581d71c3fdad59e997e7", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "92b20527b9aba3d939fab0dd32ce592ff86361547cfdc87d74edce6f980eb3d7"}, "cloak_ecto": {:hex, :cloak_ecto, "1.3.0", "0de127c857d7452ba3c3367f53fb814b0410ff9c680a8d20fbe8b9a3c57a1118", [:mix], [{:cloak, "~> 1.1.1", [hex: :cloak, repo: "hexpm", optional: false]}, {:ecto, "~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}], "hexpm", "314beb0c123b8a800418ca1d51065b27ba3b15f085977e65c0f7b2adab2de1cc"}, "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"}, @@ -55,11 +55,11 @@ "ex_aws": {:hex, :ex_aws, "2.6.1", "194582c7b09455de8a5ab18a0182e6dd937d53df82be2e63c619d01bddaccdfa", [:mix], [{:configparser_ex, "~> 5.0", [hex: :configparser_ex, repo: "hexpm", optional: true]}, {:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: true]}, {:jsx, "~> 2.8 or ~> 3.0", [hex: :jsx, repo: "hexpm", optional: true]}, {:mime, "~> 1.2 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:req, "~> 0.5.10 or ~> 0.6 or ~> 1.0", [hex: :req, repo: "hexpm", optional: true]}, {:sweet_xml, "~> 0.7", [hex: :sweet_xml, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "67842a08c90a1d9a09dbe4ac05754175c7ca253abe4912987c759395d4bd9d26"}, "ex_aws_s3": {:hex, :ex_aws_s3, "2.5.9", 
"862b7792f2e60d7010e2920d79964e3fab289bc0fd951b0ba8457a3f7f9d1199", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm", "a480d2bb2da64610014021629800e1e9457ca5e4a62f6775bffd963360c2bf90"}, "ex_brotli": {:hex, :ex_brotli, "0.5.0", "573645db5201317b6176b8858b668ea4ca89dc5c21852e84b9867579d483c220", [:mix], [{:phoenix, ">= 0.0.0", [hex: :phoenix, repo: "hexpm", optional: true]}, {:rustler, "~> 0.29", [hex: :rustler, repo: "hexpm", optional: true]}, {:rustler_precompiled, "~> 0.6", [hex: :rustler_precompiled, repo: "hexpm", optional: false]}], "hexpm", "8447d98d51f8f312629fd38619d4f564507dcf3a03d175c3f8f4ddf98e46dd92"}, - "ex_cldr": {:hex, :ex_cldr, "2.47.1", "2dd2f0da2d5720bf413e0320cfd0ea7f0259a888c33e727c5f0db6bab3380252", [:mix], [{:cldr_utils, "~> 2.28", [hex: :cldr_utils, repo: "hexpm", optional: false]}, {:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:gettext, "~> 0.19 or ~> 1.0", [hex: :gettext, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: true]}], "hexpm", "2555d6599d16311a096d8cb2d02e9dc3011ca02abbae446817d4f445a286c758"}, - "ex_cldr_currencies": {:hex, :ex_cldr_currencies, "2.17.0", "c38d76339dbee413f7dd1aba4cdf05758bd4c0bbfe9c3b1c8602f96082c2890a", [:mix], [{:ex_cldr, "~> 2.38", [hex: :ex_cldr, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "9af59bd29407dcca59fa39ded8c1649ae1cf6ec29fd0611576dcad0279bce0db"}, - "ex_cldr_lists": {:hex, :ex_cldr_lists, "2.11.1", "ad18f861d7c5ca82aac6d173469c6a2339645c96790172ab0aa255b64fb7303b", [:mix], [{:ex_cldr_numbers, "~> 2.25", [hex: :ex_cldr_numbers, repo: "hexpm", optional: false]}, {:ex_doc, "~> 0.18", [hex: :ex_doc, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: 
:jason, repo: "hexpm", optional: true]}], "hexpm", "00161c04510ccb3f18b19a6b8562e50c21f1e9c15b8ff4c934bea5aad0b4ade2"}, - "ex_cldr_numbers": {:hex, :ex_cldr_numbers, "2.38.0", "b5564b57d3769c85e16689472a9bb65804f71ccd3484144e31998398fda25ad1", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:digital_token, "~> 0.3 or ~> 1.0", [hex: :digital_token, repo: "hexpm", optional: false]}, {:ex_cldr, "~> 2.45", [hex: :ex_cldr, repo: "hexpm", optional: false]}, {:ex_cldr_currencies, "~> 2.17", [hex: :ex_cldr_currencies, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "b29e4d723c69db5d0a3f3bcef7583a0bc87dda1cd642187c589fec4bfc59a703"}, - "ex_cldr_units": {:hex, :ex_cldr_units, "3.20.1", "b27ec81814a67ed77d1c06c64ef74519b141497b74244ec18e1a1a2f78f8e313", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ex_cldr_lists, "~> 2.10", [hex: :ex_cldr_lists, repo: "hexpm", optional: false]}, {:ex_cldr_numbers, "~> 2.36", [hex: :ex_cldr_numbers, repo: "hexpm", optional: false]}, {:ex_doc, "~> 0.18", [hex: :ex_doc, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "879af22563b06570f16c28bed3decaadc0c1233906f4516b2d5d28e2bbbadee0"}, + "ex_cldr": {:hex, :ex_cldr, "2.47.2", "c866f4b45523abd25eea3e5252eb91364296dd15bddf970db1c78cd38f25df9a", [:mix], [{:cldr_utils, "~> 2.29", [hex: :cldr_utils, repo: "hexpm", optional: false]}, {:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:gettext, "~> 0.19 or ~> 1.0", [hex: :gettext, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: true]}], "hexpm", "4a7cef380a1c2546166b45d6ee5e8e2f707ea695b12ae6dadd250201588b4f16"}, + "ex_cldr_currencies": {:hex, :ex_cldr_currencies, "2.17.1", 
"89947c7102ff1b46fc46095624239a1c3d72499b19ed650597630771d9e4a662", [:mix], [{:ex_cldr, "~> 2.38", [hex: :ex_cldr, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "e266a0a61f4c7d83608154d49b59e4d7485b2aaa7ba1d0e17b3c55910595de51"}, + "ex_cldr_lists": {:hex, :ex_cldr_lists, "2.12.1", "8262d36a725bd77ab16cfeb8cac38efcd92f8e8039e2f8cf91164ec2cfb739a6", [:mix], [{:ex_cldr_numbers, "~> 2.25", [hex: :ex_cldr_numbers, repo: "hexpm", optional: false]}, {:ex_doc, "~> 0.18", [hex: :ex_doc, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "946bafd71bd98d8109f59bdd66ea55824b663329ab1bbf677489e144fd9ddc8d"}, + "ex_cldr_numbers": {:hex, :ex_cldr_numbers, "2.38.1", "e5124e288a8e672831e10d39530ecb5329bc9af2169709ebfbadc814cae7d4fb", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:digital_token, "~> 0.3 or ~> 1.0", [hex: :digital_token, repo: "hexpm", optional: false]}, {:ex_cldr, "~> 2.45", [hex: :ex_cldr, repo: "hexpm", optional: false]}, {:ex_cldr_currencies, "~> 2.17", [hex: :ex_cldr_currencies, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "4f95738f1dc4e821485e52226666f7691c9276bf6eba49cba8d23c8a2db05e84"}, + "ex_cldr_units": {:hex, :ex_cldr_units, "3.20.2", "c9c99a5e8e921d87bfbd424f1d91c0db04cb6ea00e02161a50c57519130dc2ea", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ex_cldr_lists, "~> 2.10", [hex: :ex_cldr_lists, repo: "hexpm", optional: false]}, {:ex_cldr_numbers, "~> 2.36", [hex: :ex_cldr_numbers, repo: "hexpm", optional: false]}, {:ex_doc, "~> 0.18", [hex: :ex_doc, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "b6ae5a4bbf1252a9ddffc0dbfe31bc81c0f2545050376ad5bdf6e99292b5d9cf"}, "ex_doc": {:hex, :ex_doc, "0.40.1", 
"67542e4b6dde74811cfd580e2c0149b78010fd13001fda7cfeb2b2c2ffb1344d", [:mix], [{:earmark_parser, "~> 1.4.44", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "bcef0e2d360d93ac19f01a85d58f91752d930c0a30e2681145feea6bd3516e00"}, "ex_eth_bls": {:hex, :ex_eth_bls, "0.1.0", "33c2bf424b360e4b64d7630dd72ec028dac63df56802d0a14ade54a23ad1c743", [:mix], [{:rustler, ">= 0.0.0", [hex: :rustler, repo: "hexpm", optional: true]}, {:rustler_precompiled, "~> 0.8", [hex: :rustler_precompiled, repo: "hexpm", optional: false]}], "hexpm", "25f6ffc36de4952e55adff1c712f0b9680850773678550f1da970d4d18329365"}, "ex_hash_ring": {:hex, :ex_hash_ring, "6.0.4", "bef9d2d796afbbe25ab5b5a7ed746e06b99c76604f558113c273466d52fa6d6b", [:mix], [], "hexpm", "89adabf31f7d3dfaa36802ce598ce918e9b5b33bae8909ac1a4d052e1e567d18"}, From 09d36f77f9dff77a826e59f37431a1c6d0421b38 Mon Sep 17 00:00:00 2001 From: Glencorse033 <103339634+Glencorse033@users.noreply.github.com> Date: Tue, 17 Mar 2026 16:25:05 +0100 Subject: [PATCH 23/42] feat: add search by token address hash in /api/v2/tokens (#14102) Co-authored-by: glencorse033 Co-authored-by: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> --- apps/explorer/lib/explorer/chain/token.ex | 12 ++--- .../test/explorer/chain/token_test.exs | 45 +++++++++++++++++++ 2 files changed, 52 insertions(+), 5 deletions(-) diff --git a/apps/explorer/lib/explorer/chain/token.ex b/apps/explorer/lib/explorer/chain/token.ex index 7da6878ec51c..242d8d412f28 100644 --- a/apps/explorer/lib/explorer/chain/token.ex +++ b/apps/explorer/lib/explorer/chain/token.ex @@ -379,13 +379,15 @@ defmodule Explorer.Chain.Token do |> 
SortingHelper.page_with_sorting(paging_options, sorting, @default_sorting) filtered_query = - case filter && filter !== "" && Search.prepare_search_term(filter) do - {:some, filter_term} -> - sorted_paginated_query - |> apply_fts_filter(filter_term) + case filter && Chain.string_to_address_hash(filter) do + {:ok, address_hash} -> + from(t in sorted_paginated_query, where: t.contract_address_hash == ^address_hash) _ -> - sorted_paginated_query + case filter && filter !== "" && Search.prepare_search_term(filter) do + {:some, filter_term} -> apply_fts_filter(sorted_paginated_query, filter_term) + _ -> sorted_paginated_query + end end filtered_query diff --git a/apps/explorer/test/explorer/chain/token_test.exs b/apps/explorer/test/explorer/chain/token_test.exs index 65b896033618..ea5eb8a3331f 100644 --- a/apps/explorer/test/explorer/chain/token_test.exs +++ b/apps/explorer/test/explorer/chain/token_test.exs @@ -186,5 +186,50 @@ defmodule Explorer.Chain.TokenTest do assert Enum.all?(market_data, fn token -> is_nil(token.fiat_value) end) end + + test "finds token by contract address hash" do + token = insert(:token, name: "Token that we search for", symbol: "ATK") + insert(:token) + insert(:token) + insert(:token) + + address_hash_string = to_string(token.contract_address_hash) + + results = Token.list_top(address_hash_string) + + assert [%{name: "Token that we search for"}] = results + end + + test "finds token by contract address hash when given mixed-case address" do + contract_address = insert(:contract_address, hash: "0xdAC17F958D2ee523a2206206994597C13D831ec7") + insert(:token, contract_address: contract_address, name: "Token that we search for", symbol: "ATK") + insert(:token) + insert(:token) + insert(:token) + + results = Token.list_top("0xDAC17F958D2EE523A2206206994597C13D831EC7") + + assert [%{name: "Token that we search for"}] = results + end + + test "returns empty when searching with a valid address hash format that has no token" do + insert(:token, name: 
"Some Token", symbol: "STK") + non_existent_hash = "0xf000000000000000000000000000000000000000" + + results = Token.list_top(non_existent_hash) + + assert Enum.empty?(results) + end + + test "falls back to full-text search for invalid address hash format" do + insert(:token, name: "0xINVALID_HEX Token", symbol: "IHT") + insert(:token) + insert(:token) + insert(:token) + + results = Token.list_top("0xINVALID_HEX") + + assert [%{name: "0xINVALID_HEX Token"}] = results + end end end From 0c3773a296990d61adf3a9194ac42323556dcc39 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Mar 2026 13:24:08 +0300 Subject: [PATCH 24/42] chore(deps): bump ymlr from 5.1.4 to 5.1.5 (#14125) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix.lock b/mix.lock index f82fd904bb44..3a652598f8d3 100644 --- a/mix.lock +++ b/mix.lock @@ -186,5 +186,5 @@ "websock": {:hex, :websock, "0.5.3", "2f69a6ebe810328555b6fe5c831a851f485e303a7c8ce6c5f675abeb20ebdadc", [:mix], [], "hexpm", "6105453d7fac22c712ad66fab1d45abdf049868f253cf719b625151460b8b453"}, "websock_adapter": {:hex, :websock_adapter, "0.5.8", "3b97dc94e407e2d1fc666b2fb9acf6be81a1798a2602294aac000260a7c4a47d", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "315b9a1865552212b5f35140ad194e67ce31af45bcee443d4ecb96b5fd3f3782"}, "websockex": {:hex, :websockex, "0.5.1", "9de28d37bbe34f371eb46e29b79c94c94fff79f93c960d842fbf447253558eb4", [:mix], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", 
"8ef39576ed56bc3804c9cd8626f8b5d6b5721848d2726c0ccd4f05385a3c9f14"}, - "ymlr": {:hex, :ymlr, "5.1.4", "b924d61e1fc1ec371cde6ab3ccd9311110b1e052fc5c2460fb322e8380e7712a", [:mix], [], "hexpm", "75f16cf0709fbd911b30311a0359a7aa4b5476346c01882addefd5f2b1cfaa51"}, + "ymlr": {:hex, :ymlr, "5.1.5", "0b9207c7940be3f2bc29b77cd55109d5aa2f4dcde6575942017335769e6f5628", [:mix], [], "hexpm", "7030cb240c46850caeb3b01be745307632be319b15f03083136f6251f49b516d"}, } From 8bd06004e1c32c9ab21a1c98dcf54349ba52ab49 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Mar 2026 15:00:50 +0300 Subject: [PATCH 25/42] chore(deps): bump absinthe from 1.9.0 to 1.9.1 (#14126) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mix.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mix.lock b/mix.lock index 3a652598f8d3..23af433a5b84 100644 --- a/mix.lock +++ b/mix.lock @@ -1,5 +1,5 @@ %{ - "absinthe": {:hex, :absinthe, "1.9.0", "28f11753d01c0e8b6cb6e764a23cf4081e0e6cae88f53f4c9e4320912aee9c07", [:mix], [{:dataloader, "~> 1.0.0 or ~> 2.0", [hex: :dataloader, repo: "hexpm", optional: true]}, {:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}, {:opentelemetry_process_propagator, "~> 0.2.1 or ~> 0.3", [hex: :opentelemetry_process_propagator, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "db65993420944ad90e932827663d4ab704262b007d4e3900cd69615f14ccc8ce"}, + "absinthe": {:hex, :absinthe, "1.9.1", "19fe8614d5cdabefaf127ee224cb89eceea48314de4d709737451b43b5bdedd5", [:mix], [{:dataloader, "~> 1.0.0 or ~> 2.0", [hex: :dataloader, repo: "hexpm", optional: true]}, {:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 1.2.2 or ~> 
1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}, {:opentelemetry_process_propagator, "~> 0.2.1 or ~> 0.3", [hex: :opentelemetry_process_propagator, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "d93e1aa61d68b974f48d5660104cb911ae045ee3a5d69954d251f91f3dbe2077"}, "absinthe_phoenix": {:hex, :absinthe_phoenix, "2.0.4", "f36999412fbd6a2339abb5b7e24a4cc9492bbc7909d5806deeef83b06f55c508", [:mix], [{:absinthe, "~> 1.5", [hex: :absinthe, repo: "hexpm", optional: false]}, {:absinthe_plug, "~> 1.5", [hex: :absinthe_plug, repo: "hexpm", optional: false]}, {:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.5", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.13 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}], "hexpm", "66617ee63b725256ca16264364148b10b19e2ecb177488cd6353584f2e6c1cf3"}, "absinthe_plug": {:git, "https://github.com/blockscout/absinthe_plug.git", "90a8188e94e2650f13259fb16462075a87f98e18", [tag: "1.5.8"]}, "absinthe_relay": {:hex, :absinthe_relay, "1.6.0", "73590bdb0dcc192622f39fa11da6fca9df4e339b603c45ec7a93493eb94f1829", [:mix], [{:absinthe, ">= 1.7.10", [hex: :absinthe, repo: "hexpm", optional: false]}, {:ecto, "~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm", "32d6397a7af3fd02678ef9bc8e2f574487f14593cb3e4f9110fb1c695d4d2ac0"}, From 308b7aeecd9b86b4fb30ae1b11493a727ed1ca54 Mon Sep 17 00:00:00 2001 From: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> Date: Thu, 19 Mar 2026 12:44:58 +0300 Subject: [PATCH 26/42] feat: KeyCloak integration (#14068) --- .../account/api/v2/address_controller.ex | 4 +- .../account/api/v2/authenticate_controller.ex | 27 +- .../account/api/v2/email_controller.ex | 4 +- .../lib/block_scout_web/plug/redis_cookie.ex | 15 +- 
.../block_scout_web/routers/account_router.ex | 8 +- .../api/v2/authenticate_controller_test.exs | 4 +- .../account/auth0_to_keycloak_migration.ex | 885 ++++++++++++++++++ .../lib/explorer/account/authentication.ex | 323 +++++++ .../explorer/lib/explorer/account/identity.ex | 9 +- apps/explorer/lib/explorer/helper.ex | 30 +- .../third_party_integrations/auth0.ex | 167 +--- .../auth0/internal.ex | 39 +- .../third_party_integrations/dynamic.ex | 12 +- .../third_party_integrations/keycloak.ex | 491 ++++++++++ .../mix/tasks/auth0_to_keycloak_migrate.ex | 42 + apps/utils/lib/utils/runtime_env_helper.ex | 14 +- config/runtime.exs | 10 +- cspell.json | 3 + docker-compose/envs/common-blockscout.env | 6 + 19 files changed, 1882 insertions(+), 211 deletions(-) create mode 100644 apps/explorer/lib/explorer/account/auth0_to_keycloak_migration.ex create mode 100644 apps/explorer/lib/explorer/account/authentication.ex create mode 100644 apps/explorer/lib/explorer/third_party_integrations/keycloak.ex create mode 100644 apps/explorer/lib/mix/tasks/auth0_to_keycloak_migrate.ex diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/address_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/address_controller.ex index 2034b8a10652..a7ab550c543e 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/address_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/address_controller.ex @@ -4,7 +4,7 @@ defmodule BlockScoutWeb.Account.API.V2.AddressController do import BlockScoutWeb.Account.AuthController, only: [current_user: 1] alias BlockScoutWeb.Account.API.V2.AuthenticateController - alias Explorer.ThirdPartyIntegrations.Auth0 + alias Explorer.Account.Authentication alias Plug.Conn action_fallback(BlockScoutWeb.Account.API.V2.FallbackController) @@ -34,7 +34,7 @@ defmodule BlockScoutWeb.Account.API.V2.AddressController do @spec link_address(Plug.Conn.t(), map()) :: 
:error | {:error, any()} | Conn.t() def link_address(conn, %{"message" => message, "signature" => signature}) do with %{uid: id} <- conn |> current_user(), - {:ok, auth} <- Auth0.link_address(id, message, signature) do + {:ok, auth} <- Authentication.link_address(id, message, signature) do AuthenticateController.put_auth_to_session(conn, auth) end end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/authenticate_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/authenticate_controller.ex index 2dd89dfb0464..5de82b17f93d 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/authenticate_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/authenticate_controller.ex @@ -15,10 +15,8 @@ defmodule BlockScoutWeb.Account.API.V2.AuthenticateController do alias BlockScoutWeb.Schemas.API.V2.Account, as: AccountSchemas - alias Explorer.Account.Identity + alias Explorer.Account.{Authentication, Identity} alias Explorer.Chain - alias Explorer.Chain.Address - alias Explorer.ThirdPartyIntegrations.{Auth0, Dynamic} alias Plug.Conn action_fallback(BlockScoutWeb.Account.API.V2.FallbackController) @@ -122,16 +120,17 @@ defmodule BlockScoutWeb.Account.API.V2.AuthenticateController do | {:interval, integer()} | Conn.t() def send_otp(conn, _params) do + conn = Conn.fetch_session(conn) email = Map.get(conn.body_params, :email) - case Auth0.enabled?() && conn |> current_user() do + case current_user(conn) do nil -> - with :ok <- Auth0.send_otp(email, AccessHelper.conn_to_ip_string(conn)) do + with :ok <- Authentication.send_otp(email, AccessHelper.conn_to_ip_string(conn)) do conn |> put_status(200) |> json(%{message: "Success"}) end %{email: nil} -> - with :ok <- Auth0.send_otp_for_linking(email, AccessHelper.conn_to_ip_string(conn)) do + with :ok <- Authentication.send_otp_for_linking(email, AccessHelper.conn_to_ip_string(conn)) do conn |> put_status(200) |> 
json(%{message: "Success"}) end @@ -140,9 +139,6 @@ defmodule BlockScoutWeb.Account.API.V2.AuthenticateController do |> put_status(500) |> put_view(UserView) |> render(:message, %{message: "This account already has an email"}) - - false -> - {:enabled, false} end end @@ -189,8 +185,7 @@ defmodule BlockScoutWeb.Account.API.V2.AuthenticateController do email = Map.get(conn.body_params, :email) otp = Map.get(conn.body_params, :otp) - with {:enabled, true} <- {:enabled, Auth0.enabled?()}, - {:ok, auth} <- Auth0.confirm_otp_and_get_auth(email, otp, AccessHelper.conn_to_ip_string(conn)) do + with {:ok, auth} <- Authentication.confirm_otp(email, otp, AccessHelper.conn_to_ip_string(conn)) do put_auth_to_session(conn, auth) end end @@ -251,9 +246,8 @@ defmodule BlockScoutWeb.Account.API.V2.AuthenticateController do """ @spec siwe_message(Conn.t(), map()) :: {:error, String.t()} | {:enabled, false} | {:format, :error} | Conn.t() def siwe_message(conn, %{address: address}) do - with {:enabled, true} <- {:enabled, Auth0.enabled?()}, - {:format, {:ok, address_hash}} <- {:format, Chain.string_to_address_hash(address)}, - {:ok, message} <- Auth0.generate_siwe_message(Address.checksum(address_hash)) do + with {:format, {:ok, address_hash}} <- {:format, Chain.string_to_address_hash(address)}, + {:ok, message} <- Authentication.generate_siwe_message(address_hash) do conn |> put_status(200) |> json(%{siwe_message: message}) end end @@ -306,8 +300,7 @@ defmodule BlockScoutWeb.Account.API.V2.AuthenticateController do message = Map.get(conn.body_params, :message) signature = Map.get(conn.body_params, :signature) - with {:enabled, true} <- {:enabled, Auth0.enabled?()}, - {:ok, auth} <- Auth0.get_auth_with_web3(message, signature) do + with {:ok, auth} <- Authentication.verify_siwe_message(message, signature) do put_auth_to_session(conn, auth) end end @@ -356,7 +349,7 @@ defmodule BlockScoutWeb.Account.API.V2.AuthenticateController do end with {:token, not_nil_token} when not 
is_nil(not_nil_token) <- {:token, token}, - {:ok, auth} <- Dynamic.get_auth_from_token(not_nil_token) do + {:ok, auth} <- Authentication.authenticate_via_dynamic(not_nil_token) do put_auth_to_session(conn, auth) end end diff --git a/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/email_controller.ex b/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/email_controller.ex index eefb007dd97d..a4e724a2b4fd 100644 --- a/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/email_controller.ex +++ b/apps/block_scout_web/lib/block_scout_web/controllers/account/api/v2/email_controller.ex @@ -6,7 +6,7 @@ defmodule BlockScoutWeb.Account.API.V2.EmailController do alias BlockScoutWeb.AccessHelper alias BlockScoutWeb.Account.API.V2.AuthenticateController - alias Explorer.Account.Identity + alias Explorer.Account.{Authentication, Identity} alias Explorer.{Helper, HttpClient, Repo} alias Explorer.ThirdPartyIntegrations.Auth0 @@ -98,7 +98,7 @@ defmodule BlockScoutWeb.Account.API.V2.EmailController do | Plug.Conn.t() def link_email(conn, %{"email" => email, "otp" => otp}) do with {:auth, %{} = user} <- {:auth, current_user(conn)}, - {:ok, auth} <- Auth0.link_email(user, email, otp, AccessHelper.conn_to_ip_string(conn)) do + {:ok, auth} <- Authentication.link_email(user, email, otp, AccessHelper.conn_to_ip_string(conn)) do AuthenticateController.put_auth_to_session(conn, auth) end end diff --git a/apps/block_scout_web/lib/block_scout_web/plug/redis_cookie.ex b/apps/block_scout_web/lib/block_scout_web/plug/redis_cookie.ex index 9b42d551d7d1..6131e250cab7 100644 --- a/apps/block_scout_web/lib/block_scout_web/plug/redis_cookie.ex +++ b/apps/block_scout_web/lib/block_scout_web/plug/redis_cookie.ex @@ -4,14 +4,15 @@ defmodule BlockScoutWeb.Plug.RedisCookie do Added Redis to have a possibility to invalidate session """ - require Logger - @behaviour Plug.Session.Store - - import Explorer.ThirdPartyIntegrations.Auth0, only: [cookie_key: 1] 
+ import Explorer.Helper, only: [redis_key: 1] alias Plug.Crypto alias Plug.Crypto.{KeyGenerator, MessageEncryptor, MessageVerifier} + require Logger + + @behaviour Plug.Session.Store + @impl true def init(opts) do opts @@ -196,7 +197,7 @@ defmodule BlockScoutWeb.Plug.RedisCookie do defp store_to_redis(cookie) do Redix.command(:redix, [ "SET", - cookie_key(hash(cookie)), + redis_key(hash(cookie)), 1, "EX", Application.get_env(:block_scout_web, :session_cookie_ttl) @@ -206,14 +207,14 @@ defmodule BlockScoutWeb.Plug.RedisCookie do end defp remove_from_redis(sid) do - Redix.command(:redix, ["DEL", cookie_key(sid)]) + Redix.command(:redix, ["DEL", redis_key(sid)]) end defp check_in_redis({sid, map}, _cookie) when is_nil(sid) or map == %{}, do: {nil, %{}} defp check_in_redis({_sid, session}, cookie) do hash = hash(cookie) - key = cookie_key(hash) + key = redis_key(hash) case Redix.command(:redix, ["GET", key]) do {:ok, one} when one in [1, "1"] -> diff --git a/apps/block_scout_web/lib/block_scout_web/routers/account_router.ex b/apps/block_scout_web/lib/block_scout_web/routers/account_router.ex index 2bda9462e249..dbcf907de095 100644 --- a/apps/block_scout_web/lib/block_scout_web/routers/account_router.ex +++ b/apps/block_scout_web/lib/block_scout_web/routers/account_router.ex @@ -54,7 +54,9 @@ defmodule BlockScoutWeb.Routers.AccountRouter do plug(OpenApiSpex.Plug.PutApiSpec, module: BlockScoutWeb.Specs.Private) end - pipeline :api_v2 do + pipeline :account_api_v2_no_protect_from_forgery do + plug(CheckAccountAPI) + plug( Plug.Parsers, parsers: [:urlencoded, :multipart, :json], @@ -165,7 +167,7 @@ defmodule BlockScoutWeb.Routers.AccountRouter do end scope "/v2" do - pipe_through([:api_v2, :account_api_v2]) + pipe_through(:account_api_v2) scope "/tags" do get("/address/:address_hash", TagsController, :tags_address) @@ -175,7 +177,7 @@ defmodule BlockScoutWeb.Routers.AccountRouter do end scope "/v2" do - pipe_through(:api_v2) + 
pipe_through(:account_api_v2_no_protect_from_forgery) post("/authenticate_via_wallet", AuthenticateController, :authenticate_via_wallet) get("/authenticate_via_dynamic", AuthenticateController, :authenticate_via_dynamic) diff --git a/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/authenticate_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/authenticate_controller_test.exs index d7a5c5e61437..de89711a0865 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/authenticate_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/authenticate_controller_test.exs @@ -654,9 +654,9 @@ defmodule BlockScoutWeb.Account.API.V2.AuthenticateControllerTest do "Bearer some_token" ) |> get("/api/account/v2/authenticate_via_dynamic") - |> json_response(500) + |> json_response(404) - assert response == %{"message" => "Dynamic integration is disabled"} + assert response == %{"message" => "This endpoint is not configured"} end end diff --git a/apps/explorer/lib/explorer/account/auth0_to_keycloak_migration.ex b/apps/explorer/lib/explorer/account/auth0_to_keycloak_migration.ex new file mode 100644 index 000000000000..399b12ca6945 --- /dev/null +++ b/apps/explorer/lib/explorer/account/auth0_to_keycloak_migration.ex @@ -0,0 +1,885 @@ +defmodule Explorer.Account.Auth0ToKeycloakMigration do + @moduledoc """ + Migrates users from Auth0 to Keycloak in bulk using batch APIs. + + Supports merging multiple Auth0 tenants (different Blockscout instances) into + a single Keycloak realm. When a user with the same email already exists in + Keycloak (from a previous tenant migration), the new address is appended to + the existing user's multivalued `address` attribute. Each Blockscout identity + gets the Keycloak ID of the user matching their email. + + ## Phases + 1. Exports all Auth0 users via async job (~5 API calls instead of N) + 2. 
Matches DB identities with Auth0 export data (in-memory) + 3. Batch-imports users into Keycloak via partial import (~N/500 API calls) + 4. Updates identity UIDs from Auth0 ID to Keycloak UUID + 5. Deletes non-migrated identities (no user data, cascades via DB foreign keys) + + ## Usage + + ### Development (Mix) + + mix auth0_to_keycloak_migrate --dry-run + mix auth0_to_keycloak_migrate --batch-size 200 + + ### Production (Release) + + Use `rpc` to execute on the running node — this is required so that + `Application.put_env` changes (disable account, switch to Keycloak) + take effect on the live system. Do NOT use `eval`, as it spawns a + separate BEAM process whose config changes are discarded on exit. + + bin/blockscout rpc "Explorer.Account.Auth0ToKeycloakMigration.run(dry_run: true)" + bin/blockscout rpc "Explorer.Account.Auth0ToKeycloakMigration.run(batch_size: 200)" + + ## Options + - `:dry_run` - When true, logs what would happen without making changes (default: false) + - `:batch_size` - Number of users per Keycloak partial import batch (default: 500) + + ## Memory + + The Auth0 export is loaded into memory. For ~800k users expect ~1-2GB RAM usage. 
  # NOTE(review): the enclosing `defmodule` and the opening of its @moduledoc
  # precede this chunk; only the moduledoc's closing delimiter is visible here.
  """

  # Generates runtime config accessors (keycloak_domain/0, keycloak_realm/0, ...)
  # so Keycloak settings are read at call time, not frozen at compile time.
  use Utils.RuntimeEnvHelper,
    keycloak_domain: [:explorer, [Explorer.ThirdPartyIntegrations.Keycloak, :domain]],
    keycloak_realm: [:explorer, [Explorer.ThirdPartyIntegrations.Keycloak, :realm]],
    keycloak_client_id: [:explorer, [Explorer.ThirdPartyIntegrations.Keycloak, :client_id]],
    keycloak_client_secret: [:explorer, [Explorer.ThirdPartyIntegrations.Keycloak, :client_secret]],
    chain_id: [:block_scout_web, :chain_id]

  import Ecto.Query

  alias Explorer.{Account, HttpClient, Repo}
  alias Explorer.Account.{Api.Key, CustomABI, Identity, TagAddress, TagTransaction, Watchlist, WatchlistAddress}
  alias Explorer.ThirdPartyIntegrations.{Auth0, Keycloak}
  alias Explorer.ThirdPartyIntegrations.Auth0.Internal, as: Auth0Internal
  alias OAuth2.Client
  alias Ueberauth.Strategy.Auth0.OAuth

  require Logger

  # NOTE(review): header-name casing differs between these two attributes
  # ("content-type" vs "Content-type"). HTTP header names are case-insensitive,
  # so behavior is unaffected, but unifying the casing would aid grepability.
  @json_headers [{"content-type", "application/json"}]
  @auth0_json_headers [{"Content-type", "application/json"}]

  # Auth0 export job polling bounds: poll no more often than every 3s, no less
  # often than every 30s, and give up entirely after 30 minutes.
  @export_min_poll_interval_ms 3_000
  @export_max_poll_interval_ms 30_000
  @export_max_total_wait_ms 1_800_000

  # Keycloak partial import: users per request, and pause between requests
  # (presumably to avoid hammering Keycloak — TODO confirm rate-limit rationale).
  @default_batch_size 500
  @batch_pause_ms 1_000

  # Entry point for the Auth0 -> Keycloak account migration.
  #
  # Options:
  #   * `:dry_run` (boolean, default `false`) — log what would happen without
  #     writing to Keycloak, updating identities, or disabling account access.
  #   * `:batch_size` (integer, default 500) — Keycloak partialImport batch size.
  #
  # Returns a list of per-identity results ({:ok, auth0_id, keycloak_id} or
  # {:error, auth0_id, reason}); returns [] when prerequisites are missing or
  # the run fails before producing per-identity results.
  @spec run(keyword()) :: [{:ok, String.t(), String.t()} | {:error, String.t(), any()}]
  def run(opts \\ []) do
    with :ok <- check_auth0_configured(),
         :ok <- check_keycloak_configured() do
      do_run(opts)
    else
      {:error, message} ->
        Logger.error(message)
        []
    end
  end

  defp check_auth0_configured do
    if Auth0.enabled?(), do: :ok, else: {:error, "Auth0 is not configured. Cannot read source users."}
  end

  defp check_keycloak_configured do
    if Keycloak.enabled?(), do: :ok, else: {:error, "Keycloak is not configured. Cannot create target users."}
  end

  # Orchestrates a real or dry run. On a real run, account access is disabled
  # up front and re-enabled only on failure (finalize/2 re-enables on success);
  # after a partial failure access intentionally stays disabled — see finalize/2.
  defp do_run(opts) do
    dry_run = Keyword.get(opts, :dry_run, false)
    batch_size = Keyword.get(opts, :batch_size, @default_batch_size)

    unless dry_run, do: disable_account()

    case run_phases(dry_run, batch_size) do
      {:ok, results, migrated_ids} ->
        unless dry_run, do: finalize(results, migrated_ids)
        results

      {:error, reason} ->
        Logger.error("Migration failed: #{inspect(reason)}")
        unless dry_run, do: enable_account()
        []
    end
  end

  defp run_phases(dry_run, batch_size) do
    # Phase 1: Export all Auth0 users
    Logger.info("Phase 1: Exporting Auth0 users...")

    with {:ok, auth0_users} <- export_auth0_users(),
         # Phase 2: Load identities and match with Auth0 data
         Logger.info("Phase 2: Loading identities and matching with Auth0 data..."),
         identities = load_auth0_identities(),
         {[_ | _] = items, skipped_with_data} <- build_migration_items(identities, auth0_users) do
      execute_or_dry_run(items, skipped_with_data, dry_run, batch_size)
    else
      {[], skipped_with_data} ->
        Logger.info("No identities to migrate")
        {:ok, [], MapSet.new(skipped_with_data)}

      {:error, _} = error ->
        error
    end
  end

  defp execute_or_dry_run(items, skipped_with_data, dry_run, batch_size) do
    # `if` without `else` interpolates as "" when dry_run is false.
    Logger.info("Found #{Enum.count(items)} identities to migrate#{if dry_run, do: " (DRY RUN)"}")

    # Protect both migrated identities and skipped-with-data identities from deletion
    protected_ids =
      items
      |> MapSet.new(& &1.identity_id)
      |> MapSet.union(MapSet.new(skipped_with_data))

    if dry_run do
      log_dry_run(items)
      log_dry_run_deletion_count(protected_ids)
      {:ok, [], protected_ids}
    else
      # Phase 3: Batch import to Keycloak
      Logger.info("Phase 3: Importing users to Keycloak...")
      keycloak_map = batch_import_to_keycloak(items, batch_size)

      # Phase 4: Update identity UIDs
      Logger.info("Phase 4: Updating identity UIDs...")
      results = update_identities(items, keycloak_map)

      summarize(results)
      {:ok, results, protected_ids}
    end
  end

  # Runs the Auth0 bulk-export flow: create job -> poll until done -> download.
  # Returns {:ok, %{user_id => user_map}} or {:error, reason}.
  defp export_auth0_users do
    with {:ok, job_id} <- create_export_job(),
         {:ok, location} <- poll_export_job(job_id) do
      download_and_parse_export(location)
    end
  end

  defp create_export_job do
    with token when is_binary(token) <- Auth0.get_m2m_jwt(),
         client = OAuth.client(token: token),
         {:ok, %OAuth2.Response{status_code: 200, body: %{"id" => job_id}}} <-
           Client.post(client, "/api/v2/jobs/users-exports", export_job_body(), @auth0_json_headers) do
      Logger.info("Auth0 export job created: #{job_id}")
      {:ok, job_id}
    else
      nil -> {:error, "Failed to get Auth0 M2M JWT"}
      error -> {:error, "Failed to create Auth0 export job: #{inspect(error)}"}
    end
  end

  # Field list for the Auth0 users-export job (newline-delimited JSON output).
  defp export_job_body do
    %{
      format: "json",
      fields: [
        %{name: "user_id"},
        %{name: "email"},
        %{name: "email_verified"},
        %{name: "username"},
        %{name: "nickname"},
        %{name: "name"},
        %{name: "given_name"},
        %{name: "family_name"},
        %{name: "picture"},
        %{name: "user_metadata"},
        %{name: "app_metadata"},
        %{name: "identities"}
      ]
    }
  end

  defp poll_export_job(job_id, waited_ms \\ 0)

  defp poll_export_job(_job_id, waited_ms) when waited_ms >= @export_max_total_wait_ms do
    {:error, "Auth0 export job timed out after #{div(@export_max_total_wait_ms, 1000)}s"}
  end

  # Polls the export job, sleeping between attempts; recursion carries the
  # cumulative wait so the timeout clause above can fire.
  defp poll_export_job(job_id, waited_ms) do
    with token when is_binary(token) <- Auth0.get_m2m_jwt(),
         client = OAuth.client(token: token),
         {:ok, %OAuth2.Response{status_code: 200, body: body}} <-
           Client.get(client, "/api/v2/jobs/#{job_id}") do
      case body do
        %{"status" => "completed", "location" => location} ->
          Logger.info("Auth0 export job completed")
          {:ok, location}

        %{"status" => "failed"} ->
          {:error, "Auth0 export job failed: #{inspect(body)}"}

        %{"status" => status} = response ->
          sleep_ms = poll_interval_from_estimate(response)

          Logger.info(
            "Auth0 export status: #{status}, " <>
              "estimated time left: #{response["time_left_seconds"] || "unknown"}s, " <>
              "polling again in #{div(sleep_ms, 1000)}s (waited #{div(waited_ms, 1000)}s total)"
          )

          Process.sleep(sleep_ms)
          poll_export_job(job_id, waited_ms + sleep_ms)
      end
    else
      nil -> {:error, "Failed to get Auth0 M2M JWT"}
      error -> {:error, "Failed to poll Auth0 export job: #{inspect(error)}"}
    end
  end

  # Polls at ~25% of Auth0's estimated remaining time (seconds * 250 ms),
  # clamped to [@export_min_poll_interval_ms, @export_max_poll_interval_ms].
  defp poll_interval_from_estimate(%{"time_left_seconds" => seconds}) when is_number(seconds) and seconds > 0 do
    (seconds * 250)
    |> round()
    |> max(@export_min_poll_interval_ms)
    |> min(@export_max_poll_interval_ms)
  end

  defp poll_interval_from_estimate(_response), do: @export_min_poll_interval_ms

  # Downloads the export archive and parses it into %{user_id => user_map}.
  # NOTE(review): the `{:ok, ... } = Jason.decode(line)` match will raise on a
  # malformed line — intentional let-it-crash on corrupt export data, it seems.
  defp download_and_parse_export(location) do
    Logger.info("Downloading Auth0 export...")

    case HttpClient.get(location, []) do
      {:ok, %{status_code: 200, body: body}} ->
        text = safe_gunzip(body)

        users_map =
          text
          |> String.split("\n", trim: true)
          |> Map.new(fn line ->
            {:ok, %{"user_id" => user_id} = user} = Jason.decode(line)
            {user_id, user}
          end)

        Logger.info("Parsed #{map_size(users_map)} users from Auth0 export")
        {:ok, users_map}

      {:ok, %{status_code: status, body: resp_body}} ->
        {:error, "Failed to download Auth0 export: HTTP #{status}: #{resp_body}"}

      {:error, reason} ->
        {:error, "Failed to download Auth0 export: #{inspect(reason)}"}
    end
  end

  # Auth0 export files are gzipped. If content is already decompressed
  # (e.g. HTTP client handled Content-Encoding), pass through as-is.
  # Decompresses gzipped export data; returns the input unchanged if gunzip
  # fails. NOTE(review): rescuing ErlangError broadly also swallows unrelated
  # zlib errors (not just :data_error) — acceptable here as a best-effort
  # pass-through, but worth confirming.
  defp safe_gunzip(data) do
    :zlib.gunzip(data)
  rescue
    ErlangError -> data
  end

  # Loads all identities that are worth migrating: those with any associated
  # user data (tags, custom ABIs, API keys, watchlist addresses, or a
  # non-default plan) OR a known email. Returns a list of
  # %{identity: %Identity{}, has_user_data: boolean}.
  defp load_auth0_identities do
    # "has user data" is computed server-side via EXISTS subqueries so we never
    # load the associated rows themselves.
    has_user_data =
      dynamic(
        [identity: i],
        exists(from(ta in TagAddress, where: ta.identity_id == parent_as(:identity).id, select: 1)) or
          exists(from(tt in TagTransaction, where: tt.identity_id == parent_as(:identity).id, select: 1)) or
          exists(from(ca in CustomABI, where: ca.identity_id == parent_as(:identity).id, select: 1)) or
          exists(from(ak in Key, where: ak.identity_id == parent_as(:identity).id, select: 1)) or
          exists(
            from(wa in WatchlistAddress,
              join: w in Watchlist,
              on: wa.watchlist_id == w.id,
              where: w.identity_id == parent_as(:identity).id,
              select: 1
            )
          ) or i.plan_id != 1
      )

    where_condition =
      dynamic(
        [identity: i],
        ^has_user_data or not is_nil(i.email)
      )

    dynamic_select = dynamic([identity: i], %{identity: i, has_user_data: ^has_user_data})

    q =
      from(i in Identity,
        as: :identity,
        where: ^where_condition,
        select: ^dynamic_select
      )

    Repo.account_repo().all(q)
  end

  # Matches loaded identities against the Auth0 export and builds migration
  # items. Returns {items, skipped_with_data_ids}; identities skipped despite
  # having user data are reported so they can be protected from deletion.
  defp build_migration_items(identities, auth0_users) do
    {items, skipped, skipped_with_data} =
      Enum.reduce(identities, {[], 0, []}, fn entry, {acc, skip_count, data_acc} ->
        case build_migration_item(entry, auth0_users) do
          {:ok, item} ->
            {[item | acc], skip_count, data_acc}

          {:skip_with_data, id} ->
            # Counted in `skipped` too; additionally tracked for protection.
            {acc, skip_count + 1, [id | data_acc]}

          :skip ->
            {acc, skip_count + 1, data_acc}
        end
      end)

    if skipped > 0,
      do: Logger.warning("Skipped #{skipped} identities (not found in Auth0 export, no data, or no email+address)")

    if skipped_with_data != [] do
      Logger.warning(
        "#{length(skipped_with_data)} identities with user data were NOT found in Auth0 export " <>
          "and will be preserved (not deleted): #{inspect(skipped_with_data)}"
      )
    end

    deduplicated_items = merge_duplicate_emails(items)
    log_duplicate_addresses(deduplicated_items)
    {deduplicated_items, skipped_with_data}
  end

  # Merges identities that share the same email within this instance.
  # Uses Account.merge/1 to consolidate all user data into one identity,
  # then returns a deduplicated items list with only the surviving identities.
  defp merge_duplicate_emails(items) do
    {dupes, uniques} =
      items
      |> Enum.filter(& &1.email)
      |> Enum.group_by(& &1.email)
      |> Enum.split_with(fn {_email, group} -> length(group) > 1 end)

    # Items without an email never collide on email and pass through untouched.
    no_email_items = Enum.filter(items, &is_nil(&1.email))
    unique_items = Enum.flat_map(uniques, fn {_email, [item]} -> [item] end)

    merged_items =
      Enum.map(dupes, fn {email, group} ->
        ids = Enum.map(group, & &1.identity_id)
        Logger.info("Merging #{length(group)} identities with duplicate email #{email}: #{inspect(ids)}")

        # Pick primary: prefer one with address, then with user_data
        # (sort key: false sorts before true, so "has address" wins first).
        sorted = Enum.sort_by(group, fn item -> {is_nil(item.address), !item.has_user_data} end)
        [primary | _rest] = sorted

        identities =
          ids
          |> Enum.map(&Repo.account_repo().get(Identity, &1))
          |> Enum.reject(&is_nil/1)

        # Reorder identities so primary is first
        primary_identity = Enum.find(identities, &(&1.id == primary.identity_id))
        rest_identities = Enum.reject(identities, &(&1.id == primary.identity_id))

        case Account.merge([primary_identity | rest_identities]) do
          {{:ok, _}, _} ->
            Logger.info("Merged duplicate email #{email} into identity #{primary.identity_id}")
            primary

          {{:error, reason}, _} ->
            # On merge failure we still migrate only the primary; the duplicates
            # remain unmerged in the DB. NOTE(review): confirm this is intended —
            # the non-primary duplicates are not in `items` and thus unprotected.
            Logger.error("Failed to merge duplicate email #{email}: #{inspect(reason)}, keeping primary only")
            primary
        end
      end)

    no_email_items ++ unique_items ++ merged_items
  end

  # Logs (but does not resolve) identities within this instance that share the
  # same address — these require manual intervention.
  defp log_duplicate_addresses(items) do
    items
    |> Enum.filter(& &1.address)
    |> Enum.group_by(&to_string(&1.address))
    |> Enum.filter(fn {_addr, group} -> length(group) > 1 end)
    |> Enum.each(fn {address, group} ->
      ids = Enum.map(group, & &1.identity_id)

      Logger.warning(
        "Duplicate address within same instance: #{address}, identity IDs: #{inspect(ids)}. " <>
          "Manual resolution required."
      )
    end)
  end

  # Builds a single migration item from an identity + its Auth0 export record.
  # Returns {:ok, item}, {:skip_with_data, id} (skipped but must be preserved),
  # or :skip.
  defp build_migration_item(%{identity: %{id: id, uid: auth0_id}, has_user_data: has_user_data}, auth0_users) do
    with {:ok, user} <- Map.fetch(auth0_users, auth0_id),
         identity = user |> Auth0Internal.create_auth() |> Identity.new_identity(),
         # Identities with no user data and no address are not worth migrating.
         true <- has_user_data || identity.address_hash != nil,
         # Every Auth0 user should have either email or address; skip anomalies.
         keycloak_username when not is_nil(keycloak_username) <-
           identity.email || (identity.address_hash && String.downcase(to_string(identity.address_hash))) do
      {:ok,
       %{
         identity_id: id,
         auth0_id: auth0_id,
         email: identity.email,
         address: identity.address_hash,
         username: keycloak_username,
         has_user_data: has_user_data
       }}
    else
      :error ->
        Logger.warning("Auth0 user not found in export for identity #{id} (#{auth0_id})")
        if has_user_data, do: {:skip_with_data, id}, else: :skip

      nil ->
        Logger.warning("Identity #{id} (#{auth0_id}) has no email and no address in Auth0, skipping")
        if has_user_data, do: {:skip_with_data, id}, else: :skip

      false ->
        Logger.debug("No user data and no address in Auth0 for identity #{id} (#{auth0_id}), skipping")
        :skip
    end
  end

  # Creates or finds Keycloak users for each migration item.
  # Across tenants, multiple identities with the same email map to one Keycloak user.
  # Returns %{identity_id => keycloak_id}.
  defp batch_import_to_keycloak(items, batch_size) do
    # Pre-check which addresses already exist in Keycloak (from previous tenant migrations).
    # These addresses will be excluded from user bodies to preserve uniqueness.
    taken_addresses = pre_check_addresses(items)
    keycloak_users = Enum.map(items, &build_keycloak_user(&1, taken_addresses))

    Logger.info("#{length(keycloak_users)} Keycloak users to import")

    num_batches = ceil(length(keycloak_users) / batch_size)

    items_by_username = Map.new(items, &{&1.username, &1})

    username_to_keycloak_id =
      keycloak_users
      |> Enum.chunk_every(batch_size)
      |> Enum.with_index(1)
      |> Enum.flat_map(fn {batch, batch_num} ->
        Logger.info("Keycloak import batch #{batch_num}/#{num_batches}")
        results = import_batch(batch)
        # Pause between batches (skipped after the final one).
        unless batch_num == num_batches, do: Process.sleep(@batch_pause_ms)
        results
      end)
      # NOTE(review): Map.new collapses duplicate usernames (last entry wins) —
      # duplicate emails were merged earlier, so collisions should be rare.
      |> Map.new()
      |> resolve_missing_ids(items_by_username)

    # Convert username → keycloak_id into identity_id → keycloak_id
    Map.new(items, fn %{identity_id: id, username: username} ->
      {id, Map.get(username_to_keycloak_id, username)}
    end)
  end

  # Builds the Keycloak user-creation body for one item. Addresses already
  # claimed in Keycloak (by a prior tenant's migration) are left off the body.
  defp build_keycloak_user(%{username: username, email: email, address: address}, taken_addresses) do
    # `if` without `else` yields nil when there is no address.
    normalized_address = if address, do: String.downcase(to_string(address))

    # Exclude address if already claimed by another Keycloak user
    use_address =
      if normalized_address && MapSet.member?(taken_addresses, normalized_address) do
        Logger.info(
          "Address #{normalized_address} already exists in Keycloak, " <>
            "not assigning to user #{username} (#{email || "no email"})"
        )

        nil
      else
        normalized_address
      end

    body =
      %{username: username, enabled: true}
      |> then(fn body -> if email, do: Map.merge(body, %{email: email, emailVerified: true}), else: body end)
      |> then(fn body ->
        if use_address,
          do: Map.put(body, :attributes, %{address: [use_address]}),
          else: body
      end)

    {username, body}
  end

  # Checks which addresses from migration items already exist in Keycloak.
  # Returns a MapSet of lowercased addresses that are already claimed.
  # Queries Keycloak for each distinct address in `items` and returns the set
  # of lowercased addresses that are already claimed by existing users.
  defp pre_check_addresses(items) do
    addresses =
      items
      |> Enum.filter(& &1.address)
      |> Enum.map(&String.downcase(to_string(&1.address)))
      |> Enum.uniq()

    if Enum.empty?(addresses) do
      MapSet.new()
    else
      Logger.info("Pre-checking #{length(addresses)} addresses against Keycloak...")

      # One Keycloak query per address — sequential; acceptable for a one-off
      # migration, but O(n) round-trips.
      taken = Enum.filter(addresses, &address_exists_in_keycloak?/1)

      if taken != [] do
        Logger.info("#{length(taken)} addresses already exist in Keycloak and will be skipped")
      end

      MapSet.new(taken)
    end
  end

  # True when at least one Keycloak user already carries this address.
  # NOTE(review): lookup errors also return false — an unreachable Keycloak
  # would silently treat every address as free; confirm this is acceptable.
  defp address_exists_in_keycloak?(address) do
    match?({:ok, [_ | _]}, Keycloak.find_users_by_address(address))
  end

  # Imports one batch via Keycloak partialImport, translating each result row
  # into {username, keycloak_id | nil} pairs. nil IDs are resolved later.
  defp import_batch(user_entries) do
    users = Enum.map(user_entries, fn {_username, body} -> body end)
    entries_by_username = Map.new(user_entries)

    case keycloak_partial_import(users) do
      {:ok, %{"results" => results}} ->
        Enum.flat_map(results, fn
          %{"resourceName" => username, "id" => keycloak_id, "action" => "ADDED"} ->
            [{username, keycloak_id}]

          %{"resourceName" => username, "id" => keycloak_id, "action" => "SKIPPED"} ->
            # User already exists (same username) — append our address to their attributes
            maybe_append_address(keycloak_id, entries_by_username[username])
            [{username, keycloak_id}]

          %{"resourceName" => username, "action" => "SKIPPED"} ->
            # SKIPPED without ID — will be resolved in resolve_missing_ids
            [{username, nil}]

          other ->
            Logger.warning("Unexpected partial import result: #{inspect(other)}")
            []
        end)

      {:error, reason} ->
        Logger.error("Partial import failed: #{inspect(reason)}, falling back to individual creates")
        individual_create_fallback(user_entries)
    end
  end

  # Fallback path when partialImport fails: create users one by one.
  defp individual_create_fallback(user_entries) do
    Enum.map(user_entries, fn {username, body} ->
      {username, create_or_reuse_keycloak_user(body)}
    end)
  end

  # Creates a single Keycloak user; on "already exists" tries to locate the
  # existing user by email and attach our address. Returns keycloak_id or nil.
  defp create_or_reuse_keycloak_user(body) do
    case Keycloak.create_user(body) do
      {:ok, keycloak_id} ->
        keycloak_id

      {:error, "User already exists"} ->
        case lookup_and_append_address(body) do
          {:ok, keycloak_id} -> keycloak_id
          _ -> nil
        end

      error ->
        Logger.error("Failed to create Keycloak user #{body[:username]}: #{inspect(error)}")
        nil
    end
  end

  # When a user already exists in Keycloak (e.g. from another tenant),
  # find them by email and append the new address to their attributes.
  defp lookup_and_append_address(%{email: email} = body) when is_binary(email) do
    case Keycloak.find_users_by_email(email) do
      {:ok, [%{"id" => keycloak_id} | _]} ->
        maybe_append_address(keycloak_id, body)
        {:ok, keycloak_id}

      _ ->
        Logger.warning("User already exists but could not find by email: #{email}")
        {:error, :not_found}
    end
  end

  defp lookup_and_append_address(_body), do: {:error, :no_email}

  # For users where we didn't get a Keycloak ID (SKIPPED without id, or 409),
  # fall back to individual lookup by email/address.
  defp resolve_missing_ids(username_to_id, items_by_username) do
    missing = Enum.filter(username_to_id, fn {_username, id} -> is_nil(id) end)

    if not Enum.empty?(missing) do
      Logger.info("Resolving #{length(missing)} users without Keycloak IDs...")
    end

    Enum.reduce(missing, username_to_id, fn {username, _nil}, acc ->
      item = items_by_username[username]

      case resolve_single_user(item) do
        {:ok, keycloak_id} -> Map.put(acc, username, keycloak_id)
        :error -> acc
      end
    end)
  end

  # Looks a single user up by email then address; on success also makes sure
  # the user's address attribute includes our address.
  defp resolve_single_user(%{email: email, address: address, username: username}) do
    case lookup_keycloak_user(email, address) do
      {:ok, keycloak_id} ->
        append_address_to_keycloak_user(keycloak_id, address)
        {:ok, keycloak_id}

      _ ->
        Logger.error("Could not resolve Keycloak ID for #{username}")
        :error
    end
  end

  # Finds a Keycloak user by email first, then by address. Either lookup may
  # short-circuit the `with` by returning a non-empty list or an error tuple.
  defp lookup_keycloak_user(email, address) do
    with {:ok, []} <- maybe_find_by_email(email),
         {:ok, []} <- maybe_find_by_address(address) do
      {:error, :not_found}
    else
      {:ok, [%{"id" => keycloak_id} | _]} -> {:ok, keycloak_id}
      error -> error
    end
  end

  defp maybe_find_by_email(nil), do: {:ok, []}
  defp maybe_find_by_email(email), do: Keycloak.find_users_by_email(email)

  defp maybe_find_by_address(nil), do: {:ok, []}
  defp maybe_find_by_address(address), do: Keycloak.find_users_by_address(address)

  # Extracts address from a Keycloak user body and appends it to the existing user.
  defp maybe_append_address(keycloak_id, %{attributes: %{address: [address | _]}}) do
    append_address_to_keycloak_user(keycloak_id, address)
  end

  defp maybe_append_address(_keycloak_id, _body), do: :ok

  # Appends an address to a Keycloak user's multivalued address attribute.
  # Address uniqueness is already guaranteed by pre_check_addresses — addresses
  # claimed by other users were excluded from bodies before import started.
  defp append_address_to_keycloak_user(keycloak_id, address) when is_binary(address) do
    address = String.downcase(to_string(address))

    with {:ok, user} <- Keycloak.get_user(keycloak_id) do
      current_addresses = get_in(user, ["attributes", "address"]) || []

      if address in current_addresses do
        Logger.debug("Address #{address} already on Keycloak user #{keycloak_id}")
        :ok
      else
        new_attributes = Map.put(user["attributes"] || %{}, "address", [address | current_addresses])
        merged = Map.put(user, "attributes", new_attributes)
        Keycloak.update_user(keycloak_id, merged)
      end
    end
  end

  defp append_address_to_keycloak_user(_keycloak_id, _address), do: :ok

  # Phase 4: rewrites each identity's uid from the Auth0 ID to the Keycloak ID.
  # Returns one {:ok, auth0_id, keycloak_id} / {:error, auth0_id, reason} per item.
  defp update_identities(items, keycloak_map) do
    Enum.map(items, fn %{identity_id: id, auth0_id: auth0_id} ->
      with {:ok, keycloak_id} when is_binary(keycloak_id) <- Map.fetch(keycloak_map, id),
           :ok <- update_identity_uid(id, keycloak_id) do
        {:ok, auth0_id, keycloak_id}
      else
        # Map.fetch miss (key absent entirely).
        :error ->
          Logger.error("No Keycloak ID for identity #{id} (#{auth0_id})")
          {:error, auth0_id, :no_keycloak_id}

        # Key present but the Keycloak ID was never resolved.
        {:ok, nil} ->
          Logger.error("No Keycloak ID for identity #{id} (#{auth0_id})")
          {:error, auth0_id, :no_keycloak_id}

        {:error, reason} ->
          Logger.error("Failed to update identity #{id}: #{inspect(reason)}")
          {:error, auth0_id, reason}
      end
    end)
  end

  defp update_identity_uid(id, keycloak_id) do
    case Repo.account_repo().get(Identity, id) do
      nil ->
        {:error, "Identity #{id} not found"}

      identity ->
        identity
        |> Identity.changeset(%{uid: keycloak_id})
        |> Repo.account_repo().update()
        |> case do
          {:ok, _} -> :ok
          {:error, changeset} -> {:error, "Update failed: #{inspect(changeset.errors)}"}
        end
    end
  end

  # Phase 5: deletes every identity that was neither migrated nor explicitly
  # protected. An empty protected set deletes ALL identities — only reached
  # when there was nothing to migrate and nothing to preserve.
  defp delete_non_migrated_identities(protected_ids) do
    protected_id_list = MapSet.to_list(protected_ids)

    query =
      if Enum.empty?(protected_id_list) do
        from(i in Identity)
      else
        from(i in Identity, where: i.id not in ^protected_id_list)
      end

    {deleted, _} = Repo.account_repo().delete_all(query)
    Logger.info("Deleted #{deleted} non-migrated identities (cascading to their associated data)")
  end

  # Calls Keycloak's partialImport admin endpoint with SKIP semantics:
  # users whose username already exists are reported as SKIPPED, not errors.
  defp keycloak_partial_import(users) do
    body = %{ifResourceExists: "SKIP", users: users}

    with {:ok, token} <- get_keycloak_admin_token() do
      url = keycloak_url("/admin/realms/#{URI.encode(keycloak_realm())}/partialImport")

      case HttpClient.post(url, Jason.encode!(body), keycloak_auth_headers(token) ++ @json_headers) do
        {:ok, %{status_code: 200, body: resp_body}} ->
          Jason.decode(resp_body)

        {:ok, %{status_code: status, body: resp_body}} ->
          {:error, "HTTP #{status}: #{resp_body}"}

        {:error, reason} ->
          {:error, reason}
      end
    end
  end

  # Caches the admin token in the process dictionary for the duration of the migration.
  # Returns a cached Keycloak admin token from the process dictionary if it is
  # still valid, otherwise fetches a fresh one. Process-dictionary scope means
  # the cache lives only for this migration process — intentional.
  defp get_keycloak_admin_token do
    case Process.get(:keycloak_admin_token) do
      {token, expires_at} when is_integer(expires_at) and expires_at > 0 ->
        if System.system_time(:second) < expires_at do
          {:ok, token}
        else
          fetch_keycloak_admin_token()
        end

      _ ->
        fetch_keycloak_admin_token()
    end
  end

  # Fetches a client-credentials token from Keycloak and caches it with a
  # 30-second safety margin before the reported expiry.
  defp fetch_keycloak_admin_token do
    url = keycloak_url("/realms/#{URI.encode(keycloak_realm())}/protocol/openid-connect/token")

    body =
      URI.encode_query(%{
        grant_type: "client_credentials",
        client_id: keycloak_client_id(),
        client_secret: keycloak_client_secret()
      })

    case HttpClient.post(url, body, [{"content-type", "application/x-www-form-urlencoded"}]) do
      {:ok, %{status_code: 200, body: resp_body}} ->
        case Jason.decode(resp_body) do
          {:ok, %{"access_token" => token, "expires_in" => ttl}} ->
            Process.put(:keycloak_admin_token, {token, System.system_time(:second) + ttl - 30})
            {:ok, token}

          _ ->
            {:error, "Invalid Keycloak token response"}
        end

      error ->
        {:error, "Failed to get Keycloak admin token: #{inspect(error)}"}
    end
  end

  defp keycloak_auth_headers(token), do: [{"authorization", "Bearer #{token}"}]

  # Joins the configured Keycloak base domain with an API path.
  defp keycloak_url(path) do
    keycloak_domain()
    |> URI.parse()
    |> URI.append_path(path)
    |> URI.to_string()
  end

  # NOTE(review): toggling Application env only affects THIS node; in a
  # multi-node deployment other nodes keep serving account traffic — confirm
  # the migration is run with a single API node or behind a maintenance page.
  defp disable_account do
    Logger.info("Disabling account access for migration")
    update_account_enabled(false)
  end

  defp enable_account do
    update_account_enabled(true)
  end

  defp update_account_enabled(enabled) do
    config = Application.get_env(:explorer, Account, [])
    Application.put_env(:explorer, Account, Keyword.put(config, :enabled, enabled))
  end

  # Clears the Auth0 domain at runtime so the Auth0 strategy reports disabled.
  # Same node-local caveat as update_account_enabled/1 above.
  defp disable_auth0 do
    config = Application.get_env(:ueberauth, OAuth, [])
    Application.put_env(:ueberauth, OAuth, Keyword.put(config, :domain, nil))
  end

  # Post-migration switchover. Only runs when every identity updated cleanly;
  # on any failure, account access deliberately stays disabled for operator review.
  defp finalize(results, protected_ids) do
    failed = Enum.count(results, &match?({:error, _, _}, &1))

    if failed == 0 do
      Logger.info("Phase 5: Deleting non-migrated identities...")
      delete_non_migrated_identities(protected_ids)

      disable_auth0()
      invalidate_all_sessions()
      enable_account()
      Logger.info("Migration successful. Auth0 disabled, Keycloak is now the active auth provider.")
    else
      Logger.warning(
        "Migration had #{failed} failures. Account access remains DISABLED. " <>
          "Review errors and re-run, or manually enable account access."
      )
    end
  end

  # Invalidates all user sessions by removing their Redis validation keys.
  # Session cookies are signed but validated against Redis on each request.
  # Without the Redis key, the cookie is rejected and the user must re-authenticate.
  defp invalidate_all_sessions do
    Logger.info("Invalidating all user sessions...")
    chain_id = chain_id()
    # NOTE(review): with no chain_id configured the "*" pattern deletes EVERY
    # key in this Redis database, not just session keys — confirm the instance
    # is dedicated to sessions before relying on the wildcard.
    pattern = if chain_id, do: "#{chain_id}_*", else: "*"
    count = scan_and_delete(pattern)
    Logger.info("Invalidated #{count} session keys from Redis")
  end

  # Cursor-based SCAN+DEL over keys matching `pattern`; returns how many keys
  # were deleted. Stops (returning the partial count) if SCAN errors.
  defp scan_and_delete(pattern, cursor \\ "0", count \\ 0) do
    case Redix.command(:redix, ["SCAN", cursor, "MATCH", pattern, "COUNT", 1000]) do
      {:ok, [next_cursor, keys]} ->
        # Best-effort delete; a failed DEL is ignored for this batch.
        unless keys == [], do: Redix.command(:redix, ["DEL" | keys])
        new_count = count + length(keys)

        if next_cursor == "0" do
          new_count
        else
          scan_and_delete(pattern, next_cursor, new_count)
        end

      error ->
        Logger.error("Redis SCAN failed: #{inspect(error)}")
        count
    end
  end

  defp summarize(results) do
    succeeded = Enum.count(results, &match?({:ok, _, _}, &1))
    failed = Enum.count(results, &match?({:error, _, _}, &1))
    Logger.info("Migration complete: #{succeeded} succeeded, #{failed} failed, #{succeeded + failed} total")
  end

  # Dry-run counterpart of delete_non_migrated_identities/1: reports how many
  # identities a real run would delete, without touching the database.
  defp log_dry_run_deletion_count(protected_ids) do
    total_count = Repo.account_repo().aggregate(Identity, :count)

    would_delete = total_count - MapSet.size(protected_ids)

    Logger.info(
      "[DRY RUN] Would delete #{would_delete} non-migrated identities " <>
        "(#{total_count} total, #{MapSet.size(protected_ids)} protected)"
    )
  end
+ end + + defp log_dry_run(items) do + Enum.each(items, fn item -> + identity_label = if Map.has_key?(item, :identity_id), do: "Identity #{item.identity_id}: ", else: "" + + Logger.info( + "[DRY RUN] #{identity_label}#{item.auth0_id} -> " <> + "email=#{inspect(item.email)}, address=#{inspect(item.address)}, " <> + "username=#{item.username}, has_user_data=#{item.has_user_data}" + ) + end) + end +end diff --git a/apps/explorer/lib/explorer/account/authentication.ex b/apps/explorer/lib/explorer/account/authentication.ex new file mode 100644 index 000000000000..d5062001c510 --- /dev/null +++ b/apps/explorer/lib/explorer/account/authentication.ex @@ -0,0 +1,323 @@ +defmodule Explorer.Account.Authentication do + @moduledoc """ + Context module for user authentication and third-party identity management. + """ + + alias Explorer.{Account, Helper} + alias Explorer.Account.Identity + alias Explorer.Chain.{Address, Hash} + alias Explorer.ThirdPartyIntegrations.{Auth0, Dynamic, Keycloak} + alias Ueberauth.Auth + + require Logger + + @callback send_otp(String.t(), String.t()) :: :ok | {:error, String.t()} | :error | {:format, :email} + @callback send_otp_for_linking(String.t(), String.t()) :: :ok | {:error, String.t()} | :error | {:format, :email} + @callback confirm_otp_and_get_auth(String.t(), String.t(), String.t()) :: + {:ok, Auth.t()} | {:error, String.t()} | :error + @callback link_email(Identity.session(), String.t(), String.t(), String.t()) :: + {:ok, Auth.t()} | {:error, String.t()} | :error + @callback find_or_create_web3_user(String.t(), String.t()) :: {:ok, Auth.t()} | {:error, String.t()} | :error + @callback link_address(String.t(), String.t()) :: {:ok, Auth.t()} | {:error, String.t()} | :error + + @request_siwe_message "Request Sign in with Ethereum message via /api/account/v2/siwe_message" + @wrong_nonce "Wrong nonce in message" + @misconfiguration_detected "Misconfiguration detected, please contact support." 
+ + @doc """ + Sends a one-time password to the specified email address using the enabled authentication provider. + + ## Parameters + - `email`: The email address to send the OTP to + - `ip`: The IP address of the requester + + ## Returns + - `:ok` if the OTP was sent successfully + - `{:error, String.t()}` if the email already exists or sending failed + - `:error` if there was an unexpected error + - `{:enabled, false}` if no authentication provider is enabled + - `{:format, :email}` if the email format is invalid + """ + @spec send_otp(String.t(), String.t()) :: :ok | {:error, String.t()} | :error | {:enabled, false} | {:format, :email} + def send_otp(email, ip) do + with {:ok, module} <- responsible_module() do + module.send_otp(email, ip) + end + end + + @doc """ + Sends a one-time password to the specified email address for account linking using the enabled authentication provider. + + ## Parameters + - `email`: The email address to send the OTP to + - `ip`: The IP address of the requester + + ## Returns + - `:ok` if the OTP was sent successfully + - `{:error, String.t()}` if an account with the given email already exists + or sending failed + - `:error` if there was an unexpected error + - `{:enabled, false}` if no authentication provider is enabled + - `{:format, :email}` if the email format is invalid + """ + @spec send_otp_for_linking(String.t(), String.t()) :: + :ok | {:error, String.t()} | :error | {:enabled, false} | {:format, :email} + def send_otp_for_linking(email, ip) do + with {:ok, module} <- responsible_module() do + module.send_otp_for_linking(email, ip) + end + end + + @doc """ + Confirms a one-time password and retrieves authentication data for the given email using the enabled authentication provider. 
+ + ## Parameters + - `email`: The email address associated with the OTP + - `otp`: The one-time password to confirm + - `ip`: The IP address of the requester + + ## Returns + - `{:ok, Auth.t()}` if the OTP is confirmed successfully, where `Auth.t()` + contains the user's authentication data including UID, provider, strategy, + info, credentials, and extra information + - `{:error, String.t()}` if confirmation failed with a description of the + error + - `:error` if there was an unexpected error + - `{:enabled, false}` if no authentication provider is enabled + """ + @spec confirm_otp(String.t(), String.t(), String.t()) :: + {:ok, Auth.t()} | {:error, String.t()} | :error | {:enabled, false} + def confirm_otp(email, otp, ip) do + with {:ok, module} <- responsible_module() do + module.confirm_otp_and_get_auth(email, otp, ip) + end + end + + @doc """ + Links an email address to an existing user account by verifying a one-time password using the enabled authentication provider. + + ## Parameters + - `user`: The session map of the existing user account; the account must not + have an email linked (`email: nil`) for the linking to proceed + - `email`: The email address to link to the account + - `otp`: The one-time password for verification + - `ip`: The IP address of the requester + + ## Returns + - `{:ok, Auth.t()}` if the email was successfully linked, where `Auth.t()` + contains the user's authentication data including UID, provider, strategy, + info, credentials, and extra information + - `{:error, String.t()}` if the account already has an email linked, an + account with the given email already exists, the OTP is wrong or expired, + or linking failed with a description of the error + - `:error` if there was an unexpected error + - `{:enabled, false}` if no authentication provider is enabled + """ + @spec link_email(Identity.session(), String.t(), String.t(), String.t()) :: + {:ok, Auth.t()} | {:error, String.t()} | :error | {:enabled, false} + def link_email(user, 
email, otp, ip) do + with {:ok, module} <- responsible_module() do + module.link_email(user, email, otp, ip) + end + end + + @doc """ + Generates a Sign-In with Ethereum (SIWE) message for the given address. + + This function creates a SIWE message with a unique nonce, caches the nonce, + and returns the formatted message string. + + ## Parameters + - `address`: The Ethereum address for which to generate the SIWE message + + ## Returns + - `{:ok, String.t()}` containing the generated SIWE message + - `{:error, "Misconfiguration detected, please contact support."}` if the + nonce could not be cached due to a Redis configuration problem + - `{:error, String.t()}` if the SIWE message could not be formatted + """ + @spec generate_siwe_message(Hash.Address.t()) :: {:ok, String.t()} | {:error, String.t()} + def generate_siwe_message(address_hash) do + checksum_address = Address.checksum(address_hash) + nonce = Siwe.generate_nonce() + {int_chain_id, _} = Integer.parse(Application.get_env(:block_scout_web, :chain_id)) + + message = %Siwe.Message{ + domain: Helper.get_app_host(), + address: checksum_address, + statement: Application.get_env(:explorer, Account)[:siwe_message], + uri: + Application.get_env(:block_scout_web, BlockScoutWeb.Endpoint)[:url][:scheme] <> + "://" <> Helper.get_app_host(), + version: "1", + chain_id: int_chain_id, + nonce: nonce, + issued_at: DateTime.utc_now() |> DateTime.to_iso8601(), + expiration_time: DateTime.utc_now() |> DateTime.add(300, :second) |> DateTime.to_iso8601() + } + + with {:cache, {:ok, _nonce}} <- {:cache, cache_nonce_for_address(nonce, checksum_address)}, + {:message, {:ok, message}} <- {:message, Siwe.to_str(message)} do + {:ok, message} + else + {:cache, {:error, error}} -> + Logger.error("Error while caching nonce: #{inspect(error)}") + {:error, @misconfiguration_detected} + + {:message, {:error, error}} -> + Logger.error("Error while generating Sign in with Ethereum Message: #{inspect(error)}") + {:error, error} + end + end + 
+ @doc """ + Verifies a Sign-In with Ethereum (SIWE) message and signature, then finds or + creates the corresponding web3 user account. + + The message is parsed and validated against the stored nonce for the signing + address. A nonce must have been previously generated via `generate_siwe_message/1` + and is consumed (deleted from cache) upon successful verification. + + ## Parameters + - `message`: The raw SIWE message string to verify + - `signature`: The hex-encoded EIP-191 signature produced by signing `message` + + ## Returns + - `{:ok, Auth.t()}` if the message and signature are valid and the user was + found or created successfully + - `{:error, "Request Sign in with Ethereum message via /api/account/v2/siwe_message"}` + if no nonce exists for the signing address (i.e. a SIWE message was never + requested) + - `{:error, "Wrong nonce in message"}` if the nonce in the message does not + match the cached nonce for the address + - `{:error, String.t()}` if signature parsing or user lookup/creation failed + - `:error` if there was an unexpected error + - `{:enabled, false}` if no authentication provider is enabled + """ + @spec verify_siwe_message(String.t(), String.t()) :: + {:ok, Auth.t()} | {:error, String.t()} | :error | {:enabled, false} + def verify_siwe_message(message, signature) do + with {:module, {:ok, module}} <- {:module, responsible_module()}, + {:signature, {:ok, %{nonce: nonce, address: address}}} <- + {:signature, message |> String.trim() |> Siwe.parse_if_valid(signature)}, + {:nonce, {:ok, ^nonce}} <- {:nonce, get_nonce_for_address(address)} do + module.find_or_create_web3_user(address, signature) + else + {:nonce, :not_found} -> + {:error, @request_siwe_message} + + {:nonce, {:ok, _}} -> + {:error, @wrong_nonce} + + {_step, error} -> + error + end + end + + @doc """ + Links an Ethereum address to an existing user account by verifying a SIWE + message and signature. 
+
+  The message is parsed and validated against the stored nonce for the signing
+  address, in the same way as `verify_siwe_message/2`. On success, the verified
+  address is associated with the given user in the active authentication provider.
+
+  ## Parameters
+  - `user_id`: The ID of the existing user account to link the address to
+  - `message`: The raw SIWE message string to verify
+  - `signature`: The hex-encoded EIP-191 signature produced by signing `message`
+
+  ## Returns
+  - `{:ok, Auth.t()}` if the message and signature are valid and the address was
+    successfully linked to the user
+  - `{:error, "Wrong nonce in message"}` if the nonce in the message does not
+    match the cached nonce for the signing address
+  - `{:error, "Request Sign in with Ethereum message via /api/account/v2/siwe_message"}`
+    if no nonce is found for the address (i.e. a SIWE message was never
+    requested); Redis errors instead return `:error`
+  - `{:error, String.t()}` if signature parsing or the linking operation failed
+  - `:error` if there was an unexpected error
+  - `{:enabled, false}` if no authentication provider is enabled
+  """
+  @spec link_address(String.t(), String.t(), String.t()) ::
+          {:ok, Auth.t()} | {:error, String.t()} | :error | {:enabled, false}
+  def link_address(user_id, message, signature) do
+    with {:module, {:ok, module}} <- {:module, responsible_module()},
+         {:signature, {:ok, %{nonce: nonce, address: address}}} <-
+           {:signature, message |> String.trim() |> Siwe.parse_if_valid(signature)},
+         {:nonce, {:ok, ^nonce}} <- {:nonce, get_nonce_for_address(address)} do
+      module.link_address(user_id, address)
+    else
+      {:nonce, {:ok, _}} ->
+        {:error, @wrong_nonce}
+
+      {:nonce, :not_found} ->
+        {:error, @request_siwe_message}
+
+      {:nonce, error} ->
+        Logger.error("Error while retrieving nonce for address: #{inspect(error)}")
+        :error
+
+      {_step, error} ->
+        error
+    end
+  end
+
+  @doc """
+  Authenticates a user using a Dynamic-issued JWT token.
+ + ## Parameters + - `token`: A JWT token issued by the Dynamic authentication service + + ## Returns + - `{:ok, Auth.t()}` if the token is valid and the user was found or created + successfully + - `{:error, String.t()}` if token validation or user lookup failed + - `:error` if there was an unexpected error + """ + @spec authenticate_via_dynamic(String.t()) :: {:ok, Auth.t()} | {:error, String.t()} | :error | {:enabled, false} + def authenticate_via_dynamic(token) do + Dynamic.get_auth_from_token(token) + end + + defp cache_nonce_for_address(nonce, address_hash) do + case Redix.command(:redix, [ + "SET", + Helper.redis_key(String.downcase(address_hash) <> "siwe_nonce"), + nonce, + "EX", + 300 + ]) do + {:ok, _} -> + {:ok, nonce} + + error -> + Logger.error("Error while caching nonce: #{inspect(error)}") + {:error, "Redis configuration problem, please contact support."} + end + end + + defp get_nonce_for_address(address_hash) do + cookie_key = Helper.redis_key(String.downcase(address_hash) <> "siwe_nonce") + + case Redix.command(:redix, ["GETDEL", cookie_key]) do + {:ok, nil} -> + :not_found + + {:ok, nonce} -> + {:ok, nonce} + + error -> + Logger.error("Error while consuming nonce for address: #{inspect(error)}") + {:error, "Redis configuration problem, please contact support."} + end + end + + defp responsible_module do + cond do + Auth0.enabled?() -> {:ok, Auth0} + Keycloak.enabled?() -> {:ok, Keycloak} + true -> {:enabled, false} + end + end +end diff --git a/apps/explorer/lib/explorer/account/identity.ex b/apps/explorer/lib/explorer/account/identity.ex index f4be04b8538d..40e08cce9c1f 100644 --- a/apps/explorer/lib/explorer/account/identity.ex +++ b/apps/explorer/lib/explorer/account/identity.ex @@ -31,7 +31,7 @@ defmodule Explorer.Account.Identity do typed_schema "account_identities" do field(:uid_hash, Cloak.Ecto.SHA256) :: binary() | nil field(:uid, Explorer.Encrypted.Binary, null: false) - field(:email, Explorer.Encrypted.Binary, null: false) + 
field(:email, Explorer.Encrypted.Binary, null: true) field(:name, :string, virtual: true) field(:nickname, :string, virtual: true) field(:address_hash, Hash.Address, virtual: true) @@ -53,6 +53,7 @@ defmodule Explorer.Account.Identity do |> cast(attrs, [:uid, :email, :name, :nickname, :avatar, :verification_email_sent_at, :otp_sent_at]) |> validate_required([:uid]) |> put_hashed_fields() + |> unique_constraint(:uid_hash, name: :account_identities_uid_hash_index) end defp put_hashed_fields(changeset) do @@ -138,7 +139,8 @@ defmodule Explorer.Account.Identity do |> Repo.account_repo().update() end - defp new_identity(auth) do + @spec new_identity(Auth.t()) :: t() + def new_identity(auth) do %__MODULE__{ uid: auth.uid, uid_hash: auth.uid, @@ -335,7 +337,8 @@ defmodule Explorer.Account.Identity do - A string representation of the Ethereum address hash, or nil if not found. """ @spec address_hash_from_auth(Auth.t()) :: String.t() | nil - def address_hash_from_auth(%Auth{provider: :dynamic, extra: %Extra{raw_info: %{address_hash: address_hash}}}) do + def address_hash_from_auth(%Auth{provider: provider, extra: %Extra{raw_info: %{address_hash: address_hash}}}) + when provider in ~w(dynamic keycloak)a do address_hash end diff --git a/apps/explorer/lib/explorer/helper.ex b/apps/explorer/lib/explorer/helper.ex index 9ee77129ebfe..3ee9b893fa2b 100644 --- a/apps/explorer/lib/explorer/helper.ex +++ b/apps/explorer/lib/explorer/helper.ex @@ -2,14 +2,15 @@ defmodule Explorer.Helper do @moduledoc """ Auxiliary common functions. 
""" - require Logger + + import Ecto.Query + import Explorer.Chain.SmartContract, only: [burn_address_hash_string: 0] alias ABI.TypeDecoder alias Explorer.Chain alias Explorer.Chain.{Address.Reputation, Address.ScamBadgeToAddress, Data, Hash, Wei} - import Ecto.Query - import Explorer.Chain.SmartContract, only: [burn_address_hash_string: 0] + require Logger @max_safe_integer round(:math.pow(2, 63)) - 1 @@ -734,4 +735,27 @@ defmodule Explorer.Helper do end def process_rpc_response(response, _node, _fallback), do: response + + @doc """ + Generates a key from chain_id and a given string for storing in Redis. + + This function combines the chain_id (if available) with the provided string to + create a unique key for Redis storage. + + ## Parameters + - `string`: The string to be combined with the chain_id + + ## Returns + - `String.t()` representing the generated key + """ + @spec redis_key(String.t()) :: String.t() + def redis_key(key) do + chain_id = Application.get_env(:block_scout_web, :chain_id) + + if chain_id do + chain_id <> "_" <> key + else + key + end + end end diff --git a/apps/explorer/lib/explorer/third_party_integrations/auth0.ex b/apps/explorer/lib/explorer/third_party_integrations/auth0.ex index f9d2dab8f537..fe25e14f225d 100644 --- a/apps/explorer/lib/explorer/third_party_integrations/auth0.ex +++ b/apps/explorer/lib/explorer/third_party_integrations/auth0.ex @@ -1,19 +1,17 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0 do @moduledoc """ - Module for fetching jwt Auth0 Management API (https://auth0.com/docs/api/management/v2) jwt + Auth0 Management REST API client for user management. 
""" require Logger - alias Explorer.{Account, Helper, HttpClient} - alias Explorer.Account.Identity + alias Explorer.Account.{Authentication, Identity} + alias Explorer.HttpClient alias Explorer.ThirdPartyIntegrations.Auth0.Internal alias Explorer.ThirdPartyIntegrations.Dynamic - alias Ueberauth.Auth alias Ueberauth.Strategy.Auth0.OAuth - @request_siwe_message "Request Sign in with Ethereum message via /api/account/v2/siwe_message" - @wrong_nonce "Wrong nonce in message" - @misconfiguration_detected "Misconfiguration detected, please contact support." + @behaviour Authentication + @json_content_type [{"Content-type", "application/json"}] @spec enabled? :: boolean() @@ -65,29 +63,6 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0 do end end - @doc """ - Generates a key from chain_id and cookie hash for storing in Redis. - - This function combines the chain_id (if available) with the provided hash to - create a unique key for Redis storage. - - ## Parameters - - `hash`: The hash to be combined with the chain_id - - ## Returns - - `String.t()` representing the generated key - """ - @spec cookie_key(binary) :: String.t() - def cookie_key(hash) do - chain_id = Application.get_env(:block_scout_web, :chain_id) - - if chain_id do - chain_id <> "_" <> hash - else - hash - end - end - defp cache_token(token, ttl) do Redix.command(:redix, ["SET", Internal.redis_key(), token, "EX", ttl]) token @@ -107,8 +82,9 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0 do - `:ok` if the OTP was sent successfully - `{:error, String.t()}` error with the description - `:error` if there was an unexpected error + - `{:format, :email}` if the email format is invalid """ - @spec send_otp_for_linking(String.t(), String.t()) :: :error | :ok | {:error, String.t()} + @impl Authentication def send_otp_for_linking(email, ip) do case Internal.find_users_by_email(email) do {:ok, []} -> @@ -138,7 +114,7 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0 do - `:error` if there was an unexpected error 
- `{:interval, integer()}` if the user need to wait before sending the OTP """ - @spec send_otp(String.t(), String.t()) :: :error | :ok | {:interval, integer()} + @impl Authentication def send_otp(email, ip) do case Internal.find_users_by_email(email) do {:ok, []} -> @@ -169,8 +145,7 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0 do - `{:error, String.t()}` error with the description - `:error` if there was an unexpected error """ - @spec link_email(Identity.session(), String.t(), String.t(), String.t()) :: - :error | {:ok, Auth.t()} | {:error, String.t()} + @impl Authentication def link_email(%{uid: user_id_without_email, email: nil}, email, otp, ip) do case Internal.find_users_by_email(email) do {:ok, []} -> @@ -206,7 +181,7 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0 do - `{:error, String.t()}` error with the description - `:error` if there was an unexpected error """ - @spec confirm_otp_and_get_auth(String.t(), String.t(), String.t()) :: :error | {:error, String.t()} | {:ok, Auth.t()} + @impl Authentication def confirm_otp_and_get_auth(email, otp, ip) do with {:ok, token} <- Internal.confirm_otp(email, otp, ip), {:ok, %{"sub" => user_id} = user} <- Internal.get_user_from_token(token), @@ -241,86 +216,29 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0 do do: Internal.update_session_with_address_hash(session) @doc """ - Generates a Sign-In with Ethereum (SIWE) message for the given address. - - This function creates a SIWE message with a unique nonce, caches the nonce, - and returns the formatted message string. 
- - ## Parameters - - `address`: The Ethereum address for which to generate the SIWE message - - ## Returns - - `{:ok, String.t()}` containing the generated SIWE message - - `{:error, String.t()}` error with the description - """ - @spec generate_siwe_message(String.t()) :: {:ok, String.t()} | {:error, String.t()} - def generate_siwe_message(address) do - nonce = Siwe.generate_nonce() - {int_chain_id, _} = Integer.parse(Application.get_env(:block_scout_web, :chain_id)) - - message = %Siwe.Message{ - domain: Helper.get_app_host(), - address: address, - statement: Application.get_env(:explorer, Account)[:siwe_message], - uri: - Application.get_env(:block_scout_web, BlockScoutWeb.Endpoint)[:url][:scheme] <> - "://" <> Helper.get_app_host(), - version: "1", - chain_id: int_chain_id, - nonce: nonce, - issued_at: DateTime.utc_now() |> DateTime.to_iso8601(), - expiration_time: DateTime.utc_now() |> DateTime.add(300, :second) |> DateTime.to_iso8601() - } - - with {:cache, {:ok, _nonce}} <- {:cache, Internal.cache_nonce_for_address(nonce, address)}, - {:message, {:ok, message}} <- {:message, Siwe.to_str(message)} do - {:ok, message} - else - {:cache, {:error, error}} -> - Logger.error("Error while caching nonce: #{inspect(error)}") - {:error, @misconfiguration_detected} - - {:message, {:error, error}} -> - Logger.error("Error while generating Sign in with Ethereum Message: #{inspect(error)}") - {:error, error} - end - end - - @doc """ - Links an Ethereum address to an existing user account. + Links a web3 wallet address to an existing user account. - This function verifies the SIWE message and signature, checks for existing - users with the same address, and updates the user's account with the new - address. + Checks that no other account is already associated with the given address, + then updates the user's Auth0 profile with the address and returns updated + authentication information. 
## Parameters - - `user_id`: The ID of the existing user account - - `message`: The SIWE message - - `signature`: The signature of the SIWE message + - `user_id`: The Auth0 user ID of the account to update. + - `address`: The web3 wallet address to associate with the account. ## Returns - - `{:ok, Auth.t()}` if the address was successfully linked - - `{:error, String.t()}` error with the description - - `:error` if there was an unexpected error + - `{:ok, Auth.t()}` if the address was successfully linked. + - `{:error, "Account with this address already exists"}` if another account + is already using the given address. + - `{:error, String.t()}` if a known error occurs. + - `:error` if an unexpected error occurs. """ - @spec link_address(String.t(), String.t(), String.t()) :: :error | {:error, String.t()} | {:ok, Auth.t()} - def link_address(user_id, message, signature) do - with {:signature, {:ok, %{nonce: nonce, address: address}}} <- - {:signature, message |> String.trim() |> Siwe.parse_if_valid(signature)}, - {:nonce, {:ok, ^nonce}} <- {:nonce, Internal.get_nonce_for_address(address)}, - {:user, {:ok, []}} <- {:user, Internal.find_users_by_web3_address(address)}, + @impl Authentication + def link_address(user_id, address) do + with {:user, {:ok, []}} <- {:user, Internal.find_users_by_web3_address(address)}, {:ok, user} <- Internal.update_user_with_web3_address(user_id, address) do {:ok, Internal.create_auth(user)} else - {:nonce, {:ok, _}} -> - {:error, @wrong_nonce} - - {:nonce, _} -> - {:error, @request_siwe_message} - - {:signature, error} -> - error - {:user, {:ok, _users}} -> {:error, "Account with this address already exists"} @@ -333,36 +251,27 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0 do end @doc """ - Authenticates a user using a Sign-In with Ethereum (SIWE) message and signature. + Finds an existing user by web3 wallet address or creates a new one. 
- This function verifies the SIWE message and signature, finds or creates a user - associated with the Ethereum address, and returns the authentication information. + Delegates to the Auth0 backend to locate a user whose profile contains the + given address. If no user is found, a new Auth0 account is created using the + address as the username and the cryptographic signature as the password. + On success, returns authentication information for the resolved user. ## Parameters - - `message`: The SIWE message - - `signature`: The signature of the SIWE message + - `address`: The web3 wallet address used to identify or create the user. + - `signature`: The cryptographic signature used as the password when creating + a new user. ## Returns - - `{:ok, Auth.t()}` if authentication is successful - - `{:error, String.t()}` error with the description - - `:error` if there was an unexpected error + - `{:ok, Auth.t()}` if the user was found or successfully created. + - `{:error, String.t()}` if a known error occurs. + - `:error` if an unexpected error occurs. 
""" - @spec get_auth_with_web3(String.t(), String.t()) :: :error | {:error, String.t()} | {:ok, Auth.t()} - def get_auth_with_web3(message, signature) do - with {:signature, {:ok, %{nonce: nonce, address: address}}} <- - {:signature, message |> String.trim() |> Siwe.parse_if_valid(signature)}, - {:nonce, {:ok, ^nonce}} <- {:nonce, Internal.get_nonce_for_address(address)}, - {:user, {:ok, user}} <- {:user, Internal.process_web3_user(address, signature)} do + @impl Authentication + def find_or_create_web3_user(address, signature) do + with {:ok, user} <- Internal.process_web3_user(address, signature) do {:ok, Internal.create_auth(user)} - else - {:nonce, {:ok, nil}} -> - {:error, @request_siwe_message} - - {:nonce, {:ok, _}} -> - {:error, @wrong_nonce} - - {_step, error} -> - error end end end diff --git a/apps/explorer/lib/explorer/third_party_integrations/auth0/internal.ex b/apps/explorer/lib/explorer/third_party_integrations/auth0/internal.ex index 2850d82fb771..1222576908c3 100644 --- a/apps/explorer/lib/explorer/third_party_integrations/auth0/internal.ex +++ b/apps/explorer/lib/explorer/third_party_integrations/auth0/internal.ex @@ -19,7 +19,6 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0.Internal do alias Explorer.{Account, Helper, Repo} alias Explorer.Account.Identity - alias Explorer.Chain.Address alias Explorer.ThirdPartyIntegrations.Auth0 alias Explorer.ThirdPartyIntegrations.Auth0.{Legacy, Migrated} alias OAuth2.{AccessToken, Client} @@ -159,7 +158,7 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0.Internal do - `:error`: If there was an error in the process - `{:interval, integer()}`: If OTP was recently sent and the resend interval hasn't elapsed """ - @spec handle_existing_user(map(), String.t(), String.t()) :: :ok | :error | {:interval, integer()} + @spec handle_existing_user(map(), String.t(), String.t()) :: :ok | :error | {:interval, integer()} | {:format, :email} def handle_existing_user(user, email, ip) do user |> create_auth() @@ -405,42 
+404,6 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0.Internal do end end - @doc """ - Caches a nonce value for Sign-In with Ethereum (SIWE) authentication. - - Stores the provided nonce in Redis with an expiration time of 300 seconds (5 minutes), - using a key derived from the wallet address. This cached nonce is used for the SIWE - authentication flow to prevent replay attacks. - - ## Parameters - - `nonce`: The random nonce value to cache - - `address`: The Ethereum wallet address associated with the nonce - - ## Returns - - `{:ok, nonce}`: If the nonce was successfully cached - - `{:error, reason}`: If there was an error caching the nonce - """ - @spec cache_nonce_for_address(nonce, String.t()) :: - {:ok, nonce} | {:error, atom() | Redix.Error.t() | Redix.ConnectionError.t()} - when nonce: String.t() - def cache_nonce_for_address(nonce, address) do - case Redix.command(:redix, ["SET", Auth0.cookie_key(address <> "siwe_nonce"), nonce, "EX", 300]) do - {:ok, _} -> {:ok, nonce} - error -> error - end - end - - def get_nonce_for_address(address_hash) do - cookie_key = Auth0.cookie_key(Address.checksum(address_hash) <> "siwe_nonce") - - with {:get, {:ok, nonce}} <- {:get, Redix.command(:redix, ["GET", cookie_key])}, - {:del, {:ok, _}} <- {:del, Redix.command(:redix, ["DEL", cookie_key])} do - {:ok, nonce} - else - _ -> {:error, "Redis configuration problem, please contact support."} - end - end - @doc """ Searches for Auth0 users associated with a specific web3 wallet address. 
diff --git a/apps/explorer/lib/explorer/third_party_integrations/dynamic.ex b/apps/explorer/lib/explorer/third_party_integrations/dynamic.ex index ba89a8be19de..abcd45708093 100644 --- a/apps/explorer/lib/explorer/third_party_integrations/dynamic.ex +++ b/apps/explorer/lib/explorer/third_party_integrations/dynamic.ex @@ -15,6 +15,8 @@ defmodule Explorer.ThirdPartyIntegrations.Dynamic do alias Ueberauth.Auth alias Ueberauth.Auth.{Extra, Info} + require Logger + @doc """ Authenticates a user by verifying a JWT token and extracting identity information from its claims. @@ -35,14 +37,18 @@ defmodule Explorer.ThirdPartyIntegrations.Dynamic do - `{:error, String.t()}` if token verification fails or additional authentication is required. """ - @spec get_auth_from_token(String.t()) :: {:ok, Auth.t()} | {:error, String.t()} + @spec get_auth_from_token(String.t()) :: {:ok, Auth.t()} | {:error, String.t()} | {:enabled, false} def get_auth_from_token(token) do with {:enabled, true} <- {:enabled, Application.get_env(:explorer, __MODULE__)[:enabled]}, {:ok, claims} <- Token.verify_and_validate(token) do create_auth(claims) else - {:enabled, false} -> {:error, "Dynamic integration is disabled"} - {:error, reason} -> {:error, inspect(reason)} + {:error, reason} -> + Logger.error("Error while verifying token: #{inspect(reason)}") + {:error, "Invalid token"} + + not_enabled -> + not_enabled end end diff --git a/apps/explorer/lib/explorer/third_party_integrations/keycloak.ex b/apps/explorer/lib/explorer/third_party_integrations/keycloak.ex new file mode 100644 index 000000000000..74ec7e6b7e03 --- /dev/null +++ b/apps/explorer/lib/explorer/third_party_integrations/keycloak.ex @@ -0,0 +1,491 @@ +defmodule Explorer.ThirdPartyIntegrations.Keycloak do + @moduledoc """ + Keycloak Admin REST API client for user management. + Mirrors Auth0 Management API calls. 
+ """ + + use Utils.RuntimeEnvHelper, + domain: [:explorer, [__MODULE__, :domain]], + realm: [:explorer, [__MODULE__, :realm]], + client_id: [:explorer, [__MODULE__, :client_id]], + client_secret: [:explorer, [__MODULE__, :client_secret]], + otp_template: [:explorer, [Explorer.Account, :sendgrid, :otp_template]], + otp_sender: [:explorer, [Explorer.Account, :sendgrid, :sender]], + email_webhook_url: [:explorer, [__MODULE__, :email_webhook_url]] + + import Bamboo.{Email, SendGridHelper} + + alias Explorer.Account.Authentication + alias Explorer.{Helper, HttpClient, Mailer, Vault} + alias Ueberauth.Auth + + require Logger + + @behaviour Authentication + + @json_headers [{"content-type", "application/json"}] + + @otp_length 6 + @otp_ttl_seconds 300 + @max_otp_attempts 3 + + @spec enabled?() :: boolean() + def enabled? do + Enum.all?([domain(), realm(), client_id(), client_secret()], &(&1 not in [nil, ""])) + end + + @impl Authentication + def send_otp(email, _ip) do + otp = generate_otp() + + with :ok <- store_otp(email, otp), + :ok <- deliver_otp_email(email, otp) do + :ok + else + error -> + Logger.error("Error while sending otp: #{inspect(error)}") + :error + end + end + + @impl Authentication + def confirm_otp_and_get_auth(email, otp, _ip) do + case verify_otp(email, otp) do + :ok -> find_or_create_email_user(email) + :not_found -> {:error, "Verification code has expired."} + error -> error + end + end + + @impl Authentication + def send_otp_for_linking(email, ip) do + case find_users_by_email(email) do + {:ok, []} -> send_otp(email, ip) + {:ok, [_ | _]} -> {:error, "Account with this email already exists"} + error -> error + end + end + + @impl Authentication + def link_email(%{uid: user_id, email: nil}, email, otp, _ip) do + case verify_otp(email, otp) do + :ok -> + with :ok <- + "#{users_path()}/#{user_id}" + |> admin_put(%{email: email, emailVerified: true}) + |> handle_update("Failed to link email to user"), + {:ok, user} <- get_user(user_id) do + 
send_registration_webhook(email) + {:ok, create_auth(user)} + end + + :not_found -> + {:error, "Verification code has expired."} + + :error -> + :error + + {:error, _} = error -> + error + end + end + + def link_email(%{uid: _user_id, email: _}, _email, _otp, _ip) do + {:error, "User already has an email linked"} + end + + @impl Authentication + def find_or_create_web3_user(address_hash, _signature) do + case find_users_by_address(address_hash) do + {:ok, []} -> + with {:ok, user_id} <- create_web3_user(address_hash), + {:ok, user} <- get_user(user_id) do + {:ok, create_auth(user, address_hash)} + end + + {:ok, [user]} -> + {:ok, create_auth(user, address_hash)} + + {:ok, _} -> + {:error, "Multiple users with the same address found"} + + error -> + error + end + end + + @impl Authentication + def link_address(user_id, address_hash) do + case find_users_by_address(address_hash) do + {:ok, []} -> + link_address_to_user(user_id, address_hash) + + {:ok, _} -> + {:error, "Account with this address already exists"} + + error -> + error + end + end + + defp find_or_create_email_user(email) do + case find_users_by_email(email) do + {:ok, []} -> + with {:ok, user_id} <- create_email_user(email), + send_registration_webhook(email), + {:ok, user} <- get_user(user_id) do + {:ok, create_auth(user)} + end + + {:ok, [user]} -> + {:ok, create_auth(user)} + + {:ok, _} -> + {:error, "Multiple users with the same email found"} + + error -> + error + end + end + + @doc false + def find_users_by_email(email) do + case admin_get(users_path(), %{email: email, exact: true}) do + {:ok, %{status_code: 200, body: body}} -> {:ok, Jason.decode!(body)} + {:ok, %{status_code: 404}} -> {:ok, []} + error -> handle_error(error, "Failed to search user by email") + end + end + + @doc false + def find_users_by_address(address) do + case admin_get(users_path(), %{q: "address:#{String.downcase(address)}"}) do + {:ok, %{status_code: 200, body: body}} -> {:ok, Jason.decode!(body)} + {:ok, %{status_code: 
404}} -> {:ok, []} + error -> handle_error(error, "Failed to search user by address") + end + end + + defp create_email_user(email) do + create_user(%{ + username: email, + email: email, + emailVerified: true, + enabled: true + }) + end + + defp create_web3_user(address_hash) do + create_user(%{ + username: String.downcase(address_hash), + enabled: true, + attributes: %{address: [String.downcase(address_hash)]} + }) + end + + @doc false + def create_user(body) do + with {:ok, %{status_code: 201, headers: headers}} <- admin_post(users_path(), body), + {:location_id, location_id, _headers} when not is_nil(location_id) <- + {:location_id, extract_location_id(headers), headers} do + {:ok, location_id} + else + {:location_id, nil, headers} -> + Logger.error("Failed to extract user ID from Keycloak response headers: #{inspect(headers)}") + :error + + {:ok, %{status_code: 409}} -> + {:error, "User already exists"} + + error -> + handle_error(error, "Failed to create user") + end + end + + defp link_address_to_user(user_id, address_hash) do + with {:ok, user} <- get_user(user_id), + new_attributes = %{"address" => [String.downcase(address_hash)]}, + merged = + Map.update(user, "attributes", new_attributes, fn attributes -> + Map.merge(attributes, new_attributes) + end), + :ok <- + "#{users_path()}/#{user_id}" |> admin_put(merged) |> handle_update("Failed to link address to user"), + {:ok, user} <- get_user(user_id) do + {:ok, create_auth(user, address_hash)} + end + end + + @doc false + def get_user(user_id) do + case admin_get("#{users_path()}/#{user_id}") do + {:ok, %{status_code: 200, body: body}} -> {:ok, Jason.decode!(body)} + {:ok, %{status_code: 404}} -> {:error, "User not found"} + error -> handle_error(error, "Failed to get user") + end + end + + @doc false + def update_user(user_id, body) do + "#{users_path()}/#{user_id}" |> admin_put(body) |> handle_update("Failed to update user") + end + + defp admin_get(path, params \\ %{}) do + with {:ok, token} <- 
get_admin_token() do + HttpClient.get( + build_url(path), + auth_headers(token) ++ @json_headers, + params: params + ) + end + end + + defp admin_post(path, body) do + with {:ok, token} <- get_admin_token() do + HttpClient.post( + build_url(path), + Jason.encode!(body), + auth_headers(token) ++ @json_headers + ) + end + end + + defp admin_put(path, body) do + with {:ok, token} <- get_admin_token() do + HttpClient.request( + :put, + build_url(path), + auth_headers(token) ++ @json_headers, + Jason.encode!(body) + ) + end + end + + defp auth_headers(token), do: [{"authorization", "Bearer #{token}"}] + + defp get_admin_token do + with {:redix, {:ok, token}} when not is_nil(token) <- + {:redix, Redix.command(:redix, ["GET", admin_token_key()])}, + {:vault, {:ok, decrypted_token}} <- {:vault, Vault.decrypt(token)} do + {:ok, decrypted_token} + else + {:redix, _} -> + fetch_and_cache_admin_token() + + {:vault, error} -> + Logger.error("Failed to decrypt admin token from Redis: #{inspect(error)}") + {:error, :decryption_failed} + end + end + + defp fetch_and_cache_admin_token do + url = build_url("/realms/#{URI.encode(realm())}/protocol/openid-connect/token") + + body = + URI.encode_query(%{ + grant_type: "client_credentials", + client_id: client_id(), + client_secret: client_secret() + }) + + headers = [{"content-type", "application/x-www-form-urlencoded"}] + + case HttpClient.post(url, body, headers) do + {:ok, %{status_code: 200, body: resp_body}} -> + case Jason.decode(resp_body) do + {:ok, %{"access_token" => token, "expires_in" => ttl}} -> + Redix.command(:redix, ["SET", admin_token_key(), Vault.encrypt!(token), "EX", ttl - 1]) + {:ok, token} + + _ -> + :error + end + + other -> + Logger.error("Failed to obtain Keycloak admin token: #{inspect(other)}") + :error + end + end + + defp admin_token_key, do: "keycloak:#{client_id()}:admin_token" + + defp handle_error({:ok, response}, error_message) do + Logger.error("#{error_message}: status=#{response.status_code} 
body=#{response.body}") + :error + end + + defp handle_error({:error, reason}, error_message) do + Logger.error("#{error_message}: #{inspect(reason)}") + :error + end + + defp handle_error(:error, error_message) do + Logger.error("#{error_message}: unknown error") + :error + end + + defp handle_update(result, error_message) do + case result do + {:ok, %{status_code: 204}} -> :ok + {:ok, %{status_code: 404}} -> {:error, "User not found"} + {:ok, %{status_code: 409}} -> {:error, "Email already in use by another account"} + error -> handle_error(error, error_message) + end + end + + defp build_url(path) do + domain() + |> URI.parse() + |> URI.append_path(path) + |> URI.to_string() + end + + defp users_path, do: "/admin/realms/#{URI.encode(realm())}/users" + + defp extract_location_id(headers) do + Enum.find_value(headers, fn {key, value} -> + String.downcase(key) == "location" && value |> String.split("/") |> List.last() + end) + end + + defp otp_key(email), do: Helper.redis_key("#{client_id()}:otp:#{String.downcase(email)}") + defp otp_attempts_key(email), do: Helper.redis_key("#{client_id()}:otp_attempts:#{String.downcase(email)}") + + defp store_otp(email, otp) do + case Redix.command(:redix, ["SET", otp_key(email), Vault.encrypt!(otp), "EX", @otp_ttl_seconds]) do + {:ok, "OK"} -> + Redix.command(:redix, ["DEL", otp_attempts_key(email)]) + :ok + + error -> + Logger.error("Failed to store OTP in Redis: #{inspect(error)}") + :error + end + end + + defp verify_otp(email, otp) do + case fetch_otp(email) do + {:ok, ^otp} -> + delete_otp(email) + :ok + + {:ok, _} -> + increment_and_check_attempts(email) + + other -> + other + end + end + + defp fetch_otp(email) do + case Redix.command(:redix, ["GET", otp_key(email)]) do + {:ok, nil} -> + :not_found + + {:ok, value} -> + case Vault.decrypt(value) do + {:ok, otp} -> + {:ok, otp} + + {:error, reason} -> + Logger.error("Failed to decrypt OTP from Redis: #{inspect(reason)}") + :error + end + + {:error, reason} -> + 
Logger.error("Failed to fetch OTP from Redis: #{inspect(reason)}") + :error + end + end + + defp delete_otp(email) do + Redix.command(:redix, ["DEL", otp_key(email), otp_attempts_key(email)]) + end + + defp increment_and_check_attempts(email) do + key = otp_attempts_key(email) + + case Redix.pipeline(:redix, [["INCR", key], ["EXPIRE", key, @otp_ttl_seconds]]) do + {:ok, [attempts, _]} when attempts >= @max_otp_attempts -> + delete_otp(email) + {:error, "Too many wrong verification code attempts. Please request a new code."} + + {:ok, _} -> + {:error, "Wrong verification code."} + + {:error, reason} -> + Logger.error("Failed to increment OTP attempts in Redis: #{inspect(reason)}") + {:error, "Wrong verification code."} + end + end + + defp generate_otp do + 4 + |> :crypto.strong_rand_bytes() + |> :binary.decode_unsigned() + |> rem(round(:math.pow(10, @otp_length))) + |> Integer.to_string() + |> String.pad_leading(@otp_length, "0") + end + + defp deliver_otp_email(email, otp) do + email + |> compose_otp_email(otp) + |> deliver() + end + + defp compose_otp_email(to, otp) do + email = new_email(from: otp_sender(), to: to) + + email + |> with_template(otp_template()) + |> add_dynamic_field("otp", otp) + |> add_dynamic_field("ttl_minutes", div(@otp_ttl_seconds, 60)) + end + + defp deliver(email) do + case Mailer.deliver_now(email, response: false) do + {:ok, _email} -> + :ok + + {:error, error} -> + Logger.error("Failed to deliver OTP email: #{inspect(error)}") + :error + end + end + + defp send_registration_webhook(email), do: do_send_registration_webhook(email, email_webhook_url()) + + defp do_send_registration_webhook(email, webhook_url) when not is_nil(webhook_url) do + payload = + Jason.encode!(%{ + email: email, + name: email, + labels: [Helper.get_app_host()] + }) + + Task.start(fn -> + case HttpClient.post(webhook_url, payload, @json_headers) do + {:ok, _} -> :ok + {:error, reason} -> Logger.error("Registration webhook failed: #{inspect(reason)}") + end + end) + 
end + + defp do_send_registration_webhook(_email, nil), do: :ok + + defp create_auth(user, address_hash \\ nil) do + address_hash = address_hash || List.first(user["attributes"]["address"] || []) + + %Auth{ + uid: user["id"], + provider: :keycloak, + info: %Auth.Info{ + email: user["email"], + name: user["firstName"] && user["lastName"] && "#{user["firstName"]} #{user["lastName"]}", + nickname: user["username"] + }, + extra: %Auth.Extra{raw_info: %{address_hash: address_hash}} + } + end +end diff --git a/apps/explorer/lib/mix/tasks/auth0_to_keycloak_migrate.ex b/apps/explorer/lib/mix/tasks/auth0_to_keycloak_migrate.ex new file mode 100644 index 000000000000..f9a4639e9b89 --- /dev/null +++ b/apps/explorer/lib/mix/tasks/auth0_to_keycloak_migrate.ex @@ -0,0 +1,42 @@ +defmodule Mix.Tasks.Auth0ToKeycloakMigrate do + @moduledoc """ + Migrates users from Auth0 to Keycloak. + + Reads all account identities with Auth0 UIDs, fetches their Auth0 user data, + creates corresponding Keycloak users, and updates the identity UIDs. + + ## Usage + + mix auth0_to_keycloak_migrate [--dry-run] [--batch-size N] + + ## Options + + * `--dry-run` - Preview migration without making changes + * `--batch-size` - Number of users per batch (default: 50) + + ## Prerequisites + + Both Auth0 and Keycloak must be configured via environment variables. + Account access is automatically disabled during migration and re-enabled after. 
+ """ + + use Mix.Task + + alias Explorer.Account.Auth0ToKeycloakMigration + alias Mix.Task, as: MixTask + + @shortdoc "Migrate users from Auth0 to Keycloak" + + @impl MixTask + def run(args) do + {opts, _, _} = + OptionParser.parse(args, + switches: [dry_run: :boolean, batch_size: :integer], + aliases: [n: :dry_run, b: :batch_size] + ) + + MixTask.run("app.start") + + Auth0ToKeycloakMigration.run(opts) + end +end diff --git a/apps/utils/lib/utils/runtime_env_helper.ex b/apps/utils/lib/utils/runtime_env_helper.ex index 75193f4d41e4..5beaef30fb8a 100644 --- a/apps/utils/lib/utils/runtime_env_helper.ex +++ b/apps/utils/lib/utils/runtime_env_helper.ex @@ -38,8 +38,20 @@ defmodule Utils.RuntimeEnvHelper do # Generate the runtime env module name sibling_module = Module.concat(caller_module, "__RuntimeEnvs__") + # Resolve __MODULE__ in paths before injecting into sibling module + resolved_env_vars = + Enum.map(env_vars, fn {var_name, path} -> + resolved_path = + Macro.postwalk(path, fn + {:__MODULE__, _, _} -> caller_module + node -> node + end) + + {var_name, resolved_path} + end) + sibling_module_body = - for {var_name, path} <- env_vars do + for {var_name, path} <- resolved_env_vars do quote do def unquote(var_name)() do # credo:disable-for-next-line Credo.Check.Design.AliasUsage diff --git a/config/runtime.exs b/config/runtime.exs index b000138ab927..b3c7083431d1 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -663,6 +663,13 @@ config :explorer, Explorer.ThirdPartyIntegrations.Dynamic, config :explorer, Explorer.ThirdPartyIntegrations.Dynamic.Strategy, enabled: !is_nil(dynamic_env_id) +config :explorer, Explorer.ThirdPartyIntegrations.Keycloak, + domain: ConfigHelper.parse_url_env_var("ACCOUNT_KEYCLOAK_DOMAIN", nil, true), + realm: System.get_env("ACCOUNT_KEYCLOAK_REALM"), + client_id: System.get_env("ACCOUNT_KEYCLOAK_CLIENT_ID"), + client_secret: System.get_env("ACCOUNT_KEYCLOAK_CLIENT_SECRET"), + email_webhook_url: 
ConfigHelper.parse_url_env_var("ACCOUNT_KEYCLOAK_EMAIL_WEBHOOK_URL") + enabled? = ConfigHelper.parse_bool_env_var("MICROSERVICE_SC_VERIFIER_ENABLED", "true") # or "eth_bytecode_db" type = System.get_env("MICROSERVICE_SC_VERIFIER_TYPE", "sc_verifier") @@ -729,7 +736,8 @@ config :explorer, Explorer.Account, enabled: ConfigHelper.parse_bool_env_var("ACCOUNT_ENABLED"), sendgrid: [ sender: System.get_env("ACCOUNT_SENDGRID_SENDER"), - template: System.get_env("ACCOUNT_SENDGRID_TEMPLATE") + template: System.get_env("ACCOUNT_SENDGRID_TEMPLATE"), + otp_template: System.get_env("ACCOUNT_SENDGRID_OTP_TEMPLATE") ], verification_email_resend_interval: ConfigHelper.parse_time_env_var("ACCOUNT_VERIFICATION_EMAIL_RESEND_INTERVAL", "5m"), diff --git a/cspell.json b/cspell.json index f3cf38a0f65d..526572a5a466 100644 --- a/cspell.json +++ b/cspell.json @@ -295,6 +295,7 @@ "getblocknobytime", "getblockreward", "getcontractcreation", + "GETDEL", "getepoch", "getlogs", "getminedblocks", @@ -439,6 +440,7 @@ "multichain", "multiprotocol", "multis", + "multivalued", "munchos", "munknownc", "munknowne", @@ -522,6 +524,7 @@ "Posix", "postgrex", "Postrge", + "postwalk", "predeploy", "prederive", "prederived", diff --git a/docker-compose/envs/common-blockscout.env b/docker-compose/envs/common-blockscout.env index 1fe4cf4c247c..f21f1cb0d004 100644 --- a/docker-compose/envs/common-blockscout.env +++ b/docker-compose/envs/common-blockscout.env @@ -504,12 +504,18 @@ DECODE_NOT_A_CONTRACT_CALLS=true # ACCOUNT_SENDGRID_API_KEY= # ACCOUNT_SENDGRID_SENDER= # ACCOUNT_SENDGRID_TEMPLATE= +# ACCOUNT_SENDGRID_OTP_TEMPLATE= # ACCOUNT_VERIFICATION_EMAIL_RESEND_INTERVAL= # ACCOUNT_OTP_RESEND_INTERVAL= # ACCOUNT_PRIVATE_TAGS_LIMIT=2000 # ACCOUNT_WATCHLIST_ADDRESSES_LIMIT=15 # ACCOUNT_SIWE_MESSAGE= # ACCOUNT_DYNAMIC_ENV_ID= +# ACCOUNT_KEYCLOAK_DOMAIN= +# ACCOUNT_KEYCLOAK_REALM= +# ACCOUNT_KEYCLOAK_CLIENT_ID= +# ACCOUNT_KEYCLOAK_CLIENT_SECRET= +# ACCOUNT_KEYCLOAK_EMAIL_WEBHOOK_URL= ACCOUNT_CLOAK_KEY= 
ACCOUNT_ENABLED=false ACCOUNT_REDIS_URL=redis://redis-db:6379 From d1b3f9a397c63c040b699ec54b971cb5b4d9e3ea Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Fri, 20 Mar 2026 10:58:28 +0300 Subject: [PATCH 27/42] fix: Fix pending ops migration overflow by adaptive batching and chunked inserts (#14135) --- .../chain/pending_operations_helper.ex | 50 ++++++++++++++++--- .../switch_pending_operations_test.exs | 50 +++++++++++++++++++ 2 files changed, 94 insertions(+), 6 deletions(-) diff --git a/apps/explorer/lib/explorer/chain/pending_operations_helper.ex b/apps/explorer/lib/explorer/chain/pending_operations_helper.ex index 1a6c49c55bf2..59a8db61c0fa 100644 --- a/apps/explorer/lib/explorer/chain/pending_operations_helper.ex +++ b/apps/explorer/lib/explorer/chain/pending_operations_helper.ex @@ -102,9 +102,13 @@ defmodule Explorer.Chain.PendingOperationsHelper do end defp from_blocks_to_transactions_function do + from_blocks_to_transactions_function(@blocks_batch_size) + end + + defp from_blocks_to_transactions_function(blocks_batch_size) do pbo_block_numbers_query = PendingBlockOperation - |> limit(@blocks_batch_size) + |> limit(^blocks_batch_size) |> select([pbo], pbo.block_number) case Repo.all(pbo_block_numbers_query) do @@ -119,16 +123,50 @@ defmodule Explorer.Chain.PendingOperationsHelper do |> Repo.all() |> Helper.add_timestamps() - Repo.insert_all(PendingTransactionOperation, pto_params, on_conflict: :nothing) + case insert_pending_transaction_operations(pto_params) do + :ok -> + delete_pending_block_operations(pbo_block_numbers) - PendingBlockOperation - |> where([pbo], pbo.block_number in ^pbo_block_numbers) - |> Repo.delete_all() + :continue - :continue + {:error, :too_many_parameters} when blocks_batch_size > 1 -> + from_blocks_to_transactions_function(max(div(blocks_batch_size, 2), 1)) + + {:error, :too_many_parameters} -> + Repo.safe_insert_all(PendingTransactionOperation, pto_params, on_conflict: :nothing) + 
delete_pending_block_operations(pbo_block_numbers) + + :continue + end end end + defp insert_pending_transaction_operations([]), do: :ok + + defp insert_pending_transaction_operations(pto_params) do + Repo.insert_all(PendingTransactionOperation, pto_params, on_conflict: :nothing) + :ok + rescue + error in Postgrex.QueryError -> + if too_many_parameters_error?(error) do + {:error, :too_many_parameters} + else + reraise error, __STACKTRACE__ + end + end + + defp delete_pending_block_operations(pbo_block_numbers) do + PendingBlockOperation + |> where([pbo], pbo.block_number in ^pbo_block_numbers) + |> Repo.delete_all() + end + + defp too_many_parameters_error?(%Postgrex.QueryError{message: message}) when is_binary(message) do + Regex.match?(~r/postgresql protocol can not handle \d+ parameters, the maximum is \d+/i, message) + end + + defp too_many_parameters_error?(_), do: false + @doc """ Generates a query to find pending block operations that match any of the given block hashes. diff --git a/apps/explorer/test/explorer/migrator/switch_pending_operations_test.exs b/apps/explorer/test/explorer/migrator/switch_pending_operations_test.exs index 43575036c1c3..1ce2a33ce0db 100644 --- a/apps/explorer/test/explorer/migrator/switch_pending_operations_test.exs +++ b/apps/explorer/test/explorer/migrator/switch_pending_operations_test.exs @@ -48,6 +48,56 @@ defmodule Explorer.Migrator.SwitchPendingOperationsTest do assert [_, _, _, _, _] = Repo.all(PendingTransactionOperation) end + test "from pbo to pto handles parameter overflow and still completes" do + block = insert(:block) + insert(:pending_block_operation, block_number: block.number, block_hash: block.hash) + + 5 + |> insert_list(:transaction) + |> with_block(block) + + json_rpc_config = Application.get_env(:explorer, :json_rpc_named_arguments) + + Application.put_env( + :explorer, + :json_rpc_named_arguments, + Keyword.put(json_rpc_config, :variant, EthereumJSONRPC.Geth) + ) + + geth_config = 
Application.get_env(:ethereum_jsonrpc, EthereumJSONRPC.Geth) + Application.put_env(:ethereum_jsonrpc, EthereumJSONRPC.Geth, Keyword.put(geth_config, :block_traceable?, false)) + + overflow_raised? = :atomics.new(1, []) + + :meck.new(Repo, [:passthrough]) + + :meck.expect(Repo, :insert_all, fn kind, elements, opts -> + if kind == PendingTransactionOperation and :atomics.get(overflow_raised?, 1) == 0 do + :atomics.put(overflow_raised?, 1, 1) + + raise Postgrex.QueryError, + message: "postgresql protocol can not handle 135090 parameters, the maximum is 65535" + else + :meck.passthrough([kind, elements, opts]) + end + end) + + on_exit(fn -> + try do + :meck.unload(Repo) + catch + _, _ -> :ok + end + end) + + SwitchPendingOperations.start_link([]) + Process.sleep(100) + + assert :atomics.get(overflow_raised?, 1) == 1 + assert [] = Repo.all(PendingBlockOperation) + assert [_, _, _, _, _] = Repo.all(PendingTransactionOperation) + end + test "from pto to pbo" do first_block = insert(:block) second_block = insert(:block) From 12995618664792012b48f6be81b7c96d7650215c Mon Sep 17 00:00:00 2001 From: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> Date: Fri, 20 Mar 2026 15:46:13 +0300 Subject: [PATCH 28/42] fix: authentication provider token redis key (#14137) --- .../lib/explorer/third_party_integrations/auth0.ex | 8 +++++--- .../lib/explorer/third_party_integrations/keycloak.ex | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/apps/explorer/lib/explorer/third_party_integrations/auth0.ex b/apps/explorer/lib/explorer/third_party_integrations/auth0.ex index fe25e14f225d..151166b32bcb 100644 --- a/apps/explorer/lib/explorer/third_party_integrations/auth0.ex +++ b/apps/explorer/lib/explorer/third_party_integrations/auth0.ex @@ -5,7 +5,7 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0 do require Logger alias Explorer.Account.{Authentication, Identity} - alias Explorer.HttpClient + alias Explorer.{Helper, HttpClient} alias 
Explorer.ThirdPartyIntegrations.Auth0.Internal alias Explorer.ThirdPartyIntegrations.Dynamic alias Ueberauth.Strategy.Auth0.OAuth @@ -32,7 +32,7 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0 do """ @spec get_m2m_jwt() :: nil | String.t() def get_m2m_jwt do - get_m2m_jwt_inner(Redix.command(:redix, ["GET", Internal.redis_key()])) + get_m2m_jwt_inner(Redix.command(:redix, ["GET", m2m_jwt_key()])) end defp get_m2m_jwt_inner({:ok, token}) when not is_nil(token), do: token @@ -64,10 +64,12 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0 do end defp cache_token(token, ttl) do - Redix.command(:redix, ["SET", Internal.redis_key(), token, "EX", ttl]) + Redix.command(:redix, ["SET", m2m_jwt_key(), token, "EX", ttl]) token end + defp m2m_jwt_key, do: Helper.redis_key(Internal.redis_key()) + @doc """ Sends a one-time password (OTP) for linking an email to an existing account. diff --git a/apps/explorer/lib/explorer/third_party_integrations/keycloak.ex b/apps/explorer/lib/explorer/third_party_integrations/keycloak.ex index 74ec7e6b7e03..4c015a77302b 100644 --- a/apps/explorer/lib/explorer/third_party_integrations/keycloak.ex +++ b/apps/explorer/lib/explorer/third_party_integrations/keycloak.ex @@ -307,7 +307,7 @@ defmodule Explorer.ThirdPartyIntegrations.Keycloak do end end - defp admin_token_key, do: "keycloak:#{client_id()}:admin_token" + defp admin_token_key, do: Helper.redis_key("keycloak:#{client_id()}:admin_token") defp handle_error({:ok, response}, error_message) do Logger.error("#{error_message}: status=#{response.status_code} body=#{response.body}") From 9ffbf6b12fca2713c27343291e213e92b55177c3 Mon Sep 17 00:00:00 2001 From: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Date: Fri, 20 Mar 2026 17:01:50 +0400 Subject: [PATCH 29/42] fix: Don't insert PTO for non-traceable transactions (#14133) --- .../chain/import/runner/transactions.ex | 1 + .../explorer/chain/pending_operations_helper.ex | 4 +++- apps/explorer/lib/explorer/chain/transaction.ex | 
15 +++++++++++++++ .../test/explorer/chain/transaction_test.exs | 13 +++++++++++++ .../lib/indexer/fetcher/internal_transaction.ex | 17 +---------------- .../internal_transaction/delete_queue.ex | 1 + .../fetcher/on_demand/internal_transaction.ex | 2 +- .../fetcher/internal_transaction_test.exs | 13 ------------- 8 files changed, 35 insertions(+), 31 deletions(-) diff --git a/apps/explorer/lib/explorer/chain/import/runner/transactions.ex b/apps/explorer/lib/explorer/chain/import/runner/transactions.ex index 71d80308394c..117302585f64 100644 --- a/apps/explorer/lib/explorer/chain/import/runner/transactions.ex +++ b/apps/explorer/lib/explorer/chain/import/runner/transactions.ex @@ -125,6 +125,7 @@ defmodule Explorer.Chain.Import.Runner.Transactions do sorted_pending_ops = inserted_transactions |> RangesHelper.filter_by_height_range(&RangesHelper.traceable_block_number?(&1.block_number)) + |> Transaction.filter_non_traceable_transactions() |> Enum.reject(&is_nil(&1.block_number)) |> Enum.map(&%{transaction_hash: &1.hash}) |> Enum.sort() diff --git a/apps/explorer/lib/explorer/chain/pending_operations_helper.ex b/apps/explorer/lib/explorer/chain/pending_operations_helper.ex index 59a8db61c0fa..32ac8f941be5 100644 --- a/apps/explorer/lib/explorer/chain/pending_operations_helper.ex +++ b/apps/explorer/lib/explorer/chain/pending_operations_helper.ex @@ -119,8 +119,10 @@ defmodule Explorer.Chain.PendingOperationsHelper do pto_params = Transaction |> where([t], t.block_number in ^pbo_block_numbers) - |> select([t], %{transaction_hash: t.hash}) + |> select([t], %{hash: t.hash, type: t.type}) |> Repo.all() + |> Transaction.filter_non_traceable_transactions() + |> Enum.map(&%{transaction_hash: &1.hash}) |> Helper.add_timestamps() case insert_pending_transaction_operations(pto_params) do diff --git a/apps/explorer/lib/explorer/chain/transaction.ex b/apps/explorer/lib/explorer/chain/transaction.ex index d22fd221eebc..0416b3b1861a 100644 --- 
a/apps/explorer/lib/explorer/chain/transaction.ex +++ b/apps/explorer/lib/explorer/chain/transaction.ex @@ -363,6 +363,8 @@ defmodule Explorer.Chain.Transaction do alias Explorer.Chain.SmartContract.Proxy.Models.Implementation + alias Explorer.Chain.Zilliqa.Helper, as: ZilliqaHelper + alias Explorer.SmartContract.SigProviderInterface @optional_attrs ~w(max_priority_fee_per_gas max_fee_per_gas block_hash block_number @@ -2338,6 +2340,19 @@ defmodule Explorer.Chain.Transaction do ) end + @zetachain_non_traceable_type 88 + @doc """ + Filters out transactions that are known to not have traceable internal transactions. + """ + @spec filter_non_traceable_transactions([__MODULE__.t() | map()]) :: [__MODULE__.t() | map()] + def filter_non_traceable_transactions(transactions) do + case Application.get_env(:explorer, :chain_type) do + :zetachain -> Enum.reject(transactions, &(Map.get(&1, :type) == @zetachain_non_traceable_type)) + :zilliqa -> Enum.reject(transactions, &ZilliqaHelper.scilla_transaction?(Map.get(&1, :type))) + _ -> transactions + end + end + if @chain_identity == {:optimism, :celo} do defp decode_remaining_transaction({nil, nil}, _, _, _, _), do: nil end diff --git a/apps/explorer/test/explorer/chain/transaction_test.exs b/apps/explorer/test/explorer/chain/transaction_test.exs index 6ac5e8bba1c7..0c21eb43f3da 100644 --- a/apps/explorer/test/explorer/chain/transaction_test.exs +++ b/apps/explorer/test/explorer/chain/transaction_test.exs @@ -1028,4 +1028,17 @@ defmodule Explorer.Chain.TransactionTest do |> Enum.reverse() end end + + describe "filter_non_traceable_transactions/1" do + test "does not raise when transaction params do not include type on zetachain" do + chain_type = Application.get_env(:explorer, :chain_type) + Application.put_env(:explorer, :chain_type, :zetachain) + + on_exit(fn -> Application.put_env(:explorer, :chain_type, chain_type) end) + + transaction_params = %{block_number: 13_393_871, hash: "0x123", index: 427} + + assert 
[transaction_params] == Transaction.filter_non_traceable_transactions([transaction_params]) + end + end end diff --git a/apps/indexer/lib/indexer/fetcher/internal_transaction.ex b/apps/indexer/lib/indexer/fetcher/internal_transaction.ex index a806a1e40bec..b60cbcf9f056 100644 --- a/apps/indexer/lib/indexer/fetcher/internal_transaction.ex +++ b/apps/indexer/lib/indexer/fetcher/internal_transaction.ex @@ -24,7 +24,6 @@ defmodule Indexer.Fetcher.InternalTransaction do alias Explorer.Chain alias Explorer.Chain.{Block, Hash, PendingBlockOperation, PendingTransactionOperation, Transaction} alias Explorer.Chain.Cache.{Accounts, Blocks} - alias Explorer.Chain.Zilliqa.Helper, as: ZilliqaHelper alias Indexer.{BufferedTask, Tracer} alias Indexer.Fetcher.InternalTransaction.Supervisor, as: InternalTransactionSupervisor alias Indexer.Transform.{AddressCoinBalances, Addresses, AddressTokenBalances} @@ -258,7 +257,7 @@ defmodule Indexer.Fetcher.InternalTransaction do defp fetch_internal_transactions_by_transactions(transactions, json_rpc_named_arguments) do transactions - |> filter_non_traceable_transactions() + |> Transaction.filter_non_traceable_transactions() |> Enum.map(¶ms/1) |> case do [] -> @@ -274,20 +273,6 @@ defmodule Indexer.Fetcher.InternalTransaction do end end - # TODO: should we cover this with tests? - @zetachain_non_traceable_type 88 - @doc """ - Filters out transactions that are known to not have traceable internal transactions. 
- """ - @spec filter_non_traceable_transactions([Transaction.t() | map()]) :: [Transaction.t() | map()] - def filter_non_traceable_transactions(transactions) do - case Application.get_env(:explorer, :chain_type) do - :zetachain -> Enum.reject(transactions, &(Map.get(&1, :type) == @zetachain_non_traceable_type)) - :zilliqa -> Enum.reject(transactions, &ZilliqaHelper.scilla_transaction?/1) - _ -> transactions - end - end - defp safe_import_internal_transaction(internal_transactions_params, block_numbers, data_type) do import_internal_transaction(internal_transactions_params, block_numbers, data_type) rescue diff --git a/apps/indexer/lib/indexer/fetcher/internal_transaction/delete_queue.ex b/apps/indexer/lib/indexer/fetcher/internal_transaction/delete_queue.ex index b708f9c96356..3b79120e32a6 100644 --- a/apps/indexer/lib/indexer/fetcher/internal_transaction/delete_queue.ex +++ b/apps/indexer/lib/indexer/fetcher/internal_transaction/delete_queue.ex @@ -92,6 +92,7 @@ defmodule Indexer.Fetcher.InternalTransaction.DeleteQueue do pto_params = transactions + |> Transaction.filter_non_traceable_transactions() |> Enum.map(&%{transaction_hash: &1.hash}) |> ExplorerHelper.add_timestamps() diff --git a/apps/indexer/lib/indexer/fetcher/on_demand/internal_transaction.ex b/apps/indexer/lib/indexer/fetcher/on_demand/internal_transaction.ex index 4e3a15d5ce5f..2f13955a29da 100644 --- a/apps/indexer/lib/indexer/fetcher/on_demand/internal_transaction.ex +++ b/apps/indexer/lib/indexer/fetcher/on_demand/internal_transaction.ex @@ -539,7 +539,7 @@ defmodule Indexer.Fetcher.OnDemand.InternalTransaction do Enum.reduce(block_numbers, [], fn block_number, acc_list -> block_number |> Transaction.get_transactions_of_block_number() - |> InternalTransactionFetcher.filter_non_traceable_transactions() + |> Transaction.filter_non_traceable_transactions() |> Enum.map( &%{ block_number: &1.block_number, diff --git a/apps/indexer/test/indexer/fetcher/internal_transaction_test.exs 
b/apps/indexer/test/indexer/fetcher/internal_transaction_test.exs index de6575d02237..0855a4c3d1fd 100644 --- a/apps/indexer/test/indexer/fetcher/internal_transaction_test.exs +++ b/apps/indexer/test/indexer/fetcher/internal_transaction_test.exs @@ -524,19 +524,6 @@ defmodule Indexer.Fetcher.InternalTransactionTest do assert %{block_number: ^block_number, block_hash: ^block_hash} = Repo.one(PendingBlockOperation) end - describe "filter_non_traceable_transactions/1" do - test "does not raise when transaction params do not include type on zetachain" do - chain_type = Application.get_env(:explorer, :chain_type) - Application.put_env(:explorer, :chain_type, :zetachain) - - on_exit(fn -> Application.put_env(:explorer, :chain_type, chain_type) end) - - transaction_params = %{block_number: 13_393_871, hash: "0x123", index: 427} - - assert [transaction_params] == InternalTransaction.filter_non_traceable_transactions([transaction_params]) - end - end - if Application.compile_env(:explorer, :chain_type) == :arbitrum do test "fetches internal transactions from Arbitrum", %{ json_rpc_named_arguments: json_rpc_named_arguments From 7d669c75c847bf1030d5962e6390e5c320dbb1e6 Mon Sep 17 00:00:00 2001 From: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> Date: Mon, 23 Mar 2026 19:33:07 +0300 Subject: [PATCH 30/42] fix: Notify.check_auth0 for Keycloak and Dynamic (#14146) --- apps/explorer/lib/explorer/account/notify.ex | 11 +++++------ .../lib/explorer/third_party_integrations/dynamic.ex | 7 ++++++- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/apps/explorer/lib/explorer/account/notify.ex b/apps/explorer/lib/explorer/account/notify.ex index 8e717f538f41..b731ea81a4a9 100644 --- a/apps/explorer/lib/explorer/account/notify.ex +++ b/apps/explorer/lib/explorer/account/notify.ex @@ -5,6 +5,7 @@ defmodule Explorer.Account.Notify do alias Explorer.Account alias Explorer.Account.Notifier.Notify + alias Explorer.ThirdPartyIntegrations.{Auth0, Dynamic, Keycloak} 
require Logger @@ -24,15 +25,13 @@ defmodule Explorer.Account.Notify do end defp check_envs do - check_auth0() + check_authentication_provider() check_sendgrid() end - defp check_auth0 do - (Application.get_env(:ueberauth, Ueberauth.Strategy.Auth0.OAuth)[:client_id] && - Application.get_env(:ueberauth, Ueberauth.Strategy.Auth0.OAuth)[:client_secret] && - Application.get_env(:ueberauth, Ueberauth.Strategy.Auth0.OAuth)[:domain]) || - raise "Auth0 not configured" + defp check_authentication_provider do + Auth0.enabled?() || Keycloak.enabled?() || Dynamic.enabled?() || + raise "No authentication provider configured" end defp check_sendgrid do diff --git a/apps/explorer/lib/explorer/third_party_integrations/dynamic.ex b/apps/explorer/lib/explorer/third_party_integrations/dynamic.ex index abcd45708093..16f96ef20518 100644 --- a/apps/explorer/lib/explorer/third_party_integrations/dynamic.ex +++ b/apps/explorer/lib/explorer/third_party_integrations/dynamic.ex @@ -17,6 +17,11 @@ defmodule Explorer.ThirdPartyIntegrations.Dynamic do require Logger + @spec enabled? :: boolean() + def enabled? do + Application.get_env(:explorer, __MODULE__)[:enabled] || false + end + @doc """ Authenticates a user by verifying a JWT token and extracting identity information from its claims. 
@@ -39,7 +44,7 @@ defmodule Explorer.ThirdPartyIntegrations.Dynamic do """ @spec get_auth_from_token(String.t()) :: {:ok, Auth.t()} | {:error, String.t()} | {:enabled, false} def get_auth_from_token(token) do - with {:enabled, true} <- {:enabled, Application.get_env(:explorer, __MODULE__)[:enabled]}, + with {:enabled, true} <- {:enabled, enabled?()}, {:ok, claims} <- Token.verify_and_validate(token) do create_auth(claims) else From 0175ee3eb1fdec9a05f3ff2c3df67f7d1ea3cccb Mon Sep 17 00:00:00 2001 From: Glencorse033 <103339634+Glencorse033@users.noreply.github.com> Date: Tue, 24 Mar 2026 07:40:46 +0100 Subject: [PATCH 31/42] feat: add validation for IPFS links before sending requests to gateway (#14131) Co-authored-by: glencorse033 Co-authored-by: Victor Baranov --- .../lib/explorer/token/metadata_retriever.ex | 59 ++++++++++++------- .../token/metadata_retriever_test.exs | 33 +++++++++++ .../lib/nft_media_handler.ex | 39 ++++++------ cspell.json | 5 ++ 4 files changed, 98 insertions(+), 38 deletions(-) diff --git a/apps/explorer/lib/explorer/token/metadata_retriever.ex b/apps/explorer/lib/explorer/token/metadata_retriever.ex index 39d8a0eaf95d..ab83d3b0734a 100644 --- a/apps/explorer/lib/explorer/token/metadata_retriever.ex +++ b/apps/explorer/lib/explorer/token/metadata_retriever.ex @@ -13,6 +13,7 @@ defmodule Explorer.Token.MetadataRetriever do @no_uri_error "no uri" @vm_execution_error "VM execution error" @invalid_base64_data "invalid data:application/json;base64" + @invalid_ipfs_path "invalid ipfs path" @default_headers [{"User-Agent", "blockscout-10.0.6"}] # https://eips.ethereum.org/EIPS/eip-1155#metadata @@ -713,41 +714,41 @@ defmodule Explorer.Token.MetadataRetriever do case URI.parse(token_uri_string) do %URI{scheme: "ipfs", host: host, path: path} -> resource_id = - if host == "ipfs" do - "/" <> resource_id = path - resource_id - else - # credo:disable-for-next-line - if is_nil(path), do: host, else: host <> path + cond do + host == "ipfs" and 
is_binary(path) and String.starts_with?(path, "/") -> + String.replace_leading(path, "/", "") + + is_binary(host) and is_binary(path) -> + host <> path + + is_binary(host) and is_nil(path) -> + host + + true -> + nil end - fetch_from_ipfs(resource_id, hex_token_id) + fetch_from_ipfs_if_valid_path(resource_id, hex_token_id) %URI{scheme: "ar", host: _host, path: resource_id} -> fetch_from_arweave(resource_id, hex_token_id) %URI{scheme: _, path: "/ipfs/" <> resource_id} -> - fetch_from_ipfs(resource_id, hex_token_id) + fetch_from_ipfs_if_valid_path(resource_id, hex_token_id) %URI{scheme: _, path: "ipfs/" <> resource_id} -> - fetch_from_ipfs(resource_id, hex_token_id) + fetch_from_ipfs_if_valid_path(resource_id, hex_token_id) %URI{scheme: scheme} when not is_nil(scheme) -> fetch_metadata_inner(token_uri_string, ipfs_params, token_id, hex_token_id, from_base_uri?) %URI{path: path} -> - case path do - "Qm" <> <<_::binary-size(44)>> = resource_id -> - fetch_from_ipfs(resource_id, hex_token_id) + if is_binary(path) and valid_ipfs_path?(public_ipfs_link(path)) do + fetch_from_ipfs(path, hex_token_id) + else + json = ExplorerHelper.decode_json(token_uri_string, true) - # todo: rewrite for strict CID v1 support - "bafybe" <> _ = resource_id -> - fetch_from_ipfs(resource_id, hex_token_id) - - _ -> - json = ExplorerHelper.decode_json(token_uri_string, true) - - check_type(json, hex_token_id) + check_type(json, hex_token_id) end end rescue @@ -988,6 +989,24 @@ defmodule Explorer.Token.MetadataRetriever do String.replace(token_uri, @erc1155_token_id_placeholder, hex_token_id) end + def valid_ipfs_path?(path) when is_binary(path) do + # CIDv0: ipfs://Qm[1-9A-HJ-NP-Za-km-z]{44} + # CIDv1: ipfs://b[a-z2-7]{7,} + # Path format: ipfs://[CID]/optional/path + ipfs_path_regular_expression = ~r/^ipfs:\/\/(Qm[1-9A-HJ-NP-Za-km-z]{44}|b[a-z2-7]{7,})(\/.*)?$/ + String.match?(path, ipfs_path_regular_expression) + end + + def valid_ipfs_path?(_), do: false + + defp 
fetch_from_ipfs_if_valid_path(resource_id, hex_token_id) do + if is_binary(resource_id) and valid_ipfs_path?(public_ipfs_link(resource_id)) do + fetch_from_ipfs(resource_id, hex_token_id) + else + {:error, @invalid_ipfs_path} + end + end + @doc """ Truncate error string to @max_error_length symbols """ diff --git a/apps/explorer/test/explorer/token/metadata_retriever_test.exs b/apps/explorer/test/explorer/token/metadata_retriever_test.exs index 9d19c0fddbbc..ab74cfef9bb3 100644 --- a/apps/explorer/test/explorer/token/metadata_retriever_test.exs +++ b/apps/explorer/test/explorer/token/metadata_retriever_test.exs @@ -1242,4 +1242,37 @@ defmodule Explorer.Token.MetadataRetrieverTest do assert MetadataRetriever.arweave_link(data) == expected_link end end + + describe "IPFS link validation" do + test "valid_ipfs_path?/1 returns true for valid CIDv0 (Qm...)" do + valid_cid_v0 = "ipfs://QmXoypizjW3WknFiJnKLwHCnL72vedxjQkDDP1mXWo6uco" + assert MetadataRetriever.valid_ipfs_path?(valid_cid_v0) + end + + test "valid_ipfs_path?/1 returns true for valid CIDv1 (b...)" do + valid_cid_v1 = "ipfs://bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3hlgtif73fqpyae" + assert MetadataRetriever.valid_ipfs_path?(valid_cid_v1) + end + + test "valid_ipfs_path?/1 returns true for short valid CIDv1" do + short_cid_v1 = "ipfs://bafybeic" + assert MetadataRetriever.valid_ipfs_path?(short_cid_v1) + end + + test "valid_ipfs_path?/1 returns false for empty string" do + refute MetadataRetriever.valid_ipfs_path?("") + end + + test "valid_ipfs_path?/1 returns false for malformed path like ipfs://invalid" do + refute MetadataRetriever.valid_ipfs_path?("ipfs://invalid") + end + + test "HTTP request is NOT made when path is invalid" do + invalid_path = "ipfs://invalid" + + # We assert that it returns the error immediately without any HTTP mock being called. + # If it tried to make a request, it would fail because no expectation is set for this URL. 
+ assert MetadataRetriever.fetch_json({:ok, [invalid_path]}) == {:error, "invalid ipfs path"} + end + end end diff --git a/apps/nft_media_handler/lib/nft_media_handler.ex b/apps/nft_media_handler/lib/nft_media_handler.ex index bebf45ed2824..12bdc5e91d41 100644 --- a/apps/nft_media_handler/lib/nft_media_handler.ex +++ b/apps/nft_media_handler/lib/nft_media_handler.ex @@ -163,42 +163,45 @@ defmodule NFTMediaHandler do case URI.parse(uri) do %URI{scheme: "ipfs", host: host, path: path} -> resource_id = - with "ipfs" <- host, - "/" <> resource_id <- path do - resource_id - else - _ -> build_ipfs_resource_id(host, path) + cond do + host == "ipfs" and is_binary(path) and String.starts_with?(path, "/") -> + String.replace_leading(path, "/", "") + + is_binary(host) and host != "" -> + build_ipfs_resource_id(host, path) + + true -> + path end - {TokenMetadataRetriever.ipfs_link(resource_id), TokenMetadataRetriever.ipfs_headers()} + maybe_fetch_ipfs_url(resource_id, uri) %URI{scheme: "ar", host: _host, path: resource_id} -> {TokenMetadataRetriever.arweave_link(resource_id), TokenMetadataRetriever.ar_headers()} %URI{scheme: _, path: "/ipfs/" <> resource_id} -> - {TokenMetadataRetriever.ipfs_link(resource_id), TokenMetadataRetriever.ipfs_headers()} + maybe_fetch_ipfs_url(resource_id, uri) %URI{scheme: _, path: "ipfs/" <> resource_id} -> - {TokenMetadataRetriever.ipfs_link(resource_id), TokenMetadataRetriever.ipfs_headers()} + maybe_fetch_ipfs_url(resource_id, uri) %URI{scheme: scheme} when not is_nil(scheme) -> {uri, []} %URI{path: path} -> - case path do - "Qm" <> <<_::binary-size(44)>> = resource_id -> - {TokenMetadataRetriever.ipfs_link(resource_id), TokenMetadataRetriever.ipfs_headers()} - - "bafybe" <> _ = resource_id -> - {TokenMetadataRetriever.ipfs_link(resource_id), TokenMetadataRetriever.ipfs_headers()} - - _ -> - {uri, []} - end + maybe_fetch_ipfs_url(path, uri) end end defp build_ipfs_resource_id(host, path) do if is_nil(path), do: host, else: host <> path end + + 
defp maybe_fetch_ipfs_url(resource_id, uri) do + if is_binary(resource_id) and TokenMetadataRetriever.valid_ipfs_path?("ipfs://" <> resource_id) do + {TokenMetadataRetriever.ipfs_link(resource_id), TokenMetadataRetriever.ipfs_headers()} + else + {uri, []} + end + end end diff --git a/cspell.json b/cspell.json index 526572a5a466..5ddf417d0adc 100644 --- a/cspell.json +++ b/cspell.json @@ -73,8 +73,10 @@ "badpassword", "badrpc", "bafybe", + "bafybeic", "bafybeid", "bafybeig", + "bafybeigdyrzt", "bafybeihxuj", "balancemulti", "benchee", @@ -142,6 +144,7 @@ "childspec", "chromedriver", "citext", + "cidv", "Cldr", "clearfix", "clickover", @@ -357,6 +360,8 @@ "inversed", "iolist", "ipfs", + "ipfslink", + "ipfsuid", "ipos", "itxs", "johnnny", From fee34c25443fe3bbeffbcfdac4d75b2414f7746c Mon Sep 17 00:00:00 2001 From: Fedor Ivanov Date: Tue, 24 Mar 2026 10:19:27 +0300 Subject: [PATCH 32/42] feat: add ENS and metadata preloading in block channel (#12074) Co-authored-by: Victor Baranov --- .../lib/block_scout_web/notifier.ex | 61 +++++- .../channels/v2/block_channel_test.exs | 199 +++++++++++++++++- .../chain/address/metadata_preloader.ex | 18 ++ .../explorer/microservice_interfaces/bens.ex | 10 +- .../microservice_interfaces/metadata.ex | 10 +- .../lib/explorer/utility/microservice.ex | 9 +- config/runtime.exs | 4 + docker-compose/envs/common-blockscout.env | 1 + 8 files changed, 304 insertions(+), 8 deletions(-) diff --git a/apps/block_scout_web/lib/block_scout_web/notifier.ex b/apps/block_scout_web/lib/block_scout_web/notifier.ex index 60d3681a38f0..ca28020d00c8 100644 --- a/apps/block_scout_web/lib/block_scout_web/notifier.ex +++ b/apps/block_scout_web/lib/block_scout_web/notifier.ex @@ -6,6 +6,16 @@ defmodule BlockScoutWeb.Notifier do chain_type: [:explorer, :chain_type], chain_identity: [:explorer, :chain_identity] + use Utils.RuntimeEnvHelper, + block_broadcast_enrichment_disabled?: [ + :block_scout_web, + [BlockScoutWeb.Notifier, 
:block_broadcast_enrichment_disabled] + ], + block_broadcast_enrichment_timeout: [ + :block_scout_web, + [BlockScoutWeb.Notifier, :block_broadcast_enrichment_timeout] + ] + require Logger alias Absinthe.Subscription @@ -46,10 +56,13 @@ defmodule BlockScoutWeb.Notifier do alias Explorer.Chain.Cache.Counters.{AddressesCount, AverageBlockTime, Helper} alias Explorer.Chain.Supply.RSK alias Explorer.Chain.Transaction.History.TransactionStats + alias Explorer.MicroserviceInterfaces.{BENS, Metadata} alias Explorer.SmartContract.{CompilerVersion, Solidity.CodeCompiler} alias Phoenix.View alias Timex.Duration + import Explorer.MicroserviceInterfaces.BENS, only: [maybe_preload_ens_to_block: 1] + import Explorer.MicroserviceInterfaces.Metadata, only: [maybe_preload_metadata_to_block: 1] import Explorer.Chain.SmartContract.Proxy.Models.Implementation, only: [proxy_implementations_association: 0] @check_broadcast_sequence_period 500 @@ -632,11 +645,15 @@ defmodule BlockScoutWeb.Notifier do defp broadcast_block(block) do preloaded_block = - Repo.preload(block, [ + block + |> Repo.preload([ [miner: [:names, :smart_contract, proxy_implementations_association()]], :transactions, :rewards ]) + # TODO: theoretically might introduce performance issues, + # consider async broadcast of enrichment data + |> maybe_preload_enrichment_for_broadcast() average_block_time = AverageBlockTime.average_block_time() @@ -664,6 +681,48 @@ defmodule BlockScoutWeb.Notifier do Endpoint.broadcast("blocks:#{to_string(block.miner_hash)}", "new_block", block_params_v2) end + defp maybe_preload_enrichment_for_broadcast(block) do + if !block_broadcast_enrichment_disabled?() and (BENS.enabled?() or Metadata.enabled?()) do + preload_enrichment_for_broadcast(block) + else + block + end + end + + defp preload_enrichment_for_broadcast(block) do + timeout = block_broadcast_enrichment_timeout() + + results = + Task.Supervisor.async_stream_nolink( + Explorer.TaskSupervisor, + [ + {:ens_domain_name, 
&maybe_preload_ens_to_block/1}, + {:metadata, &maybe_preload_metadata_to_block/1} + ], + fn {field, preload_fun} -> + {field, preload_fun.(block)} + end, + timeout: timeout, + on_timeout: :kill_task, + ordered: false + ) + + Enum.reduce(results, block, fn + {:ok, {field, enriched_block}}, acc -> merge_enriched_miner_field(acc, enriched_block, field) + _, acc -> acc + end) + end + + defp merge_enriched_miner_field(%{miner: %{} = miner} = block, %{miner: %{} = enriched_miner}, field) do + case Map.fetch(enriched_miner, field) do + {:ok, nil} -> block + {:ok, value} -> %{block | miner: Map.replace(miner, field, value)} + :error -> block + end + end + + defp merge_enriched_miner_field(block, _enriched_block, _field), do: block + defp broadcast_rewards(rewards) do preloaded_rewards = Repo.preload(rewards, [:address, :block]) emission_reward = Enum.find(preloaded_rewards, fn reward -> reward.address_type == :emission_funds end) diff --git a/apps/block_scout_web/test/block_scout_web/channels/v2/block_channel_test.exs b/apps/block_scout_web/test/block_scout_web/channels/v2/block_channel_test.exs index febce4453f38..116a00d00efa 100644 --- a/apps/block_scout_web/test/block_scout_web/channels/v2/block_channel_test.exs +++ b/apps/block_scout_web/test/block_scout_web/channels/v2/block_channel_test.exs @@ -3,11 +3,22 @@ defmodule BlockScoutWeb.V2.BlockChannelTest do alias BlockScoutWeb.Notifier alias Explorer.Chain.Cache.Counters.AverageBlockTime + alias Plug.Conn - test "subscribed user is notified of new_block event" do + setup do + old_notifier = Application.get_env(:block_scout_web, Notifier, []) topic = "blocks:new_block" @endpoint.subscribe(topic) + on_exit(fn -> + Application.put_env(:block_scout_web, Notifier, old_notifier) + Phoenix.PubSub.unsubscribe(BlockScoutWeb.PubSub, topic) + end) + + {:ok, topic: topic} + end + + test "subscribed user is notified of new_block event", %{topic: topic} do block = insert(:block, number: 1) start_supervised!(AverageBlockTime) @@ 
-28,7 +39,7 @@ defmodule BlockScoutWeb.V2.BlockChannelTest do end end - test "user is able to join to common channels" do + test "user is able to join to common channels", %{topic: topic} do common_channels = ["new_block", "indexing", "indexing_internal_transactions"] Enum.each(common_channels, fn channel -> @@ -38,4 +49,188 @@ defmodule BlockScoutWeb.V2.BlockChannelTest do |> subscribe_and_join("blocks:#{channel}") end) end + + test "new_block payload includes miner ENS and metadata when microservices are enabled", %{topic: topic} do + bypass = Bypass.open() + + old_chain_id = Application.get_env(:block_scout_web, :chain_id) + old_bens = Application.get_env(:explorer, Explorer.MicroserviceInterfaces.BENS) + old_metadata = Application.get_env(:explorer, Explorer.MicroserviceInterfaces.Metadata) + old_tesla_adapter = Application.get_env(:tesla, :adapter) + + Application.put_env(:tesla, :adapter, Tesla.Adapter.Mint) + + chain_id = 1 + Application.put_env(:block_scout_web, :chain_id, chain_id) + + Application.put_env(:explorer, Explorer.MicroserviceInterfaces.BENS, + service_url: "http://localhost:#{bypass.port}", + enabled: true, + protocols: [] + ) + + Application.put_env(:explorer, Explorer.MicroserviceInterfaces.Metadata, + service_url: "http://localhost:#{bypass.port}", + enabled: true + ) + + on_exit(fn -> + Bypass.down(bypass) + Application.put_env(:block_scout_web, :chain_id, old_chain_id) + Application.put_env(:explorer, Explorer.MicroserviceInterfaces.BENS, old_bens) + Application.put_env(:explorer, Explorer.MicroserviceInterfaces.Metadata, old_metadata) + Application.put_env(:tesla, :adapter, old_tesla_adapter) + end) + + miner = insert(:address) + + Bypass.expect_once(bypass, "POST", "/api/v1/#{chain_id}/addresses:batch_resolve_names", fn conn -> + Conn.resp( + conn, + 200, + Jason.encode!(%{ + "names" => %{ + to_string(miner.hash) => "miner.eth" + } + }) + ) + end) + + Bypass.expect_once(bypass, "GET", "/api/v1/metadata", fn conn -> + Conn.resp( + conn, + 
200, + Jason.encode!(%{ + "addresses" => %{ + to_string(miner.hash) => %{ + "tags" => [] + } + } + }) + ) + end) + + block = insert(:block, number: 1, miner: miner) + + start_supervised!(AverageBlockTime) + Application.put_env(:explorer, AverageBlockTime, enabled: true, cache_period: 1_800_000) + + on_exit(fn -> + Application.put_env(:explorer, AverageBlockTime, enabled: false, cache_period: 1_800_000) + end) + + Notifier.handle_event({:chain_event, :blocks, :realtime, [block]}) + + receive do + %Phoenix.Socket.Broadcast{topic: ^topic, event: "new_block", payload: %{block: block_payload}} -> + assert block_payload["miner"]["ens_domain_name"] == "miner.eth" + assert block_payload["miner"]["metadata"] == %{"tags" => []} + after + :timer.seconds(5) -> + assert false, "Expected message received nothing." + end + end + + test "new_block broadcast skips enrichment when DISABLE_BLOCK_BROADCAST_ENRICHMENT is set", %{topic: topic} do + bypass = Bypass.open() + + old_chain_id = Application.get_env(:block_scout_web, :chain_id) + old_bens = Application.get_env(:explorer, Explorer.MicroserviceInterfaces.BENS) + old_metadata = Application.get_env(:explorer, Explorer.MicroserviceInterfaces.Metadata) + + Application.put_env(:block_scout_web, :chain_id, 1) + + Application.put_env(:explorer, Explorer.MicroserviceInterfaces.BENS, + service_url: "http://localhost:#{bypass.port}", + enabled: true, + protocols: [] + ) + + Application.put_env(:explorer, Explorer.MicroserviceInterfaces.Metadata, + service_url: "http://localhost:#{bypass.port}", + enabled: true + ) + + Application.put_env(:block_scout_web, Notifier, block_broadcast_enrichment_disabled: true) + + on_exit(fn -> + Bypass.down(bypass) + Application.put_env(:block_scout_web, :chain_id, old_chain_id) + Application.put_env(:explorer, Explorer.MicroserviceInterfaces.BENS, old_bens) + Application.put_env(:explorer, Explorer.MicroserviceInterfaces.Metadata, old_metadata) + end) + + # No Bypass.expect calls — any HTTP call to the 
microservices would cause Bypass to raise + Bypass.pass(bypass) + + miner = insert(:address) + block = insert(:block, number: 1, miner: miner) + + start_supervised!(AverageBlockTime) + Application.put_env(:explorer, AverageBlockTime, enabled: true, cache_period: 1_800_000) + + on_exit(fn -> + Application.put_env(:explorer, AverageBlockTime, enabled: false, cache_period: 1_800_000) + end) + + Notifier.handle_event({:chain_event, :blocks, :realtime, [block]}) + + receive do + %Phoenix.Socket.Broadcast{topic: ^topic, event: "new_block", payload: %{block: block_payload}} -> + assert is_nil(block_payload["miner"]["ens_domain_name"]) + after + :timer.seconds(5) -> + assert false, "Expected message received nothing." + end + end + + test "new_block broadcast falls back quickly when enrichment services are unavailable", %{topic: topic} do + old_chain_id = Application.get_env(:block_scout_web, :chain_id) + old_bens = Application.get_env(:explorer, Explorer.MicroserviceInterfaces.BENS) + old_metadata = Application.get_env(:explorer, Explorer.MicroserviceInterfaces.Metadata) + old_tesla_adapter = Application.get_env(:tesla, :adapter) + Application.put_env(:tesla, :adapter, Tesla.Adapter.Mint) + + chain_id = 1 + Application.put_env(:block_scout_web, :chain_id, chain_id) + + Application.put_env(:block_scout_web, Notifier, block_broadcast_enrichment_timeout: 50) + + Application.put_env(:explorer, Explorer.MicroserviceInterfaces.BENS, + service_url: "http://127.0.0.1:9", + enabled: true, + protocols: [] + ) + + Application.put_env(:explorer, Explorer.MicroserviceInterfaces.Metadata, + service_url: "http://127.0.0.1:9", + enabled: true + ) + + on_exit(fn -> + Application.put_env(:block_scout_web, :chain_id, old_chain_id) + Application.put_env(:explorer, Explorer.MicroserviceInterfaces.BENS, old_bens) + Application.put_env(:explorer, Explorer.MicroserviceInterfaces.Metadata, old_metadata) + Application.put_env(:tesla, :adapter, old_tesla_adapter) + end) + + miner = insert(:address) 
+ + block = insert(:block, number: 1, miner: miner) + + start_supervised!(AverageBlockTime) + Application.put_env(:explorer, AverageBlockTime, enabled: true, cache_period: 1_800_000) + + on_exit(fn -> + Application.put_env(:explorer, AverageBlockTime, enabled: false, cache_period: 1_800_000) + end) + + timeout = + Application.get_env(:block_scout_web, Notifier, []) + |> Keyword.get(:block_broadcast_enrichment_timeout, 200) + + Notifier.handle_event({:chain_event, :blocks, :realtime, [block]}) + + assert_receive %Phoenix.Socket.Broadcast{topic: ^topic, event: "new_block", payload: %{block: _}}, timeout + 200 + end end diff --git a/apps/explorer/lib/explorer/chain/address/metadata_preloader.ex b/apps/explorer/lib/explorer/chain/address/metadata_preloader.ex index 36a68fa56545..820dc05dfad9 100644 --- a/apps/explorer/lib/explorer/chain/address/metadata_preloader.ex +++ b/apps/explorer/lib/explorer/chain/address/metadata_preloader.ex @@ -62,6 +62,15 @@ defmodule Explorer.Chain.Address.MetadataPreloader do address_with_ens end + @doc """ + Preloads ENS name to Block.t() + """ + @spec preload_ens_to_block(Block.t()) :: Block.t() + def preload_ens_to_block(block) do + [block_with_ens] = preload_ens_to_list([block]) + block_with_ens + end + @doc """ Preloads ENS names to list of supported entities """ @@ -113,6 +122,15 @@ defmodule Explorer.Chain.Address.MetadataPreloader do transaction_with_metadata end + @doc """ + Preloads metadata to Block.t() + """ + @spec preload_metadata_to_block(Block.t()) :: Block.t() + def preload_metadata_to_block(block) do + [block_with_metadata] = preload_metadata_to_list([block]) + block_with_metadata + end + @doc """ Preload ENS info to search result, using get_address/1 """ diff --git a/apps/explorer/lib/explorer/microservice_interfaces/bens.ex b/apps/explorer/lib/explorer/microservice_interfaces/bens.ex index 1771b295274d..61f1d7c5f45c 100644 --- a/apps/explorer/lib/explorer/microservice_interfaces/bens.ex +++ 
b/apps/explorer/lib/explorer/microservice_interfaces/bens.ex @@ -6,7 +6,7 @@ defmodule Explorer.MicroserviceInterfaces.BENS do alias Explorer.{Chain, HttpClient} alias Explorer.Chain.Address.MetadataPreloader - alias Explorer.Chain.{Address, Transaction} + alias Explorer.Chain.{Address, Block, Transaction} alias Explorer.Utility.Microservice @@ -317,4 +317,12 @@ defmodule Explorer.MicroserviceInterfaces.BENS do def maybe_preload_ens_to_address(address) do maybe_preload_meta(address, __MODULE__, &MetadataPreloader.preload_ens_to_address/1) end + + @doc """ + Preloads ENS data to the block if BENS is enabled + """ + @spec maybe_preload_ens_to_block(Block.t()) :: Block.t() + def maybe_preload_ens_to_block(block) do + maybe_preload_meta(block, __MODULE__, &MetadataPreloader.preload_ens_to_block/1) + end end diff --git a/apps/explorer/lib/explorer/microservice_interfaces/metadata.ex b/apps/explorer/lib/explorer/microservice_interfaces/metadata.ex index 34ac1ea20873..40e63f2017b2 100644 --- a/apps/explorer/lib/explorer/microservice_interfaces/metadata.ex +++ b/apps/explorer/lib/explorer/microservice_interfaces/metadata.ex @@ -4,7 +4,7 @@ defmodule Explorer.MicroserviceInterfaces.Metadata do """ alias Explorer.{Chain, HttpClient} - alias Explorer.Chain.{Address.MetadataPreloader, Transaction} + alias Explorer.Chain.{Address.MetadataPreloader, Block, Transaction} alias Explorer.Utility.Microservice import Explorer.MicroserviceInterfaces.BENS, only: [maybe_preload_ens: 1] @@ -182,6 +182,14 @@ defmodule Explorer.MicroserviceInterfaces.Metadata do maybe_preload_meta(transaction, __MODULE__, &MetadataPreloader.preload_metadata_to_transaction/1) end + @doc """ + Preloads metadata to block if Metadata microservice is enabled + """ + @spec maybe_preload_metadata_to_block(Block.t()) :: Block.t() + def maybe_preload_metadata_to_block(block) do + maybe_preload_meta(block, __MODULE__, &MetadataPreloader.preload_metadata_to_block/1) + end + defp decode_meta({:ok, %{"addresses" => 
addresses} = result}) do prepared_address = Enum.reduce(addresses, %{}, fn {address, meta}, acc -> diff --git a/apps/explorer/lib/explorer/utility/microservice.ex b/apps/explorer/lib/explorer/utility/microservice.ex index 613449bf1c04..305ea86f2c11 100644 --- a/apps/explorer/lib/explorer/utility/microservice.ex +++ b/apps/explorer/lib/explorer/utility/microservice.ex @@ -10,7 +10,7 @@ defmodule Explorer.Utility.Microservice do """ @spec base_url(atom(), atom()) :: nil | binary() def base_url(application \\ :explorer, module) do - url = Application.get_env(application, module)[:service_url] + url = config(application, module)[:service_url] if UtilsConfigHelper.valid_url?(url), do: url, else: nil end @@ -20,7 +20,7 @@ defmodule Explorer.Utility.Microservice do """ @spec check_enabled(atom(), atom()) :: :ok | {:error, :disabled} def check_enabled(application \\ :explorer, module) do - if Application.get_env(application, module)[:enabled] && base_url(application, module) do + if config(application, module)[:enabled] && base_url(application, module) do :ok else {:error, :disabled} @@ -38,6 +38,9 @@ defmodule Explorer.Utility.Microservice do """ @spec api_key(atom(), atom()) :: String.t() | nil def api_key(application \\ :explorer, module) do - Application.get_env(application, module)[:api_key] + config(application, module)[:api_key] end + + @spec config(atom(), atom()) :: keyword() + defp config(application, module), do: Application.get_env(application, module, []) end diff --git a/config/runtime.exs b/config/runtime.exs index b3c7083431d1..91e5b47c9836 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -119,6 +119,10 @@ config :block_scout_web, BlockScoutWeb.Chain, enable_testnet_label: ConfigHelper.parse_bool_env_var("SHOW_TESTNET_LABEL"), testnet_label_text: System.get_env("TESTNET_LABEL_TEXT", "Testnet") +config :block_scout_web, BlockScoutWeb.Notifier, + block_broadcast_enrichment_timeout: 200, + block_broadcast_enrichment_disabled: 
ConfigHelper.parse_bool_env_var("DISABLE_BLOCK_BROADCAST_ENRICHMENT") + config :block_scout_web, :footer, logo: System.get_env("FOOTER_LOGO"), chat_link: System.get_env("FOOTER_CHAT_LINK", "https://discord.gg/blockscout"), diff --git a/docker-compose/envs/common-blockscout.env b/docker-compose/envs/common-blockscout.env index f21f1cb0d004..a5dc8d7d8841 100644 --- a/docker-compose/envs/common-blockscout.env +++ b/docker-compose/envs/common-blockscout.env @@ -144,6 +144,7 @@ RELEASE_LINK= # CONTRACT_CERTIFIED_LIST= # CONTRACT_ENABLE_PARTIAL_REVERIFICATION= # UNCLES_IN_AVERAGE_BLOCK_TIME=false +# DISABLE_BLOCK_BROADCAST_ENRICHMENT=false # DISABLE_WEBAPP=true ADMIN_PANEL_ENABLED=false # API_V2_ENABLED=true From 78e9c5aefd2cff26110c78f10083fb66b5f08ca6 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Tue, 24 Mar 2026 12:50:09 +0300 Subject: [PATCH 33/42] Allow manually trigger main CI --- .github/workflows/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/config.yml b/.github/workflows/config.yml index baac41c80403..285ecb88cc7d 100644 --- a/.github/workflows/config.yml +++ b/.github/workflows/config.yml @@ -18,12 +18,12 @@ on: - production-xdai - production-zkevm - production-zksync - - staging-l2 paths-ignore: - "CHANGELOG.md" - "**/README.md" - "docker/*" - "docker-compose/*" + workflow_dispatch: pull_request: types: [opened, synchronize, reopened, labeled] branches: From 3eb34f6903745f1934719d56bd5d195d5c49e193 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Tue, 24 Mar 2026 12:53:44 +0300 Subject: [PATCH 34/42] Remove custom branches from main CI workflow --- .github/workflows/config.yml | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/.github/workflows/config.yml b/.github/workflows/config.yml index 285ecb88cc7d..34370319cf40 100644 --- a/.github/workflows/config.yml +++ b/.github/workflows/config.yml @@ -4,20 +4,6 @@ on: push: branches: - master - - production-arbitrum - - production-core - - 
production-eth-sepolia - - production-filecoin - - production-fuse - - production-optimism - - production-immutable - - production-iota - - production-lukso - - production-rsk - - production-sokol - - production-xdai - - production-zkevm - - production-zksync paths-ignore: - "CHANGELOG.md" - "**/README.md" From 21f7ef45cbe74ed583042692701172a79a211821 Mon Sep 17 00:00:00 2001 From: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Date: Tue, 24 Mar 2026 14:20:24 +0400 Subject: [PATCH 35/42] fix: Fix token transfers block_consensus setting (#14005) --- .../explorer/chain/import/runner/blocks.ex | 20 ++++++++++--------- ...ken_transfer_block_consensus_migration.exs | 7 +++++++ 2 files changed, 18 insertions(+), 9 deletions(-) create mode 100644 apps/explorer/priv/repo/migrations/20260217131711_reset_token_transfer_block_consensus_migration.exs diff --git a/apps/explorer/lib/explorer/chain/import/runner/blocks.ex b/apps/explorer/lib/explorer/chain/import/runner/blocks.ex index e31149e1d821..d7a1a7e9f15f 100644 --- a/apps/explorer/lib/explorer/chain/import/runner/blocks.ex +++ b/apps/explorer/lib/explorer/chain/import/runner/blocks.ex @@ -521,10 +521,12 @@ defmodule Explorer.Chain.Import.Runner.Blocks do defp on_conflict_chain_type_extension(_), do: nil - defp consensus_block_numbers(blocks_changes) when is_list(blocks_changes) do + defp consensus_block_identifiers(blocks_changes) when is_list(blocks_changes) do blocks_changes |> Enum.filter(& &1.consensus) - |> Enum.map(& &1.number) + |> Enum.reduce({[], []}, fn block_change, {numbers, hashes} -> + {[block_change.number | numbers], [block_change.hash | hashes]} + end) end # Handles block consensus loss. 
@@ -547,7 +549,7 @@ defmodule Explorer.Chain.Import.Runner.Blocks do } = _opts ) do hashes = Enum.map(changes_list, & &1.hash) - consensus_block_numbers = consensus_block_numbers(changes_list) + {consensus_block_numbers, consensus_hashes} = consensus_block_identifiers(changes_list) acquire_query = from( @@ -596,7 +598,7 @@ defmodule Explorer.Chain.Import.Runner.Blocks do join: s in subquery(acquire_query), on: transaction.block_hash == s.hash, # we don't want to remove consensus from blocks that will be upserted - where: transaction.block_hash not in ^hashes + where: transaction.block_hash not in ^consensus_hashes ), [set: [block_consensus: false, updated_at: updated_at]], timeout: timeout @@ -606,9 +608,9 @@ defmodule Explorer.Chain.Import.Runner.Blocks do from( token_transfer in TokenTransfer, join: s in subquery(acquire_query), - on: token_transfer.block_number == s.number, + on: token_transfer.block_number == s.number and token_transfer.block_hash == s.hash, # we don't want to remove consensus from blocks that will be upserted - where: token_transfer.block_hash not in ^hashes + where: token_transfer.block_hash not in ^consensus_hashes ), [set: [block_consensus: false, updated_at: updated_at]], timeout: timeout @@ -621,7 +623,7 @@ defmodule Explorer.Chain.Import.Runner.Blocks do join: s in subquery(acquire_query), on: t.block_hash == s.hash, # we don't want to remove contract code from blocks that will be upserted - where: t.block_hash not in ^hashes, + where: t.block_hash not in ^consensus_hashes, where: not is_nil(t.created_contract_address_hash), select: t.created_contract_address_hash ) @@ -650,8 +652,8 @@ defmodule Explorer.Chain.Import.Runner.Blocks do from( zrc2_token_transfer in Zrc2TokenTransfer, join: s in subquery(acquire_query), - on: zrc2_token_transfer.block_number == s.number, - where: zrc2_token_transfer.block_hash not in ^hashes + on: zrc2_token_transfer.block_number == s.number and zrc2_token_transfer.block_hash == s.hash, + where: 
zrc2_token_transfer.block_hash not in ^consensus_hashes ), timeout: timeout ) diff --git a/apps/explorer/priv/repo/migrations/20260217131711_reset_token_transfer_block_consensus_migration.exs b/apps/explorer/priv/repo/migrations/20260217131711_reset_token_transfer_block_consensus_migration.exs new file mode 100644 index 000000000000..e759cad6719a --- /dev/null +++ b/apps/explorer/priv/repo/migrations/20260217131711_reset_token_transfer_block_consensus_migration.exs @@ -0,0 +1,7 @@ +defmodule Explorer.Repo.Migrations.ResetTokenTransferBlockConsensusMigration do + use Ecto.Migration + + def change do + execute("DELETE FROM migrations_status WHERE migration_name = 'token_transfers_block_consensus'") + end +end From 26f6aa2cbf31a1405a8a5bc724ec9d20c6949e3c Mon Sep 17 00:00:00 2001 From: Qwerty5Uiop <105209995+Qwerty5Uiop@users.noreply.github.com> Date: Tue, 24 Mar 2026 14:23:15 +0400 Subject: [PATCH 36/42] chore: Add token transfer consensus sanitizer (#14144) Co-authored-by: Victor Baranov --- apps/indexer/lib/indexer/supervisor.ex | 6 +- ...oken_transfer_block_consensus_sanitizer.ex | 61 +++++++++++++++++++ config/runtime.exs | 3 + docker-compose/envs/common-blockscout.env | 13 ++-- 4 files changed, 76 insertions(+), 7 deletions(-) create mode 100644 apps/indexer/lib/indexer/token_transfer_block_consensus_sanitizer.ex diff --git a/apps/indexer/lib/indexer/supervisor.ex b/apps/indexer/lib/indexer/supervisor.ex index 057de8cc7305..ee3c802cb8b5 100644 --- a/apps/indexer/lib/indexer/supervisor.ex +++ b/apps/indexer/lib/indexer/supervisor.ex @@ -15,7 +15,8 @@ defmodule Indexer.Supervisor do BridgedTokens.SetAmbBridgedMetadataForTokens, BridgedTokens.SetOmniBridgedMetadataForTokens, PendingOpsCleaner, - PendingTransactionsSanitizer + PendingTransactionsSanitizer, + TokenTransferBlockConsensusSanitizer } alias Indexer.Block.Catchup, as: BlockCatchup @@ -369,6 +370,9 @@ defmodule Indexer.Supervisor do :blackfort -> [{ValidatorBlackfort, []} | fetchers] + :rsk -> + 
[TokenTransferBlockConsensusSanitizer | fetchers] + _ -> fetchers end diff --git a/apps/indexer/lib/indexer/token_transfer_block_consensus_sanitizer.ex b/apps/indexer/lib/indexer/token_transfer_block_consensus_sanitizer.ex new file mode 100644 index 000000000000..9299ee8ca06b --- /dev/null +++ b/apps/indexer/lib/indexer/token_transfer_block_consensus_sanitizer.ex @@ -0,0 +1,61 @@ +defmodule Indexer.TokenTransferBlockConsensusSanitizer do + @moduledoc """ + Periodically find token transfers with incorrect block_consensus and set refetch_needed for their blocks. + """ + + use GenServer + + require Logger + + import Ecto.Query + + alias Explorer.Chain.{Block, TokenTransfer} + alias Explorer.Repo + + def child_spec(_) do + %{ + id: __MODULE__, + start: {__MODULE__, :start_link, []}, + type: :worker, + restart: :permanent, + shutdown: Application.get_env(:indexer, :graceful_shutdown_period) + } + end + + def start_link do + GenServer.start_link(__MODULE__, :ok, name: __MODULE__) + end + + def init(_) do + schedule_sanitize() + + {:ok, %{}} + end + + def handle_info(:sanitize, state) do + block_numbers = + TokenTransfer + |> join(:inner, [tt], b in assoc(tt, :block)) + |> where([tt, b], tt.block_consensus != b.consensus) + |> select([tt], tt.block_number) + |> distinct(true) + |> Repo.all(timeout: :infinity) + + case block_numbers do + [] -> + Logger.debug("[TokenTransferBlockConsensusSanitizer] No inconsistent token transfer block consensus found") + + numbers -> + Logger.info("[TokenTransferBlockConsensusSanitizer] Marking #{length(numbers)} blocks for refetch") + Block.set_refetch_needed(numbers) + end + + schedule_sanitize() + + {:noreply, state} + end + + defp schedule_sanitize do + Process.send_after(self(), :sanitize, Application.get_env(:indexer, __MODULE__)[:interval]) + end +end diff --git a/config/runtime.exs b/config/runtime.exs index 91e5b47c9836..8265b461c68f 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -1063,6 +1063,9 @@ config :indexer, 
Indexer.Fetcher.TransactionAction, config :indexer, Indexer.PendingTransactionsSanitizer, interval: ConfigHelper.parse_time_env_var("INDEXER_PENDING_TRANSACTIONS_SANITIZER_INTERVAL", "1h") +config :indexer, Indexer.TokenTransferBlockConsensusSanitizer, + interval: ConfigHelper.parse_time_env_var("INDEXER_TOKEN_TRANSFER_BLOCK_CONSENSUS_SANITIZER_INTERVAL", "20m") + config :indexer, Indexer.Fetcher.PendingTransaction.Supervisor, disabled?: ConfigHelper.parse_bool_env_var("INDEXER_DISABLE_PENDING_TRANSACTIONS_FETCHER") diff --git a/docker-compose/envs/common-blockscout.env b/docker-compose/envs/common-blockscout.env index a5dc8d7d8841..4389cc36f784 100644 --- a/docker-compose/envs/common-blockscout.env +++ b/docker-compose/envs/common-blockscout.env @@ -189,7 +189,6 @@ API_V1_WRITE_METHODS_DISABLED=false # INDEXER_DISABLE_TOKEN_INSTANCE_RETRY_FETCHER=false # INDEXER_DISABLE_TOKEN_INSTANCE_SANITIZE_FETCHER=false # INDEXER_DISABLE_TOKEN_INSTANCE_REFETCH_FETCHER=false -# INDEXER_PENDING_TRANSACTIONS_SANITIZER_INTERVAL= # INDEXER_DISABLE_PENDING_TRANSACTIONS_FETCHER=false # INDEXER_DISABLE_INTERNAL_TRANSACTIONS_FETCHER=false # INDEXER_DISABLE_CATALOGED_TOKEN_UPDATER_FETCHER= @@ -197,6 +196,8 @@ API_V1_WRITE_METHODS_DISABLED=false # INDEXER_DISABLE_EMPTY_BLOCKS_SANITIZER= # INDEXER_DISABLE_WITHDRAWALS_FETCHER= # INDEXER_DISABLE_REPLACED_TRANSACTION_FETCHER= +# INDEXER_DISABLE_TOKEN_INSTANCE_ERC_1155_SANITIZE_FETCHER=false +# INDEXER_DISABLE_TOKEN_INSTANCE_ERC_721_SANITIZE_FETCHER=false # INDEXER_DISABLE_MULTICHAIN_SEARCH_DB_EXPORT_MAIN_QUEUE_FETCHER= # INDEXER_DISABLE_MULTICHAIN_SEARCH_DB_EXPORT_BALANCES_QUEUE_FETCHER= # INDEXER_DISABLE_MULTICHAIN_SEARCH_DB_EXPORT_TOKEN_INFO_QUEUE_FETCHER= @@ -211,6 +212,7 @@ API_V1_WRITE_METHODS_DISABLED=false # INDEXER_INTERNAL_TRANSACTIONS_CONCURRENCY= # INDEXER_BLOCK_REWARD_BATCH_SIZE= # INDEXER_BLOCK_REWARD_CONCURRENCY= +# INDEXER_PENDING_TRANSACTIONS_SANITIZER_INTERVAL= # INDEXER_TOKEN_INSTANCE_USE_BASE_URI_RETRY= # 
INDEXER_TOKEN_INSTANCE_RETRY_REFETCH_INTERVAL= # INDEXER_TOKEN_INSTANCE_RETRY_BATCH_SIZE=10 @@ -221,6 +223,10 @@ API_V1_WRITE_METHODS_DISABLED=false # INDEXER_TOKEN_INSTANCE_SANITIZE_CONCURRENCY= # INDEXER_TOKEN_INSTANCE_REFETCH_BATCH_SIZE=10 # INDEXER_TOKEN_INSTANCE_REFETCH_CONCURRENCY= +# INDEXER_TOKEN_INSTANCE_CIDR_BLACKLIST= +# INDEXER_TOKEN_INSTANCE_HOST_FILTERING_ENABLED= +# INDEXER_TOKEN_INSTANCE_ALLOWED_URI_PROTOCOLS= +# INDEXER_TOKEN_TRANSFER_BLOCK_CONSENSUS_SANITIZER_INTERVAL=20m # INDEXER_SIGNED_AUTHORIZATION_STATUS_BATCH_SIZE= # INDEXER_FHE_OPERATIONS_ENABLED=false # INDEXER_MULTICHAIN_SEARCH_DB_EXPORT_MAIN_QUEUE_BATCH_SIZE= @@ -246,11 +252,6 @@ API_V1_WRITE_METHODS_DISABLED=false # INDEXER_INTERNAL_TRANSACTION_DELETE_QUEUE_BATCH_SIZE= # INDEXER_INTERNAL_TRANSACTION_DELETE_QUEUE_CONCURRENCY= # INDEXER_INTERNAL_TRANSACTION_DELETE_QUEUE_THRESHOLD= -# INDEXER_TOKEN_INSTANCE_CIDR_BLACKLIST= -# INDEXER_TOKEN_INSTANCE_HOST_FILTERING_ENABLED= -# INDEXER_TOKEN_INSTANCE_ALLOWED_URI_PROTOCOLS= -# INDEXER_DISABLE_TOKEN_INSTANCE_ERC_1155_SANITIZE_FETCHER=false -# INDEXER_DISABLE_TOKEN_INSTANCE_ERC_721_SANITIZE_FETCHER=false # INDEXER_COIN_BALANCES_BATCH_SIZE= # INDEXER_COIN_BALANCES_CONCURRENCY= # INDEXER_RECEIPTS_BATCH_SIZE= From 9eead37d95bfafb48b5d52e96ddd764a6b3a475a Mon Sep 17 00:00:00 2001 From: GimluCom <79271880+GimluCom@users.noreply.github.com> Date: Wed, 25 Mar 2026 14:16:25 +0100 Subject: [PATCH 37/42] fix: Allow fetching of stale token balances (#14154) --- .../runner/address/current_token_balances.ex | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/apps/explorer/lib/explorer/chain/import/runner/address/current_token_balances.ex b/apps/explorer/lib/explorer/chain/import/runner/address/current_token_balances.ex index 307733c7073c..85e82bb0de68 100644 --- a/apps/explorer/lib/explorer/chain/import/runner/address/current_token_balances.ex +++ 
b/apps/explorer/lib/explorer/chain/import/runner/address/current_token_balances.ex @@ -297,9 +297,6 @@ defmodule Explorer.Chain.Import.Runner.Address.CurrentTokenBalances do # ctb does not exist defp should_update?(_new_ctb, nil), do: true - # new ctb has no value - defp should_update?(%{value_fetched_at: nil}, _existing_ctb), do: false - # new ctb is newer defp should_update?(%{block_number: new_ctb_block_number}, %{block_number: existing_ctb_block_number}) when new_ctb_block_number > existing_ctb_block_number, @@ -379,7 +376,7 @@ defmodule Explorer.Chain.Import.Runner.Address.CurrentTokenBalances do update: [ set: [ block_number: fragment("EXCLUDED.block_number"), - value: fragment("EXCLUDED.value"), + value: fragment("COALESCE(EXCLUDED.value, ?)", current_token_balance.value), value_fetched_at: fragment("EXCLUDED.value_fetched_at"), old_value: current_token_balance.value, token_type: fragment("EXCLUDED.token_type"), @@ -390,12 +387,12 @@ defmodule Explorer.Chain.Import.Runner.Address.CurrentTokenBalances do ] ], where: - fragment("EXCLUDED.value_fetched_at IS NOT NULL") and - (fragment("? < EXCLUDED.block_number", current_token_balance.block_number) or - (fragment("? = EXCLUDED.block_number", current_token_balance.block_number) and - fragment("EXCLUDED.value IS NOT NULL") and - (is_nil(current_token_balance.value_fetched_at) or - fragment("? < EXCLUDED.value_fetched_at", current_token_balance.value_fetched_at)))) + fragment("? < EXCLUDED.block_number", current_token_balance.block_number) or + (fragment("? = EXCLUDED.block_number", current_token_balance.block_number) and + fragment("EXCLUDED.value_fetched_at IS NOT NULL") and + fragment("EXCLUDED.value IS NOT NULL") and + (is_nil(current_token_balance.value_fetched_at) or + fragment("? 
< EXCLUDED.value_fetched_at", current_token_balance.value_fetched_at))) ) end From e546c9568c7dfd40dec76b4940b3895c5b9d0846 Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Wed, 25 Mar 2026 20:18:42 +0300 Subject: [PATCH 38/42] chore: Stabilize various flaky tests (#14149) --- .../channels/exchange_rate_channel_test.exs | 15 ++-- .../channels/v2/block_channel_test.exs | 5 +- .../api/v2/authenticate_controller_test.exs | 8 ++ ...address_token_transfer_controller_test.exs | 5 +- .../api/v2/address_controller_test.exs | 59 ++++++++++++++- .../test/support/feature_case.ex | 4 + apps/explorer/lib/explorer/etherscan/logs.ex | 2 + .../third_party_integrations/auth0/legacy.ex | 18 +++-- .../lib/explorer/token/metadata_retriever.ex | 10 ++- .../address/current_token_balances_test.exs | 8 +- ..._zero_value_internal_transactions_test.exs | 75 +++++++++++++++---- ...ract_address_hash_w_pending_index_test.exs | 10 +++ ..._blocks_with_missing_transactions_test.exs | 15 ++-- apps/explorer/test/support/data_case.ex | 12 ++- .../catchup/massive_blocks_fetcher_test.exs | 61 ++++++++++++++- .../indexer/fetcher/contract_code_test.exs | 44 ++++++++--- .../fetcher/internal_transaction_test.exs | 9 ++- .../fetcher/on_demand/coin_balance_test.exs | 8 +- .../fetcher/on_demand/contract_code_test.exs | 31 +++++++- .../fetcher/on_demand/token_balance_test.exs | 35 +++++++-- .../token_instance_metadata_refetch_test.exs | 26 +++++-- .../fetcher/pending_transaction_test.exs | 16 +++- .../fetcher/token_instance/realtime_test.exs | 31 +++++--- .../test/indexer/fetcher/uncle_block_test.exs | 12 ++- 24 files changed, 424 insertions(+), 95 deletions(-) diff --git a/apps/block_scout_web/test/block_scout_web/channels/exchange_rate_channel_test.exs b/apps/block_scout_web/test/block_scout_web/channels/exchange_rate_channel_test.exs index 035c39baff8b..41ee3b6aa449 100644 --- a/apps/block_scout_web/test/block_scout_web/channels/exchange_rate_channel_test.exs +++ 
b/apps/block_scout_web/test/block_scout_web/channels/exchange_rate_channel_test.exs @@ -6,7 +6,7 @@ defmodule BlockScoutWeb.ExchangeRateChannelTest do alias BlockScoutWeb.Notifier alias Explorer.Market alias Explorer.Market.Fetcher.Coin - alias Explorer.Market.{MarketHistory, Token} + alias Explorer.Market.{MarketHistory, MarketHistoryCache, Token} alias Explorer.Market.Source.TestSource setup :verify_on_exit! @@ -46,8 +46,8 @@ defmodule BlockScoutWeb.ExchangeRateChannelTest do describe "new_rate" do test "subscribed user is notified", %{token: token} do Coin.handle_info({nil, {{:ok, token}, false}}, %{}) - Supervisor.terminate_child(Explorer.Supervisor, {ConCache, Explorer.Market.MarketHistoryCache.cache_name()}) - Supervisor.restart_child(Explorer.Supervisor, {ConCache, Explorer.Market.MarketHistoryCache.cache_name()}) + ConCache.delete(MarketHistoryCache.cache_name(), MarketHistoryCache.updated_at_key()) + ConCache.delete(MarketHistoryCache.cache_name(), MarketHistoryCache.data_key()) topic = "exchange_rate_old:new_rate" @endpoint.subscribe(topic) @@ -73,8 +73,8 @@ defmodule BlockScoutWeb.ExchangeRateChannelTest do end) Coin.handle_info({nil, {{:ok, token}, false}}, %{}) - Supervisor.terminate_child(Explorer.Supervisor, {ConCache, Explorer.Market.MarketHistoryCache.cache_name()}) - Supervisor.restart_child(Explorer.Supervisor, {ConCache, Explorer.Market.MarketHistoryCache.cache_name()}) + ConCache.delete(MarketHistoryCache.cache_name(), MarketHistoryCache.updated_at_key()) + ConCache.delete(MarketHistoryCache.cache_name(), MarketHistoryCache.data_key()) today = Date.utc_today() @@ -90,7 +90,10 @@ defmodule BlockScoutWeb.ExchangeRateChannelTest do MarketHistory.bulk_insert(records) - Market.fetch_recent_history() + ConCache.delete(MarketHistoryCache.cache_name(), MarketHistoryCache.updated_at_key()) + ConCache.delete(MarketHistoryCache.cache_name(), MarketHistoryCache.data_key()) + + assert Enum.map(Market.fetch_recent_history(), &Map.take(&1, [:date, 
:closing_price])) == records topic = "exchange_rate_old:new_rate" @endpoint.subscribe(topic) diff --git a/apps/block_scout_web/test/block_scout_web/channels/v2/block_channel_test.exs b/apps/block_scout_web/test/block_scout_web/channels/v2/block_channel_test.exs index 116a00d00efa..a071ea19c9af 100644 --- a/apps/block_scout_web/test/block_scout_web/channels/v2/block_channel_test.exs +++ b/apps/block_scout_web/test/block_scout_web/channels/v2/block_channel_test.exs @@ -2,6 +2,7 @@ defmodule BlockScoutWeb.V2.BlockChannelTest do use BlockScoutWeb.ChannelCase alias BlockScoutWeb.Notifier + alias Explorer.Chain.Address alias Explorer.Chain.Cache.Counters.AverageBlockTime alias Plug.Conn @@ -90,7 +91,7 @@ defmodule BlockScoutWeb.V2.BlockChannelTest do 200, Jason.encode!(%{ "names" => %{ - to_string(miner.hash) => "miner.eth" + Address.checksum(miner.hash) => "miner.eth" } }) ) @@ -102,7 +103,7 @@ defmodule BlockScoutWeb.V2.BlockChannelTest do 200, Jason.encode!(%{ "addresses" => %{ - to_string(miner.hash) => %{ + Address.checksum(miner.hash) => %{ "tags" => [] } } diff --git a/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/authenticate_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/authenticate_controller_test.exs index de89711a0865..79a2a25c1d93 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/authenticate_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/account/api/v2/authenticate_controller_test.exs @@ -3,11 +3,19 @@ defmodule BlockScoutWeb.Account.API.V2.AuthenticateControllerTest do alias Explorer.Account.Identity alias Explorer.Chain.Address + alias Explorer.Helper + alias Explorer.ThirdPartyIntegrations.Auth0.Internal alias Explorer.ThirdPartyIntegrations.Dynamic alias Explorer.ThirdPartyIntegrations.Dynamic.Strategy import Mox + setup do + Redix.command(:redix, ["DEL", Helper.redis_key(Internal.redis_key())]) + + :ok + end + describe 
"POST api/account/v2/send_otp" do test "send OTP successfully", %{conn: conn} do Tesla.Test.expect_tesla_call( diff --git a/apps/block_scout_web/test/block_scout_web/controllers/address_token_transfer_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/address_token_transfer_controller_test.exs index 5048056e613f..32f45dab160d 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/address_token_transfer_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/address_token_transfer_controller_test.exs @@ -151,11 +151,12 @@ defmodule BlockScoutWeb.AddressTokenTransferControllerTest do test "returns next_page_path when there are more items", %{conn: conn} do address = insert(:address) token = insert(:token) + start_block_number = 2_000_000 + System.unique_integer([:positive]) page_last_transfer = 1..50 |> Enum.map(fn index -> - block = insert(:block, number: 1000 - index) + block = insert(:block, number: start_block_number - index) transaction = :transaction @@ -174,7 +175,7 @@ defmodule BlockScoutWeb.AddressTokenTransferControllerTest do |> List.last() Enum.each(51..60, fn index -> - block = insert(:block, number: 1000 - index) + block = insert(:block, number: start_block_number - index) transaction = :transaction diff --git a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs index 1b5331d7c539..3631736d31ba 100644 --- a/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs +++ b/apps/block_scout_web/test/block_scout_web/controllers/api/v2/address_controller_test.exs @@ -28,6 +28,7 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do alias Indexer.Fetcher.OnDemand.ContractCode, as: ContractCodeOnDemand alias Plug.Conn + import Ecto.Query, only: [from: 2] import Explorer.Chain, only: [hash_to_lower_case_string: 1] import Mox @@ -974,7 +975,7 
@@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do transactions = (transactions_from ++ transactions_to) - |> Enum.sort(&(Decimal.compare(Wei.to(&1.value, :wei), Wei.to(&2.value, :wei)) in [:eq, :lt])) + |> sort_transactions_by_value(:asc) request = get(conn, "/api/v2/addresses/#{address.hash}/transactions", %{"sort" => "value", "order" => "asc"}) assert response = json_response(request, 200) @@ -1008,7 +1009,7 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do transactions = (transactions_from ++ transactions_to) - |> Enum.sort(&(Decimal.compare(Wei.to(&1.value, :wei), Wei.to(&2.value, :wei)) in [:eq, :gt])) + |> sort_transactions_by_value(:desc) request = get(conn, "/api/v2/addresses/#{address.hash}/transactions", %{"sort" => "value", "order" => "desc"}) assert response = json_response(request, 200) @@ -5702,8 +5703,29 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do assert to_string(cb.value.value) == json["value"] assert cb.block_number == json["block_number"] - assert Jason.encode!(Repo.get_by(Block, number: cb.block_number).timestamp) =~ - String.replace(json["block_timestamp"], "Z", "") + # The API uses linear interpolation over the fetched set (page_size + 1 items), + # so the returned timestamp may be off by up to 1 second from the actual block + # timestamp. Allow ±1 second tolerance to account for this artifact. 
+ expected_timestamps = + Repo.all( + from(block in Block, + where: block.number == ^cb.block_number, + select: block.timestamp + ) + ) + |> Enum.flat_map(fn ts -> + truncated = DateTime.truncate(ts, :second) + + [ + DateTime.add(truncated, -1, :second), + truncated, + DateTime.add(truncated, 1, :second) + ] + end) + |> Enum.uniq() + + {:ok, response_timestamp, 0} = DateTime.from_iso8601(json["block_timestamp"]) + assert DateTime.truncate(response_timestamp, :second) in expected_timestamps end defp compare_item(%Token{} = token, json) do @@ -5877,6 +5899,35 @@ defmodule BlockScoutWeb.API.V2.AddressControllerTest do defp value("ERC-721", _), do: 1 defp value(_, nft), do: nft.current_token_balance.value + defp sort_transactions_by_value(transactions, order) do + Enum.sort(transactions, fn a, b -> + case Decimal.compare(Wei.to(a.value, :wei), Wei.to(b.value, :wei)) do + :lt -> order == :asc + :gt -> order == :desc + :eq -> compare_transactions_default_order(a, b) + end + end) + end + + defp compare_transactions_default_order(a, b) do + case { + compare_values(a.block_number, b.block_number), + compare_values(a.index, b.index), + DateTime.compare(a.inserted_at, b.inserted_at), + compare_values(to_string(a.hash), to_string(b.hash)) + } do + {:lt, _, _, _} -> false + {:eq, :lt, _, _} -> false + {:eq, :eq, :lt, _} -> false + {:eq, :eq, :eq, :gt} -> false + _ -> true + end + end + + defp compare_values(a, b) when a < b, do: :lt + defp compare_values(a, b) when a > b, do: :gt + defp compare_values(_, _), do: :eq + defp check_paginated_response(first_page_resp, second_page_resp, list) do assert Enum.count(first_page_resp["items"]) == 50 assert first_page_resp["next_page_params"] != nil diff --git a/apps/block_scout_web/test/support/feature_case.ex b/apps/block_scout_web/test/support/feature_case.ex index a3b811f1d79b..b3954dc57445 100644 --- a/apps/block_scout_web/test/support/feature_case.ex +++ b/apps/block_scout_web/test/support/feature_case.ex @@ -31,6 +31,10 @@ 
defmodule BlockScoutWeb.FeatureCase do Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Transactions.child_id()) Supervisor.terminate_child(Explorer.Supervisor, Explorer.Chain.Cache.Accounts.child_id()) Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Accounts.child_id()) + Supervisor.terminate_child(Explorer.Supervisor, Explorer.Chain.Cache.Blocks.child_id()) + Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Blocks.child_id()) + Supervisor.terminate_child(Explorer.Supervisor, Explorer.Chain.Cache.Uncles.child_id()) + Supervisor.restart_child(Explorer.Supervisor, Explorer.Chain.Cache.Uncles.child_id()) metadata = Phoenix.Ecto.SQL.Sandbox.metadata_for(Explorer.Repo, self()) {:ok, session} = Wallaby.start_session(metadata: metadata) diff --git a/apps/explorer/lib/explorer/etherscan/logs.ex b/apps/explorer/lib/explorer/etherscan/logs.ex index 26ec06cf0044..bf6df7ba9137 100644 --- a/apps/explorer/lib/explorer/etherscan/logs.ex +++ b/apps/explorer/lib/explorer/etherscan/logs.ex @@ -99,6 +99,7 @@ defmodule Explorer.Etherscan.Logs do block_timestamp: transaction.block_timestamp, block_consensus: transaction.block_consensus }, + order_by: [asc: transaction.block_number, asc: log.index], limit: 1000 ) @@ -123,6 +124,7 @@ defmodule Explorer.Etherscan.Logs do block_timestamp: block.timestamp, block_consensus: block.consensus }, + order_by: [asc: block.number, asc: log.index], limit: 1000 ) diff --git a/apps/explorer/lib/explorer/third_party_integrations/auth0/legacy.ex b/apps/explorer/lib/explorer/third_party_integrations/auth0/legacy.ex index c16e50fe50ee..c987f1a8fba9 100644 --- a/apps/explorer/lib/explorer/third_party_integrations/auth0/legacy.ex +++ b/apps/explorer/lib/explorer/third_party_integrations/auth0/legacy.ex @@ -101,11 +101,12 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0.Legacy do @spec find_or_create_web3_user(String.t(), String.t()) :: {:ok, map()} | :error | {:error, String.t()} def 
find_or_create_web3_user(address, signature) do case Internal.find_users_by_web3_address(address) do - {:ok, [%{"user_metadata" => %{"web3_address_hash" => ^address}} = user]} -> - {:ok, user} - - {:ok, [%{"user_id" => user_id}]} -> - update_user_with_web3_address(user_id, address) + {:ok, [user]} -> + if same_web3_address?(user, address) do + {:ok, user} + else + update_user_with_web3_address(user["user_id"], address) + end {:ok, []} -> Internal.create_web3_user(address, signature, %{web3_address_hash: address}) @@ -176,6 +177,13 @@ defmodule Explorer.ThirdPartyIntegrations.Auth0.Legacy do defp maybe_verify_email(_), do: :ok + defp same_web3_address?(%{"user_metadata" => %{"web3_address_hash" => stored_address}}, address) + when is_binary(stored_address) do + String.downcase(stored_address) == String.downcase(address) + end + + defp same_web3_address?(_, _), do: false + defp merge_email_users([primary_user | _] = users, identity_id_to_link, provider_for_linking) do identity_map = get_identity_map(users) diff --git a/apps/explorer/lib/explorer/token/metadata_retriever.ex b/apps/explorer/lib/explorer/token/metadata_retriever.ex index ab83d3b0734a..b76ad4a91a1c 100644 --- a/apps/explorer/lib/explorer/token/metadata_retriever.ex +++ b/apps/explorer/lib/explorer/token/metadata_retriever.ex @@ -245,13 +245,21 @@ defmodule Explorer.Token.MetadataRetriever do Map.put( metadata, :skip_metadata, - Enum.all?(raw_metadata, fn {_key, value} -> EthereumJSONRPC.contract_failure?(value) end) + Enum.all?(raw_metadata, fn {_key, value} -> contract_failure?(value) end) ) else metadata end end + defp contract_failure?({:error, %{message: message}}) when is_binary(message), + do: String.match?(message, ~r/execution.*revert/) + + defp contract_failure?({:error, %{data: data}}) when is_binary(data), + do: String.match?(data, ~r/execution.*revert/) + + defp contract_failure?(error), do: EthereumJSONRPC.contract_failure?(error) + defp try_to_fetch_erc_1155_name(base_metadata, 
contract_address_hash, token_type) do if token_type == "ERC-1155" && !Map.has_key?(base_metadata, :name) do erc_1155_name_uri = diff --git a/apps/explorer/test/explorer/chain/import/runner/address/current_token_balances_test.exs b/apps/explorer/test/explorer/chain/import/runner/address/current_token_balances_test.exs index e61247b327e8..c5317f83114d 100644 --- a/apps/explorer/test/explorer/chain/import/runner/address/current_token_balances_test.exs +++ b/apps/explorer/test/explorer/chain/import/runner/address/current_token_balances_test.exs @@ -307,7 +307,7 @@ defmodule Explorer.Chain.Import.Runner.Address.CurrentTokenBalancesTest do assert current_token_balance.value == Decimal.new(200) end - test "ignores when the new value_fetched_at not set", %{ + test "updates when the new block number is greater even if value_fetched_at is not set", %{ address: %Address{hash: address_hash} = address, token: %Token{contract_address_hash: token_contract_address_hash}, options: options @@ -323,7 +323,7 @@ defmodule Explorer.Chain.Import.Runner.Address.CurrentTokenBalancesTest do update_holder_count!(token_contract_address_hash, 1) - assert {:ok, %{address_current_token_balances: [], address_current_token_balances_update_token_holder_counts: []}} = + assert {:ok, %{address_current_token_balances_update_token_holder_counts: []}} = run_changes( %{ address_hash: address_hash, @@ -336,8 +336,8 @@ defmodule Explorer.Chain.Import.Runner.Address.CurrentTokenBalancesTest do current_token_balance = Repo.get_by(CurrentTokenBalance, address_hash: address_hash) - assert current_token_balance.block_number == 1 - assert current_token_balance.value == Decimal.new(200) + assert current_token_balance.block_number == 2 + assert current_token_balance.value == Decimal.new(100) end test "ignores when the new block number is lesser", %{ diff --git a/apps/explorer/test/explorer/migrator/delete_zero_value_internal_transactions_test.exs 
b/apps/explorer/test/explorer/migrator/delete_zero_value_internal_transactions_test.exs index 0ad062a92b37..dd5c5c80ab9f 100644 --- a/apps/explorer/test/explorer/migrator/delete_zero_value_internal_transactions_test.exs +++ b/apps/explorer/test/explorer/migrator/delete_zero_value_internal_transactions_test.exs @@ -6,6 +6,20 @@ defmodule Explorer.Migrator.DeleteZeroValueInternalTransactionsTest do alias Explorer.Repo alias Explorer.Utility.{AddressIdToAddressHash, InternalTransactionsAddressPlaceholder} + setup do + on_exit(fn -> + if pid = Process.whereis(DeleteZeroValueInternalTransactions) do + try do + GenServer.stop(pid, :normal, 5000) + catch + :exit, {:noproc, _} -> :ok + end + end + end) + + :ok + end + test "Deletes zero value calls" do address_1 = insert(:address) address_2 = insert(:address) @@ -127,7 +141,9 @@ defmodule Explorer.Migrator.DeleteZeroValueInternalTransactionsTest do wait_for_results(fn -> Repo.one!( from(ms in MigrationStatus, - where: ms.migration_name == ^"delete_zero_value_internal_transactions" and ms.status == "completed" + where: + ms.migration_name == ^"delete_zero_value_internal_transactions" and + ms.status == "completed" ) ) end) @@ -137,12 +153,18 @@ defmodule Explorer.Migrator.DeleteZeroValueInternalTransactionsTest do assert Enum.count(all_internal_transactions) == 15 non_zero_value_calls = - Enum.filter(all_internal_transactions, &(&1.type == :call and not Decimal.eq?(&1.value.value, 0))) + Enum.filter( + all_internal_transactions, + &(&1.type == :call and not Decimal.eq?(&1.value.value, 0)) + ) non_calls = Enum.filter(all_internal_transactions, &(&1.type != :call)) recent_zero_value_calls = - Enum.filter(all_internal_transactions, &(&1.type == :call and Decimal.eq?(&1.value.value, 0))) + Enum.filter( + all_internal_transactions, + &(&1.type == :call and Decimal.eq?(&1.value.value, 0)) + ) assert Enum.count(non_zero_value_calls) == 4 assert Enum.all?(non_zero_value_calls, &(not Decimal.eq?(&1.value.value, 0))) @@ -155,31 
+177,42 @@ defmodule Explorer.Migrator.DeleteZeroValueInternalTransactionsTest do assert length(id_to_hashes) == 3 - assert %{address_id: address_1_id} = Enum.find(id_to_hashes, &(&1.address_hash == address_1.hash)) - assert %{address_id: address_2_id} = Enum.find(id_to_hashes, &(&1.address_hash == address_2.hash)) - assert %{address_id: address_3_id} = Enum.find(id_to_hashes, &(&1.address_hash == address_3.hash)) + assert %{address_id: address_1_id} = + Enum.find(id_to_hashes, &(&1.address_hash == address_1.hash)) + + assert %{address_id: address_2_id} = + Enum.find(id_to_hashes, &(&1.address_hash == address_2.hash)) + + assert %{address_id: address_3_id} = + Enum.find(id_to_hashes, &(&1.address_hash == address_3.hash)) placeholders = Repo.all(InternalTransactionsAddressPlaceholder) assert length(placeholders) == 3 assert Enum.any?(placeholders, fn p -> - p.address_id == address_1_id and p.block_number == block.number and p.count_tos == 5 and p.count_froms == 3 + p.address_id == address_1_id and p.block_number == block.number and p.count_tos == 5 and + p.count_froms == 3 end) assert Enum.any?(placeholders, fn p -> - p.address_id == address_2_id and p.block_number == block.number and p.count_tos == 3 and p.count_froms == 4 + p.address_id == address_2_id and p.block_number == block.number and p.count_tos == 3 and + p.count_froms == 4 end) assert Enum.any?(placeholders, fn p -> - p.address_id == address_3_id and p.block_number == block.number and p.count_tos == 4 and p.count_froms == 5 + p.address_id == address_3_id and p.block_number == block.number and p.count_tos == 4 and + p.count_froms == 5 end) end describe "ShrinkInternalTransactions migration dependency handling" do setup do - original_shrink_config = Application.get_env(:explorer, Explorer.Migrator.ShrinkInternalTransactions) - original_delete_config = Application.get_env(:explorer, Explorer.Migrator.DeleteZeroValueInternalTransactions) + original_shrink_config = + Application.get_env(:explorer, 
Explorer.Migrator.ShrinkInternalTransactions) + + original_delete_config = + Application.get_env(:explorer, Explorer.Migrator.DeleteZeroValueInternalTransactions) # Set a short dependency check interval for tests Application.put_env(:explorer, Explorer.Migrator.DeleteZeroValueInternalTransactions, @@ -188,13 +221,21 @@ defmodule Explorer.Migrator.DeleteZeroValueInternalTransactionsTest do on_exit(fn -> if original_shrink_config do - Application.put_env(:explorer, Explorer.Migrator.ShrinkInternalTransactions, original_shrink_config) + Application.put_env( + :explorer, + Explorer.Migrator.ShrinkInternalTransactions, + original_shrink_config + ) else Application.delete_env(:explorer, Explorer.Migrator.ShrinkInternalTransactions) end if original_delete_config do - Application.put_env(:explorer, Explorer.Migrator.DeleteZeroValueInternalTransactions, original_delete_config) + Application.put_env( + :explorer, + Explorer.Migrator.DeleteZeroValueInternalTransactions, + original_delete_config + ) else Application.delete_env(:explorer, Explorer.Migrator.DeleteZeroValueInternalTransactions) end @@ -251,7 +292,9 @@ defmodule Explorer.Migrator.DeleteZeroValueInternalTransactionsTest do wait_for_results(fn -> Repo.one!( from(ms in MigrationStatus, - where: ms.migration_name == ^"delete_zero_value_internal_transactions" and ms.status == "completed" + where: + ms.migration_name == ^"delete_zero_value_internal_transactions" and + ms.status == "completed" ) ) end) @@ -297,7 +340,9 @@ defmodule Explorer.Migrator.DeleteZeroValueInternalTransactionsTest do wait_for_results(fn -> Repo.one!( from(ms in MigrationStatus, - where: ms.migration_name == ^"delete_zero_value_internal_transactions" and ms.status == "completed" + where: + ms.migration_name == ^"delete_zero_value_internal_transactions" and + ms.status == "completed" ) ) end) diff --git a/apps/explorer/test/explorer/migrator/heavy_db_index_operation/create_transactions_created_contract_address_hash_w_pending_index_test.exs 
b/apps/explorer/test/explorer/migrator/heavy_db_index_operation/create_transactions_created_contract_address_hash_w_pending_index_test.exs index fc97ccc8a5f1..df7e446ea96c 100644 --- a/apps/explorer/test/explorer/migrator/heavy_db_index_operation/create_transactions_created_contract_address_hash_w_pending_index_test.exs +++ b/apps/explorer/test/explorer/migrator/heavy_db_index_operation/create_transactions_created_contract_address_hash_w_pending_index_test.exs @@ -1,16 +1,26 @@ defmodule Explorer.Migrator.HeavyDbIndexOperation.CreateTransactionsCreatedContractAddressHashWPendingIndexTest do use Explorer.DataCase, async: false + import Ecto.Query + alias Explorer.Chain.Cache.BackgroundMigrations alias Explorer.Migrator.{HeavyDbIndexOperation, MigrationStatus} alias Explorer.Migrator.HeavyDbIndexOperation.Helper alias Explorer.Migrator.HeavyDbIndexOperation.CreateTransactionsCreatedContractAddressHashWPendingIndex + alias Explorer.Repo describe "Creates heavy index `transactions_created_contract_address_hash_w_pending_index`" do setup do configuration = Application.get_env(:explorer, HeavyDbIndexOperation) Application.put_env(:explorer, HeavyDbIndexOperation, check_interval: 200) + migration_names = + [CreateTransactionsCreatedContractAddressHashWPendingIndex.migration_name()] ++ + CreateTransactionsCreatedContractAddressHashWPendingIndex.dependent_from_migrations() + + from(ms in MigrationStatus, where: ms.migration_name in ^migration_names) + |> Repo.delete_all() + on_exit(fn -> Application.put_env(:explorer, HeavyDbIndexOperation, configuration) end) diff --git a/apps/explorer/test/explorer/migrator/reindex_blocks_with_missing_transactions_test.exs b/apps/explorer/test/explorer/migrator/reindex_blocks_with_missing_transactions_test.exs index 549d7535d118..b68b9a105b17 100644 --- a/apps/explorer/test/explorer/migrator/reindex_blocks_with_missing_transactions_test.exs +++ b/apps/explorer/test/explorer/migrator/reindex_blocks_with_missing_transactions_test.exs @@ 
-65,13 +65,16 @@ defmodule Explorer.Migrator.ReindexBlocksWithMissingTransactionsTest do ReindexBlocksWithMissingTransactions.start_link([]) - wait_for_results(fn -> - Repo.one!( - from(ms in MigrationStatus, - where: ms.migration_name == ^"reindex_blocks_with_missing_transactions" and ms.status == "completed" + wait_for_results( + fn -> + Repo.one!( + from(ms in MigrationStatus, + where: ms.migration_name == ^"reindex_blocks_with_missing_transactions" and ms.status == "completed" + ) ) - ) - end) + end, + 60 + ) assert %{consensus: true, refetch_needed: false} = Repo.get_by(Block, number: block_number_correct) assert %{consensus: true, refetch_needed: true} = Repo.get_by(Block, number: block_number_incorrect) diff --git a/apps/explorer/test/support/data_case.ex b/apps/explorer/test/support/data_case.ex index bf94e66cf49a..cba03414bdf4 100644 --- a/apps/explorer/test/support/data_case.ex +++ b/apps/explorer/test/support/data_case.ex @@ -56,12 +56,20 @@ defmodule Explorer.DataCase do end def wait_for_results(producer) do + wait_for_results(producer, 30) + end + + def wait_for_results(_producer, 0) do + raise "wait_for_results timed out after exhausting retries" + end + + def wait_for_results(producer, retries) when retries > 0 do Process.sleep(100) producer.() rescue - [DBConnection.ConnectionError, Ecto.NoResultsError] -> + _error in [DBConnection.ConnectionError, Ecto.NoResultsError] -> Process.sleep(300) - wait_for_results(producer) + wait_for_results(producer, retries - 1) end @doc """ diff --git a/apps/indexer/test/indexer/block/catchup/massive_blocks_fetcher_test.exs b/apps/indexer/test/indexer/block/catchup/massive_blocks_fetcher_test.exs index 5cd6be38e33b..a2540e13166b 100644 --- a/apps/indexer/test/indexer/block/catchup/massive_blocks_fetcher_test.exs +++ b/apps/indexer/test/indexer/block/catchup/massive_blocks_fetcher_test.exs @@ -14,8 +14,7 @@ defmodule Indexer.Block.Catchup.MassiveBlocksFetcherTest do ContractCode, InternalTransaction, 
ReplacedTransaction, - Token, - UncleBlock + Token } alias Explorer.Chain.Block @@ -25,6 +24,32 @@ defmodule Indexer.Block.Catchup.MassiveBlocksFetcherTest do setup :verify_on_exit! + setup do + old_celo_env = Application.get_env(:explorer, Explorer.Chain.Cache.CeloCoreContracts, []) + + Application.put_env(:explorer, Explorer.Chain.Cache.CeloCoreContracts, + contracts: %{ + "addresses" => %{ + "Accounts" => [], + "Election" => [], + "EpochRewards" => [], + "FeeHandler" => [], + "GasPriceMinimum" => [], + "GoldToken" => [], + "Governance" => [], + "LockedGold" => [], + "Reserve" => [], + "StableToken" => [], + "Validators" => [] + } + } + ) + + on_exit(fn -> + Application.put_env(:explorer, Explorer.Chain.Cache.CeloCoreContracts, old_celo_env) + end) + end + test "successfully imports block", %{json_rpc_named_arguments: json_rpc_named_arguments} do %{number: block_number} = insert(:massive_block) block_quantity = integer_to_quantity(block_number) @@ -214,12 +239,42 @@ defmodule Indexer.Block.Catchup.MassiveBlocksFetcherTest do TokenBalanceHistorical.Supervisor.Case.start_supervised!(json_rpc_named_arguments: json_rpc_named_arguments) ReplacedTransaction.Supervisor.Case.start_supervised!() + Indexer.Fetcher.Filecoin.AddressInfo.Supervisor.Case.start_supervised!( + json_rpc_named_arguments: json_rpc_named_arguments + ) + MassiveBlocksFetcher.start_link(%{task_supervisor: Indexer.Block.Catchup.TaskSupervisor}) - Process.sleep(1000) + wait_until(:timer.seconds(10), fn -> + match?([%{number: ^block_number}], Repo.all(Block)) and + Repo.all(MassiveBlock) == [] and + Repo.all(MissingBlockRange) == [] + end) assert [%{number: ^block_number}] = Repo.all(Block) assert [] = Repo.all(MassiveBlock) assert [] = Repo.all(MissingBlockRange) end + + defp wait_until(timeout, producer) do + parent = self() + ref = make_ref() + + spawn(fn -> do_wait_until(parent, ref, producer) end) + + receive do + {^ref, :ok} -> :ok + after + timeout -> exit(:timeout) + end + end + + defp 
do_wait_until(parent, ref, producer) do + if producer.() do + send(parent, {ref, :ok}) + else + :timer.sleep(100) + do_wait_until(parent, ref, producer) + end + end end diff --git a/apps/indexer/test/indexer/fetcher/contract_code_test.exs b/apps/indexer/test/indexer/fetcher/contract_code_test.exs index e671eed641a6..a0213504ad14 100644 --- a/apps/indexer/test/indexer/fetcher/contract_code_test.exs +++ b/apps/indexer/test/indexer/fetcher/contract_code_test.exs @@ -6,7 +6,7 @@ defmodule Indexer.Fetcher.ContractCodeTest do import Mox - alias Explorer.Chain.{Address, Transaction} + alias Explorer.Chain.{Address, Data, Transaction} alias Indexer.Fetcher.ContractCode @moduletag :capture_log @@ -146,24 +146,48 @@ defmodule Indexer.Fetcher.ContractCodeTest do false ) - # Wait a bit to ensure any potential processing is done - Process.sleep(100) + updated_address = + wait(fn -> + Repo.one!( + from(address in Address, where: address.hash == ^address.hash and not is_nil(address.contract_code)) + ) + end) + + assert Data.to_string(updated_address.contract_code) == "0x" - # Verify that the contract code was set to "0x" - updated_address = Repo.get!(Address, address.hash) - assert to_string(updated_address.contract_code) == "0x" + updated_transaction = + wait(fn -> + Repo.one!( + from(transaction in Transaction, + where: transaction.hash == ^transaction.hash and not is_nil(transaction.created_contract_code_indexed_at) + ) + ) + end) - # Verify that the transaction's created_contract_code_indexed_at remains nil - updated_transaction = Repo.get!(Transaction, transaction.hash) assert updated_transaction.created_contract_code_indexed_at end end defp wait(producer) do + wait(producer, 10_000) + end + + defp wait(producer, timeout) when is_integer(timeout) and timeout > 0 do + deadline = System.monotonic_time(:millisecond) + timeout + + wait(producer, timeout, deadline) + end + + defp wait(producer, timeout, deadline) do producer.() rescue Ecto.NoResultsError -> - Process.sleep(100) - 
wait(producer) + if System.monotonic_time(:millisecond) > deadline do + raise RuntimeError, + "wait/1 timed out after #{timeout}ms while waiting for producer #{inspect(producer)}" + else + Process.sleep(100) + wait(producer, timeout, deadline) + end end end diff --git a/apps/indexer/test/indexer/fetcher/internal_transaction_test.exs b/apps/indexer/test/indexer/fetcher/internal_transaction_test.exs index 0855a4c3d1fd..7692199389b4 100644 --- a/apps/indexer/test/indexer/fetcher/internal_transaction_test.exs +++ b/apps/indexer/test/indexer/fetcher/internal_transaction_test.exs @@ -82,9 +82,12 @@ defmodule Indexer.Fetcher.InternalTransactionTest do PendingTransaction.Supervisor.Case.start_supervised!(json_rpc_named_arguments: json_rpc_named_arguments) start_token_balance_fetcher(json_rpc_named_arguments) - wait_for_results(fn -> - Repo.one!(from(transaction in Explorer.Chain.Transaction, where: is_nil(transaction.block_hash), limit: 1)) - end) + wait_for_results( + fn -> + Repo.one!(from(transaction in Explorer.Chain.Transaction, where: is_nil(transaction.block_hash), limit: 1)) + end, + 60 + ) hash_strings = InternalTransaction.init([], fn hash_string, acc -> [hash_string | acc] end, json_rpc_named_arguments) diff --git a/apps/indexer/test/indexer/fetcher/on_demand/coin_balance_test.exs b/apps/indexer/test/indexer/fetcher/on_demand/coin_balance_test.exs index 5aa11aa79ae4..199973e53e63 100644 --- a/apps/indexer/test/indexer/fetcher/on_demand/coin_balance_test.exs +++ b/apps/indexer/test/indexer/fetcher/on_demand/coin_balance_test.exs @@ -165,7 +165,8 @@ defmodule Indexer.Fetcher.OnDemand.CoinBalanceTest do assert_receive( {:chain_event, :addresses, :on_demand, - [%{hash: ^address_hash, fetched_coin_balance: ^expected_wei, fetched_coin_balance_block_number: 102}]} + [%{hash: ^address_hash, fetched_coin_balance: ^expected_wei, fetched_coin_balance_block_number: 102}]}, + 1_000 ) end @@ -183,11 +184,10 @@ defmodule Indexer.Fetcher.OnDemand.CoinBalanceTest do {:ok, 
expected_wei} = Wei.cast(2) - :timer.sleep(100) - assert_receive( {:chain_event, :addresses, :on_demand, - [%{hash: ^address_hash, fetched_coin_balance: ^expected_wei, fetched_coin_balance_block_number: 103}]} + [%{hash: ^address_hash, fetched_coin_balance: ^expected_wei, fetched_coin_balance_block_number: 103}]}, + 1_000 ) end end diff --git a/apps/indexer/test/indexer/fetcher/on_demand/contract_code_test.exs b/apps/indexer/test/indexer/fetcher/on_demand/contract_code_test.exs index d99e5122afd4..5a9f49906574 100644 --- a/apps/indexer/test/indexer/fetcher/on_demand/contract_code_test.exs +++ b/apps/indexer/test/indexer/fetcher/on_demand/contract_code_test.exs @@ -86,7 +86,15 @@ defmodule Indexer.Fetcher.OnDemand.ContractCodeTest do address = assert(Repo.get(Address, address_hash)) assert is_nil(address.contract_code) - attempts = Repo.get(AddressContractCodeFetchAttempt, address_hash) + attempts = + wait_for_results(fn -> + Repo.one!( + from(attempt in AddressContractCodeFetchAttempt, + where: attempt.address_hash == ^address_hash and attempt.retries_number == 1 + ) + ) + end) + assert attempts.retries_number == 1 refute_receive({:chain_event, :fetched_bytecode, :on_demand, [^address_hash, "0x"]}) @@ -110,7 +118,15 @@ defmodule Indexer.Fetcher.OnDemand.ContractCodeTest do address = assert(Repo.get(Address, address_hash)) assert is_nil(address.contract_code) - attempts = Repo.get(AddressContractCodeFetchAttempt, address_hash) + attempts = + wait_for_results(fn -> + Repo.one!( + from(attempt in AddressContractCodeFetchAttempt, + where: attempt.address_hash == ^address_hash and attempt.retries_number == 1 + ) + ) + end) + assert attempts.retries_number == 1 refute_receive({:chain_event, :fetched_bytecode, :on_demand, [^address_hash, "0x"]}) @@ -127,7 +143,16 @@ defmodule Indexer.Fetcher.OnDemand.ContractCodeTest do address = assert(Repo.get(Address, address_hash)) assert is_nil(address.contract_code) - refute is_nil(Repo.get(AddressContractCodeFetchAttempt, 
address_hash)) + attempts = + wait_for_results(fn -> + Repo.one!( + from(attempt in AddressContractCodeFetchAttempt, + where: attempt.address_hash == ^address_hash and attempt.retries_number == 1 + ) + ) + end) + + assert attempts.retries_number == 1 refute_receive({:chain_event, :fetched_bytecode, :on_demand, [^address_hash, ^contract_code]}) diff --git a/apps/indexer/test/indexer/fetcher/on_demand/token_balance_test.exs b/apps/indexer/test/indexer/fetcher/on_demand/token_balance_test.exs index f0039d2ca9d6..d241ee43ffcd 100644 --- a/apps/indexer/test/indexer/fetcher/on_demand/token_balance_test.exs +++ b/apps/indexer/test/indexer/fetcher/on_demand/token_balance_test.exs @@ -64,9 +64,20 @@ defmodule Indexer.Fetcher.OnDemand.TokenBalanceTest do TokenBalanceOnDemand.trigger_fetch(address.hash) - Process.sleep(200) - - [%{value: updated_value} = updated_ctb] = Repo.all(CurrentTokenBalance) + updated_ctb = + wait_for_results(fn -> + Repo.one!( + from( + ctb in CurrentTokenBalance, + where: + ctb.address_hash == ^address.hash and + ctb.token_contract_address_hash == ^token_contract_address_hash and + not is_nil(ctb.value) + ) + ) + end) + + updated_value = updated_ctb.value assert updated_value == Decimal.new(1_000_000_000_000_000_000_000_000) refute is_nil(updated_ctb.value_fetched_at) @@ -97,9 +108,21 @@ defmodule Indexer.Fetcher.OnDemand.TokenBalanceTest do token_balance.block_number ) - Process.sleep(100) - - [%{value: updated_value} = updated_tb] = Repo.all(TokenBalance) + updated_tb = + wait_for_results(fn -> + Repo.one!( + from( + tb in TokenBalance, + where: + tb.address_hash == ^token_balance.address_hash and + tb.token_contract_address_hash == ^token_balance.token_contract_address_hash and + tb.block_number == ^token_balance.block_number and + not is_nil(tb.value) + ) + ) + end) + + updated_value = updated_tb.value assert updated_value == Decimal.new(1_000_000_000_000_000_000_000_000) refute is_nil(updated_tb.value_fetched_at) diff --git 
a/apps/indexer/test/indexer/fetcher/on_demand/token_instance_metadata_refetch_test.exs b/apps/indexer/test/indexer/fetcher/on_demand/token_instance_metadata_refetch_test.exs index d1323aff1ac6..8188b7e1a6ca 100644 --- a/apps/indexer/test/indexer/fetcher/on_demand/token_instance_metadata_refetch_test.exs +++ b/apps/indexer/test/indexer/fetcher/on_demand/token_instance_metadata_refetch_test.exs @@ -68,10 +68,17 @@ defmodule Indexer.Fetcher.OnDemand.TokenInstanceMetadataRefetchTest do assert TokenInstanceMetadataRefetchOnDemand.trigger_refetch(token_instance) == :ok - :timer.sleep(100) - token_instance_from_db = - Repo.get_by(TokenInstance, token_id: token_id, token_contract_address_hash: token.contract_address_hash) + wait_for_results(fn -> + Repo.one!( + from(ti in TokenInstance, + where: + ti.token_id == ^token_id and + ti.token_contract_address_hash == ^token.contract_address_hash and + ti.metadata == ^metadata + ) + ) + end) assert(token_instance_from_db) refute is_nil(token_instance_from_db.metadata) @@ -121,10 +128,17 @@ defmodule Indexer.Fetcher.OnDemand.TokenInstanceMetadataRefetchTest do assert TokenInstanceMetadataRefetchOnDemand.trigger_refetch(token_instance) == :ok - :timer.sleep(100) - token_instance_from_db = - Repo.get_by(TokenInstance, token_id: token_id, token_contract_address_hash: token.contract_address_hash) + wait_for_results(fn -> + Repo.one!( + from(ti in TokenInstance, + where: + ti.token_id == ^token_id and + ti.token_contract_address_hash == ^token.contract_address_hash and + ti.metadata == ^metadata + ) + ) + end) assert(token_instance_from_db) assert token_instance_from_db.metadata == metadata diff --git a/apps/indexer/test/indexer/fetcher/pending_transaction_test.exs b/apps/indexer/test/indexer/fetcher/pending_transaction_test.exs index 7d4b1c7ea746..22d00e48c99a 100644 --- a/apps/indexer/test/indexer/fetcher/pending_transaction_test.exs +++ b/apps/indexer/test/indexer/fetcher/pending_transaction_test.exs @@ -60,9 +60,19 @@ defmodule 
Indexer.Fetcher.PendingTransactionTest do PendingTransaction.Supervisor.Case.start_supervised!(json_rpc_named_arguments: json_rpc_named_arguments) - wait_for_results(fn -> - Repo.one!(from(transaction in Transaction, where: is_nil(transaction.block_hash), limit: 1)) - end) + pending_hash = "0x3a3eb134e6792ce9403ea4188e5e79693de9e4c94e499db132be086400da79e6" + + wait_for_results( + fn -> + Repo.one!( + from(transaction in Transaction, + where: is_nil(transaction.block_hash) and transaction.hash == ^pending_hash, + limit: 1 + ) + ) + end, + 60 + ) assert Repo.aggregate(Transaction, :count, :hash) >= 1 end diff --git a/apps/indexer/test/indexer/fetcher/token_instance/realtime_test.exs b/apps/indexer/test/indexer/fetcher/token_instance/realtime_test.exs index b05310097186..47878311c5b6 100644 --- a/apps/indexer/test/indexer/fetcher/token_instance/realtime_test.exs +++ b/apps/indexer/test/indexer/fetcher/token_instance/realtime_test.exs @@ -102,20 +102,33 @@ defmodule Indexer.Fetcher.TokenInstance.RealtimeTest do type: "ERC-1155" ) - insert(:token_instance, - token_id: 777, - token_contract_address_hash: token.contract_address_hash, - metadata: nil, - error: nil - ) + inserted_instance = + insert(:token_instance, + token_id: 777, + token_contract_address_hash: token.contract_address_hash, + metadata: nil, + error: nil + ) TokenInstanceRealtime.async_fetch([ %{token_contract_address_hash: token.contract_address_hash, token_ids: [Decimal.new(777)]} ]) - :timer.sleep(250) - - [instance] = Repo.all(Instance) + instance = + Enum.reduce_while(1..30, nil, fn _, _ -> + :timer.sleep(100) + + case Repo.get_by(Instance, + token_id: inserted_instance.token_id, + token_contract_address_hash: inserted_instance.token_contract_address_hash + ) do + %{metadata: metadata} = inst when not is_nil(metadata) -> {:halt, inst} + _ -> {:cont, nil} + end + end) + + assert instance != nil, + "Timed out waiting for token instance #{inserted_instance.token_id} at 
#{inserted_instance.token_contract_address_hash} metadata to be populated" assert is_nil(instance.error) assert instance.metadata == %{"name" => "name"} diff --git a/apps/indexer/test/indexer/fetcher/uncle_block_test.exs b/apps/indexer/test/indexer/fetcher/uncle_block_test.exs index 7029c3e59bac..768bdd7ae5bb 100644 --- a/apps/indexer/test/indexer/fetcher/uncle_block_test.exs +++ b/apps/indexer/test/indexer/fetcher/uncle_block_test.exs @@ -61,6 +61,7 @@ defmodule Indexer.Fetcher.UncleBlockTest do uncle_hash_data = to_string(uncle_hash) uncle_uncle_hash_data = to_string(block_hash()) index_data = integer_to_quantity(index) + transaction_hash = "0x3a3eb134e6792ce9403ea4188e5e79693de9e4c94e499db132be086400da79e6" EthereumJSONRPC.Mox |> expect(:json_rpc, fn [ @@ -104,7 +105,7 @@ defmodule Indexer.Fetcher.UncleBlockTest do "from" => "0xe8ddc5c7a2d2f0d7a9798459c0104fdf5e987aca", "gas" => "0x47b760", "gasPrice" => "0x174876e800", - "hash" => "0x3a3eb134e6792ce9403ea4188e5e79693de9e4c94e499db132be086400da79e6", + "hash" => transaction_hash, "input" => "0x", "nonce" => "0x0", "r" => "0xad3733df250c87556335ffe46c23e34dbaffde93097ef92f52c88632a40f0c75", @@ -135,6 +136,15 @@ defmodule Indexer.Fetcher.UncleBlockTest do ) end) + wait_for_results(fn -> + Repo.one!( + from(tf in Chain.Transaction.Fork, + where: tf.hash == ^transaction_hash, + select: tf.hash + ) + ) + end) + refute is_nil(Repo.get(Chain.Block, uncle_hash)) assert Repo.aggregate(Chain.Transaction.Fork, :count, :hash) == 1 end From 6f26cd1cf85e3e2a7d1e2305dd1d47cedcb81c26 Mon Sep 17 00:00:00 2001 From: dylan tirandaz <93934418+dylantirandaz@users.noreply.github.com> Date: Thu, 26 Mar 2026 04:16:48 -0500 Subject: [PATCH 39/42] fix: sync GraphQL language enum with SmartContract schema (#14109) Co-authored-by: Victor Baranov --- .../block_scout_web/graphql/schema/types.ex | 21 +++--------- .../graphql/schema/query/address_test.exs | 33 +++++++++++++++++++ 2 files changed, 37 insertions(+), 17 deletions(-) diff --git 
a/apps/block_scout_web/lib/block_scout_web/graphql/schema/types.ex b/apps/block_scout_web/lib/block_scout_web/graphql/schema/types.ex index 7b655498dfb7..74fbd1b664d2 100644 --- a/apps/block_scout_web/lib/block_scout_web/graphql/schema/types.ex +++ b/apps/block_scout_web/lib/block_scout_web/graphql/schema/types.ex @@ -107,10 +107,11 @@ end defmodule BlockScoutWeb.GraphQL.Schema.Types do @moduledoc false - use Utils.CompileTimeEnvHelper, chain_type: [:explorer, :chain_type] require BlockScoutWeb.GraphQL.Schema.{Transaction, SmartContracts} + alias Ecto.Enum + use Absinthe.Schema.Notation use Absinthe.Relay.Schema.Notation, :modern @@ -124,23 +125,9 @@ defmodule BlockScoutWeb.GraphQL.Schema.Types do alias BlockScoutWeb.GraphQL.Schema.SmartContracts, as: SmartContractsSchema alias BlockScoutWeb.GraphQL.Schema.Transaction, as: TransactionSchema + alias Explorer.Chain.SmartContract - # TODO: leverage `Ecto.Enum.values(SmartContract, :language)` to deduplicate - # language definitions - @default_languages ~w(solidity vyper yul)a - - case @chain_type do - :arbitrum -> - @chain_type_languages ~w(stylus_rust)a - - :zilliqa -> - @chain_type_languages ~w(scilla)a - - _ -> - @chain_type_languages ~w()a - end - - enum(:language, values: @default_languages ++ @chain_type_languages) + enum(:language, values: Enum.values(SmartContract, :language)) import_types(Absinthe.Type.Custom) import_types(BlockScoutWeb.GraphQL.Schema.Scalars) diff --git a/apps/block_scout_web/test/block_scout_web/graphql/schema/query/address_test.exs b/apps/block_scout_web/test/block_scout_web/graphql/schema/query/address_test.exs index 3b0c3fec22a0..dbec56c68204 100644 --- a/apps/block_scout_web/test/block_scout_web/graphql/schema/query/address_test.exs +++ b/apps/block_scout_web/test/block_scout_web/graphql/schema/query/address_test.exs @@ -97,6 +97,39 @@ defmodule BlockScoutWeb.GraphQL.Schema.Query.AddressTest do } end + test "smart_contract language field returns all supported languages", %{conn: conn} 
do + for language <- [:solidity, :vyper, :yul, :geas] do + address = insert(:address, fetched_coin_balance: 100) + insert(:smart_contract, address_hash: address.hash, contract_code_md5: "123", language: language) + + query = """ + query ($hash: AddressHash!) { + address(hash: $hash) { + smart_contract { + language + } + } + } + """ + + variables = %{"hash" => to_string(address.hash)} + + conn = get(conn, "/api/v1/graphql", query: query, variables: variables) + + expected = language |> to_string() |> String.upcase() + + assert %{ + "data" => %{ + "address" => %{ + "smart_contract" => %{ + "language" => ^expected + } + } + } + } = json_response(conn, 200) + end + end + test "errors for non-existent address hash", %{conn: conn} do address = build(:address) From 0b59ff29d45f4019af87a8d934970b4a0c42cbff Mon Sep 17 00:00:00 2001 From: Victor Baranov Date: Thu, 26 Mar 2026 17:13:11 +0300 Subject: [PATCH 40/42] Add info about recents releases to CHANGELOG --- CHANGELOG.md | 84 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9cfcaed2100d..34e05d0bd601 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,89 @@ # Changelog +## 10.2.3 + +### 🐛 Bug Fixes + +- Allow fetching of stale token balances ([#14154](https://github.com/blockscout/blockscout/issues/14154)) + + +## 10.2.2 + +### 🐛 Bug Fixes + +- Fix token transfers block_consensus setting ([#14005](https://github.com/blockscout/blockscout/issues/14005)) +- OP Withdrawals indexer enhancement ([#14056](https://github.com/blockscout/blockscout/issues/14056)) + +### ⚙️ Miscellaneous Tasks + +- Add token transfer consensus sanitizer ([#14144](https://github.com/blockscout/blockscout/issues/14144)) + +### New ENV variables + +| Variable | Description | Parameters | 
+|-----------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------| +| `INDEXER_TOKEN_TRANSFER_BLOCK_CONSENSUS_SANITIZER_INTERVAL` | Interval for token transfer block consensus sanitizer. [Time format](/setup/env-variables/backend-env-variables#time-format). Implemented in [#14144](https://github.com/blockscout/blockscout/pull/14144). | Version: v10.2.2\+
Default: `20m`
Applications: Indexer | + + +## 10.2.1 + +### 🐛 Bug Fixes + +- Notify.check_auth0 for Keycloak and Dynamic ([#14146](https://github.com/blockscout/blockscout/issues/14146)) + +## 10.2.0 + +### 🚀 Features + +- Fetch transaction receipts by block ([#14046](https://github.com/blockscout/blockscout/issues/14046)) + +### New ENV variables + +| Variable | Description | Parameters | +|-----------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------| +| `ETHEREUM_JSONRPC_RECEIPTS_BY_BLOCK` | If `true`, block fetchers will fetch transaction receipts by block instead of per transaction. Implemented in [#14046](https://github.com/blockscout/blockscout/pull/14046) | Version: v10.2.0\+
Default: `false`
Applications: API, Indexer |
+| `ETHEREUM_JSONRPC_MAX_RECEIPTS_BY_BLOCK` | Max number of transactions in a block for which receipts will be fetched by block. If a block has more transactions, receipts will be fetched per transaction in order to reduce response body size. Implemented in [#14046](https://github.com/blockscout/blockscout/pull/14046) | Version: v10.2.0\+&#13;
Default: `1000`
Applications: API, Indexer | + + +## 10.1.1 + +### 🐛 Bug Fixes + +- Authentication provider token redis key ([#14137](https://github.com/blockscout/blockscout/issues/14137)) + + +## 10.1.0 + +### 🚀 Features + +- KeyCloak integration ([#14068](https://github.com/blockscout/blockscout/issues/14068)) + +### New ENV variables + +| Variable | Description | Parameters | +|-----------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------| +| `ACCOUNT_SENDGRID_OTP_TEMPLATE` | Sendgrid email OTP template for login with email functionality. Implemented in [#14068](https://github.com/blockscout/blockscout/pull/14068). | Version: v9.4.0\+
Default: (empty)
Applications: API | +| `ACCOUNT_KEYCLOAK_DOMAIN` | Domain for [Keycloak](https://www.keycloak.org/). Implemented in [#14068](https://github.com/blockscout/blockscout/pull/14068). | Version: v9.4.0\+
Default: (empty)
Applications: API | +| `ACCOUNT_KEYCLOAK_REALM` | Realm for [Keycloak](https://www.keycloak.org/). Implemented in [#14068](https://github.com/blockscout/blockscout/pull/14068). | Version: v9.4.0\+
Default: (empty)
Applications: API | +| `ACCOUNT_KEYCLOAK_CLIENT_ID` | [Keycloak](https://www.keycloak.org/) client ID. Implemented in [#14068](https://github.com/blockscout/blockscout/pull/14068). | Version: v9.4.0\+
Default: (empty)
Applications: API | +| `ACCOUNT_KEYCLOAK_CLIENT_SECRET` | [Keycloak](https://www.keycloak.org/) client secret. Implemented in [#14068](https://github.com/blockscout/blockscout/pull/14068). | Version: v9.4.0\+
Default: (empty)
Applications: API | +| `ACCOUNT_KEYCLOAK_EMAIL_WEBHOOK_URL` | URL address where new email users are reported. Implemented in [#14068](https://github.com/blockscout/blockscout/pull/14068). | Version: v9.4.0\+
Default: (empty)
Applications: API | + + +## 10.0.8 + +### 🐛 Bug Fixes + +- Zetachain internal txs fetching error ([#14122](https://github.com/blockscout/blockscout/issues/14122)) + + +## 10.0.7 + +### 🐛 Bug Fixes + +- Add missing DenormalizationHelper alias in state changes ([#14119](https://github.com/blockscout/blockscout/issues/14119)) + + ## 10.0.6 ### 🐛 Bug Fixes From 299d3f96b9f49616858920f475cedf46c1eb0f3c Mon Sep 17 00:00:00 2001 From: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> Date: Fri, 27 Mar 2026 12:40:06 +0300 Subject: [PATCH 41/42] fix: celo election rewards csv export (#14160) --- .../lib/explorer/chain/celo/election_reward.ex | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/apps/explorer/lib/explorer/chain/celo/election_reward.ex b/apps/explorer/lib/explorer/chain/celo/election_reward.ex index 5a0afc644de5..3ac72952d14b 100644 --- a/apps/explorer/lib/explorer/chain/celo/election_reward.ex +++ b/apps/explorer/lib/explorer/chain/celo/election_reward.ex @@ -356,29 +356,25 @@ defmodule Explorer.Chain.Celo.ElectionReward do end) end - @doc """ - Custom filter for `ElectionReward`, inspired by - `Explorer.Chain.Block.Reader.General.where_block_number_in_period/3` - """ @spec where_epoch_number_in_period( Ecto.Query.t(), String.t() | integer() | nil, String.t() | integer() | nil ) :: Ecto.Query.t() - def where_epoch_number_in_period(base_query, nil, nil), + defp where_epoch_number_in_period(base_query, nil, nil), do: base_query - def where_epoch_number_in_period(base_query, nil, to_epoch), - do: where(base_query, [reward], reward.epoch_number < ^to_epoch) + defp where_epoch_number_in_period(base_query, nil, to_epoch), + do: where(base_query, [reward], reward.epoch_number <= ^to_epoch) - def where_epoch_number_in_period(base_query, from_epoch, nil), + defp where_epoch_number_in_period(base_query, from_epoch, nil), do: where(base_query, [reward], reward.epoch_number >= ^from_epoch) - def 
where_epoch_number_in_period(base_query, from_epoch, to_epoch), + defp where_epoch_number_in_period(base_query, from_epoch, to_epoch), do: where( base_query, [reward], - reward.epoch_number >= ^from_epoch and reward.epoch_number < ^to_epoch + reward.epoch_number >= ^from_epoch and reward.epoch_number <= ^to_epoch ) end From fecacf724d5d6c70389f304bf8b06242304fd8f0 Mon Sep 17 00:00:00 2001 From: Maxim Filonov <53992153+sl1depengwyn@users.noreply.github.com> Date: Fri, 27 Mar 2026 16:18:06 +0300 Subject: [PATCH 42/42] fix: keycloak address displaying (#14155) --- apps/explorer/lib/explorer/third_party_integrations/keycloak.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/explorer/lib/explorer/third_party_integrations/keycloak.ex b/apps/explorer/lib/explorer/third_party_integrations/keycloak.ex index 4c015a77302b..1f174cfad9a5 100644 --- a/apps/explorer/lib/explorer/third_party_integrations/keycloak.ex +++ b/apps/explorer/lib/explorer/third_party_integrations/keycloak.ex @@ -475,7 +475,7 @@ defmodule Explorer.ThirdPartyIntegrations.Keycloak do defp do_send_registration_webhook(_email, nil), do: :ok defp create_auth(user, address_hash \\ nil) do - address_hash = address_hash || List.first(user["attributes"]["address"] || []) + address_hash = address_hash || List.last(user["attributes"]["address"] || []) %Auth{ uid: user["id"],