diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
new file mode 100644
index 0000000..6f90e20
--- /dev/null
+++ b/.devcontainer/Dockerfile
@@ -0,0 +1,7 @@
+FROM mcr.microsoft.com/devcontainers/rust:1-1-bookworm
+
+# Include lld linker to improve build times either by using environment variable
+# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (e.g. see .cargo/config.toml).
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+ && apt-get -y install clang lld \
+ && apt-get autoremove -y && apt-get clean -y
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
new file mode 100644
index 0000000..7080270
--- /dev/null
+++ b/.devcontainer/devcontainer.json
@@ -0,0 +1,63 @@
+// For format details, see https://aka.ms/devcontainer.json. For config options, see the
+// README at: https://github.com/devcontainers/templates/tree/main/src/rust
+{
+ "name": "Rust",
+ // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
+ "dockerComposeFile": "docker-compose.yaml",
+ "service": "app",
+ "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
+ "features": {
+ "ghcr.io/devcontainers/features/git-lfs:1": {},
+ "ghcr.io/devcontainers/features/rust:1": {
+ "version": "nightly-2023-01-04"
+ },
+ "ghcr.io/devcontainers/features/node:1": {},
+ "ghcr.io/devcontainers/features/go:1": {},
+ "ghcr.io/devcontainers-extra/features/protoc:1": {},
+ "ghcr.io/nordcominc/devcontainer-features/android-sdk:1": {
+ "platform": "34",
+ "extra_packages": "ndk;26.3.11579264 emulator system-images;android-34;google_apis;x86_64"
+ },
+ "ghcr.io/devcontainers/features/desktop-lite:1": {}
+ },
+ "onCreateCommand": ".devcontainer/oncreate.sh",
+ // "updateContentCommand": ".devcontainer/updatecontent.sh",
+ "forwardPorts": [
+ 6080,
+ 5901
+ ],
+ "portsAttributes": {
+ "6080": {
+ "label": "VNC web client (noVNC)",
+ "onAutoForward": "silent"
+ },
+ "5901": {
+ "label": "VNC TCP port",
+ "onAutoForward": "silent"
+ }
+ },
+ "hostRequirements": {
+ "memory": "9gb"
+ },
+ "remoteEnv": {
+ "RUSTFLAGS": "-C link-arg=-fuse-ld=lld"
+ },
+ // Use 'mounts' to make the cargo cache persistent in a Docker Volume.
+ // "mounts": [
+ // {
+ // "source": "devcontainer-cargo-cache-${devcontainerId}",
+ // "target": "/usr/local/cargo",
+ // "type": "volume"
+ // }
+ // ]
+ // Features to add to the dev container. More info: https://containers.dev/features.
+ // "features": {},
+ // Use 'forwardPorts' to make a list of ports inside the container available locally.
+ // "forwardPorts": [],
+ // Use 'postCreateCommand' to run commands after the container is created.
+ // "postCreateCommand": "rustc --version",
+ // Configure tool-specific properties.
+ // "customizations": {},
+ // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
+ "remoteUser": "root"
+}
diff --git a/.devcontainer/docker-compose.yaml b/.devcontainer/docker-compose.yaml
new file mode 100644
index 0000000..eb1c569
--- /dev/null
+++ b/.devcontainer/docker-compose.yaml
@@ -0,0 +1,38 @@
+version: '3.8'
+
+volumes:
+ postgres-data:
+
+services:
+ app:
+ build:
+ context: .
+ dockerfile: Dockerfile
+
+ volumes:
+ - ../..:/workspaces:cached
+
+ # Overrides default command so things don't shut down after the process ends.
+ command: sleep infinity
+
+ # Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function.
+ network_mode: service:db
+
+ devices:
+ - "/dev/kvm:/dev/kvm"
+
+ # Use "forwardPorts" in **devcontainer.json** to forward an app port locally.
+ # (Adding the "ports" property to this file will not forward from a Codespace.)
+
+ db:
+ image: postgres:15.13
+ restart: unless-stopped
+ volumes:
+ - postgres-data:/var/lib/postgresql/data
+ environment:
+ POSTGRES_HOST_AUTH_METHOD: trust
+ POSTGRES_DB: guild
+ POSTGRES_USER: root
+
+ # Add "forwardPorts": ["5432"] to **devcontainer.json** to forward PostgreSQL locally.
+ # (Adding the "ports" property to this file will not forward from a Codespace.)
diff --git a/.devcontainer/oncreate.sh b/.devcontainer/oncreate.sh
new file mode 100755
index 0000000..ba7c66c
--- /dev/null
+++ b/.devcontainer/oncreate.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env sh
+
+sed -i 's/"runOn": "default",/"runOn": "folderOpen",/g' .vscode/tasks.json
+
+avdmanager create avd -n MyDevice -k 'system-images;android-34;google_apis;x86_64' -d pixel
+
+(
+ cd /tmp &&\
+ wget https://github.com/Genymobile/scrcpy/releases/download/v3.0/scrcpy-linux-v3.0.tar.gz &&\
+ sudo tar -xvf scrcpy-linux-v3.0.tar.gz -C /var/lib &&\
+ sudo ln -s /var/lib/scrcpy-linux-v3.0/scrcpy_bin /usr/bin/scrcpy &&\
+ rm scrcpy-linux-v3.0.tar.gz
+)
+
+# Trigger rustup toolchain install.
+# This should be installed by the devcontainer feature,
+# but there seems to be a race condition that causes the tasks
+# to download the toolchain, and if multiple try to download it at once,
+# they fail and the installed toolchain is broken.
+cargo --version
\ No newline at end of file
diff --git a/.devcontainer/updatecontent.sh b/.devcontainer/updatecontent.sh
new file mode 100755
index 0000000..1b13710
--- /dev/null
+++ b/.devcontainer/updatecontent.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env sh
+
+# yarn install is tooooo slow
+# (cd eth && yarn --frozen-lockfile)
+# (cd app/packages/payy && yarn --frozen-lockfile)
diff --git a/.github/workflows/beam.release.yml b/.github/workflows/beam.release.yml
new file mode 100644
index 0000000..6e09886
--- /dev/null
+++ b/.github/workflows/beam.release.yml
@@ -0,0 +1,183 @@
+name: Beam / Release
+
+on:
+ workflow_dispatch:
+ # GitHub does not support branch filters for manual dispatches.
+ # Jobs below explicitly gate releases to refs/heads/main.
+ push:
+ branches:
+ - main
+ paths:
+ - ".github/workflows/beam.release.yml"
+ - "pkg/beam-cli/**"
+ - "scripts/install-beam.sh"
+ - "Cargo.lock"
+ - "Cargo.toml"
+ - "rust-toolchain.toml"
+
+permissions:
+ contents: write
+
+concurrency:
+ group: beam-release-${{ github.ref }}
+ cancel-in-progress: false
+
+jobs:
+ detect-version:
+ if: github.repository == 'polybase/payy' && github.ref == 'refs/heads/main'
+ runs-on: ubuntu-latest
+ outputs:
+ is_prerelease: ${{ steps.detect.outputs.is_prerelease }}
+ should_release: ${{ steps.detect.outputs.should_release }}
+ tag: ${{ steps.detect.outputs.tag }}
+ version: ${{ steps.detect.outputs.version }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Detect beam version change
+ id: detect
+ shell: bash
+ run: |
+ set -euo pipefail
+
+ version="$(awk -F' = ' '/^version = / { gsub(/"/, "", $2); print $2; exit }' pkg/beam-cli/Cargo.toml)"
+ tag="beam-v${version}"
+ if [[ "$version" == *-* ]]; then
+ is_prerelease=true
+ else
+ is_prerelease=false
+ fi
+
+ echo "version=${version}" >> "$GITHUB_OUTPUT"
+ echo "tag=${tag}" >> "$GITHUB_OUTPUT"
+ echo "is_prerelease=${is_prerelease}" >> "$GITHUB_OUTPUT"
+
+ # Beam release tags are immutable: never rebuild or republish an existing version.
+ if git ls-remote --exit-code --tags origin "refs/tags/${tag}" >/dev/null 2>&1; then
+ echo "Beam tag ${tag} already exists; skipping release publication."
+ echo "should_release=false" >> "$GITHUB_OUTPUT"
+ exit 0
+ fi
+
+ if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
+ if [[ "${GITHUB_REF}" != "refs/heads/main" ]]; then
+ echo "Manual beam releases are only allowed from refs/heads/main; skipping ${GITHUB_REF}."
+ echo "should_release=false" >> "$GITHUB_OUTPUT"
+ exit 0
+ fi
+
+ echo "should_release=true" >> "$GITHUB_OUTPUT"
+ exit 0
+ fi
+
+ before="${{ github.event.before }}"
+ if [[ -z "$before" || "$before" == "0000000000000000000000000000000000000000" ]]; then
+ echo "should_release=true" >> "$GITHUB_OUTPUT"
+ exit 0
+ fi
+
+ if git diff --quiet "$before" "${{ github.sha }}" -- pkg/beam-cli/Cargo.toml; then
+ echo "should_release=false" >> "$GITHUB_OUTPUT"
+ exit 0
+ fi
+
+ if git show "${before}:pkg/beam-cli/Cargo.toml" > /tmp/beam-prev-cargo.toml 2>/dev/null; then
+ before_version="$(
+ awk -F' = ' '/^version = / { gsub(/"/, "", $2); print $2; exit }' /tmp/beam-prev-cargo.toml
+ )"
+
+ if [[ "$before_version" == "$version" ]]; then
+ echo "should_release=false" >> "$GITHUB_OUTPUT"
+ else
+ echo "should_release=true" >> "$GITHUB_OUTPUT"
+ fi
+ else
+ echo "No previous pkg/beam-cli/Cargo.toml; treating as version changed."
+ echo "should_release=true" >> "$GITHUB_OUTPUT"
+ fi
+
+ build:
+ needs: detect-version
+ if: github.repository == 'polybase/payy' && github.ref == 'refs/heads/main' && needs.detect-version.outputs.should_release == 'true'
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - runner: ubuntu-latest
+ target: x86_64-unknown-linux-gnu
+ - runner: macos-15-intel
+ target: x86_64-apple-darwin
+ - runner: macos-14
+ target: aarch64-apple-darwin
+ runs-on: ${{ matrix.runner }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Setup Rust toolchain
+ run: rustup show
+
+ - name: Build beam release binary
+ run: cargo build --release --locked -p beam-cli --bin beam --target ${{ matrix.target }}
+
+ - name: Package release asset
+ shell: bash
+ run: |
+ set -euo pipefail
+ mkdir -p dist
+ cp "target/${{ matrix.target }}/release/beam" "dist/beam-${{ matrix.target }}"
+
+ - name: Upload release asset
+ uses: actions/upload-artifact@v4
+ with:
+ name: beam-${{ matrix.target }}
+ path: dist/beam-${{ matrix.target }}
+ if-no-files-found: error
+
+ release:
+ needs:
+ - detect-version
+ - build
+ if: github.repository == 'polybase/payy' && github.ref == 'refs/heads/main' && needs.detect-version.outputs.should_release == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Download release assets
+ uses: actions/download-artifact@v4
+ with:
+ path: dist
+ merge-multiple: true
+
+ - name: Validate release assets
+ shell: bash
+ run: |
+ set -euo pipefail
+ expected_assets=(
+ dist/beam-x86_64-unknown-linux-gnu
+ dist/beam-x86_64-apple-darwin
+ dist/beam-aarch64-apple-darwin
+ )
+
+ for asset in "${expected_assets[@]}"; do
+ if [[ ! -f "$asset" ]]; then
+ echo "Expected release asset missing or not a file: $asset" >&2
+ exit 1
+ fi
+ done
+
+ - name: Publish GitHub Release
+ uses: softprops/action-gh-release@v2
+ with:
+ tag_name: ${{ needs.detect-version.outputs.tag }}
+ name: beam ${{ needs.detect-version.outputs.version }}
+ generate_release_notes: true
+ prerelease: ${{ needs.detect-version.outputs.is_prerelease == 'true' }}
+ files: |
+ dist/beam-x86_64-unknown-linux-gnu
+ dist/beam-x86_64-apple-darwin
+ dist/beam-aarch64-apple-darwin
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 8261a22..ccbcfdb 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -72,7 +72,7 @@ jobs:
cargo-test:
name: Cargo Test in Docker
runs-on: ${{ vars.RUNNER_LABELS}}
- timeout-minutes: 80
+ timeout-minutes: 40
steps:
- name: Checkout
uses: actions/checkout@v4
diff --git a/Cargo.lock b/Cargo.lock
index 1c37f1c..41a6cb1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1496,6 +1496,18 @@ dependencies = [
"rustversion",
]
+[[package]]
+name = "argon2"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072"
+dependencies = [
+ "base64ct",
+ "blake2",
+ "cpufeatures",
+ "password-hash 0.5.0",
+]
+
[[package]]
name = "ark-bls12-381"
version = "0.5.0"
@@ -2384,6 +2396,45 @@ version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e050f626429857a27ddccb31e0aca21356bfa709c04041aefddac081a8f068a"
+[[package]]
+name = "beam-cli"
+version = "0.1.0"
+dependencies = [
+ "argon2",
+ "async-trait",
+ "clap",
+ "contextful",
+ "contracts",
+ "dirs",
+ "encrypt",
+ "eth-util",
+ "futures",
+ "hex",
+ "insta",
+ "json-store",
+ "mockito",
+ "num-bigint",
+ "rand 0.8.5",
+ "reqwest 0.12.28",
+ "rlp 0.6.1",
+ "rpassword",
+ "rustyline",
+ "secp256k1 0.28.2",
+ "self-replace",
+ "semver 1.0.27",
+ "serde",
+ "serde_json",
+ "serde_yaml",
+ "serial_test",
+ "sha2",
+ "shlex",
+ "tempfile",
+ "thiserror 1.0.69",
+ "tokio",
+ "web3",
+ "workspace-hack",
+]
+
[[package]]
name = "bech32"
version = "0.9.1"
@@ -3233,6 +3284,15 @@ dependencies = [
"workspace-hack",
]
+[[package]]
+name = "clipboard-win"
+version = "5.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bde03770d3df201d4fb868f2c9c59e66a3e4e2bd06692a0fe701e7103c7e84d4"
+dependencies = [
+ "error-code",
+]
+
[[package]]
name = "cmake"
version = "0.1.57"
@@ -4269,6 +4329,17 @@ dependencies = [
"syn 2.0.112",
]
+[[package]]
+name = "diesel_migrations"
+version = "2.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "745fd255645f0f1135f9ec55c7b00e0882192af9683ab4731e4bba3da82b8f9c"
+dependencies = [
+ "diesel",
+ "migrations_internals",
+ "migrations_macros",
+]
+
[[package]]
name = "diesel_table_macro_syntax"
version = "0.3.0"
@@ -4755,6 +4826,12 @@ dependencies = [
"x25519-dalek 2.0.1",
]
+[[package]]
+name = "endian-type"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d"
+
[[package]]
name = "enr"
version = "0.13.0"
@@ -4854,6 +4931,12 @@ dependencies = [
"version_check",
]
+[[package]]
+name = "error-code"
+version = "3.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59"
+
[[package]]
name = "eth-util"
version = "0.1.0"
@@ -5170,6 +5253,17 @@ dependencies = [
"workspace-hack",
]
+[[package]]
+name = "fd-lock"
+version = "4.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78"
+dependencies = [
+ "cfg-if",
+ "rustix 1.1.3",
+ "windows-sys 0.59.0",
+]
+
[[package]]
name = "fdlimit"
version = "0.3.0"
@@ -5751,6 +5845,7 @@ dependencies = [
"deadpool",
"diesel",
"diesel-async",
+ "diesel_migrations",
"dirs",
"document-ai-google",
"document-ai-interface",
@@ -8450,6 +8545,27 @@ dependencies = [
"syn 1.0.109",
]
+[[package]]
+name = "migrations_internals"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "36c791ecdf977c99f45f23280405d7723727470f6689a5e6dbf513ac547ae10d"
+dependencies = [
+ "serde",
+ "toml 0.9.11+spec-1.1.0",
+]
+
+[[package]]
+name = "migrations_macros"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "36fc5ac76be324cfd2d3f2cf0fdf5d5d3c4f14ed8aaebadb09e304ba42282703"
+dependencies = [
+ "migrations_internals",
+ "proc-macro2",
+ "quote",
+]
+
[[package]]
name = "mime"
version = "0.3.17"
@@ -8820,6 +8936,15 @@ version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086"
+[[package]]
+name = "nibble_vec"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77a5d83df9f36fe23f0c3648c6bbb8b0298bb5f1939c8f2704431371f4b84d43"
+dependencies = [
+ "smallvec",
+]
+
[[package]]
name = "nix"
version = "0.26.4"
@@ -10258,6 +10383,17 @@ dependencies = [
"subtle",
]
+[[package]]
+name = "password-hash"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166"
+dependencies = [
+ "base64ct",
+ "rand_core 0.6.4",
+ "subtle",
+]
+
[[package]]
name = "pasta_curves"
version = "0.5.1"
@@ -10305,6 +10441,7 @@ dependencies = [
"alloy-genesis",
"alloy-primitives",
"alloy-rpc-types-engine",
+ "async-trait",
"barretenberg-cli",
"barretenberg-interface",
"bn254_blackbox_solver",
@@ -10314,6 +10451,7 @@ dependencies = [
"contextful",
"element",
"ethers-solc",
+ "flate2",
"hash",
"indexmap 2.13.0",
"reqwest 0.12.28",
@@ -10397,7 +10535,7 @@ checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917"
dependencies = [
"digest 0.10.7",
"hmac",
- "password-hash",
+ "password-hash 0.4.2",
"sha2",
]
@@ -11630,6 +11768,16 @@ version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
+[[package]]
+name = "radix_trie"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c069c179fcdc6a2fe24d8d18305cf085fdbd4f922c041943e203685d6a1c58fd"
+dependencies = [
+ "endian-type",
+ "nibble_vec",
+]
+
[[package]]
name = "rain-http"
version = "0.1.0"
@@ -15208,6 +15356,17 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afab94fb28594581f62d981211a9a4d53cc8130bbcbbb89a0440d9b8e81a7746"
+[[package]]
+name = "rpassword"
+version = "7.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "66d4c8b64f049c6721ec8ccec37ddfc3d641c4a7fca57e8f2a89de509c73df39"
+dependencies = [
+ "libc",
+ "rtoolbox",
+ "windows-sys 0.59.0",
+]
+
[[package]]
name = "rpc"
version = "1.3.0"
@@ -15287,6 +15446,16 @@ dependencies = [
"tokio",
]
+[[package]]
+name = "rtoolbox"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7cc970b249fbe527d6e02e0a227762c9108b2f49d81094fe357ffc6d14d7f6f"
+dependencies = [
+ "libc",
+ "windows-sys 0.52.0",
+]
+
[[package]]
name = "ruint"
version = "1.17.2"
@@ -15680,6 +15849,28 @@ dependencies = [
"wait-timeout",
]
+[[package]]
+name = "rustyline"
+version = "17.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e902948a25149d50edc1a8e0141aad50f54e22ba83ff988cf8f7c9ef07f50564"
+dependencies = [
+ "bitflags 2.10.0",
+ "cfg-if",
+ "clipboard-win",
+ "fd-lock",
+ "home",
+ "libc",
+ "log",
+ "memchr",
+ "nix 0.30.1",
+ "radix_trie",
+ "unicode-segmentation",
+ "unicode-width 0.2.2",
+ "utf8parse",
+ "windows-sys 0.60.2",
+]
+
[[package]]
name = "rw-stream-sink"
version = "0.3.0"
@@ -16014,6 +16205,17 @@ dependencies = [
"libc",
]
+[[package]]
+name = "self-replace"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "03ec815b5eab420ab893f63393878d89c90fdd94c0bcc44c07abb8ad95552fb7"
+dependencies = [
+ "fastrand 2.3.0",
+ "tempfile",
+ "windows-sys 0.52.0",
+]
+
[[package]]
name = "semver"
version = "0.11.0"
@@ -20250,6 +20452,7 @@ dependencies = [
"serde",
"serde_core",
"serde_json",
+ "serde_spanned 1.0.4",
"serde_with",
"sha1",
"sha2",
@@ -20273,6 +20476,7 @@ dependencies = [
"tokio-stream",
"tokio-tungstenite",
"tokio-util",
+ "toml 0.9.11+spec-1.1.0",
"tower",
"tower-http",
"tracing",
diff --git a/Cargo.toml b/Cargo.toml
index 282bc67..dbe23a2 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -39,6 +39,7 @@ barretenberg-api-bin = { path = "./pkg/barretenberg-api-bin" }
barretenberg-api-client = { path = "./pkg/barretenberg-api-client" }
barretenberg-api-tests = { path = "./pkg/barretenberg-api-tests" }
barretenberg-rs = { path = "./pkg/barretenberg-rs" }
+beam-cli = { path = "./pkg/beam-cli" }
zk-circuits = { path = "./pkg/zk-circuits" }
contextful = { path = "./pkg/contextful" }
contextful-macros = { path = "./pkg/contextful-macros" }
@@ -186,6 +187,7 @@ bn254_blackbox_solver = { git = "https://github.com/noir-lang/noir", tag = "v1.0
nargo = { git = "https://github.com/noir-lang/noir", tag = "v1.0.0-beta.14" }
actix-multipart = "0.7.2"
aes-gcm = "0.10.3"
+argon2 = "0.5.3"
async-stripe = { version = "0.41", default-features = false, features = [
"full",
"webhook-events",
@@ -220,6 +222,7 @@ diesel = { version = "2.3.7", features = [
"64-column-tables",
] }
diesel-async = "0.7.4"
+diesel_migrations = "2.3"
tokio-postgres = { version = "0.7.16" }
postgres-native-tls = "0.5.0"
native-tls = "0.2.15"
@@ -278,6 +281,7 @@ rand_xorshift = "0.3"
reqwest = { version = "0.12", features = ["json", "multipart"] }
rlp = "0.6.1"
rocksdb = "0.21"
+rpassword = "7.4.0"
rustc-hex = "2.1.0"
rust-i18n = "3"
rsa = { version = "0.9", features = ["sha1"] }
@@ -294,9 +298,11 @@ unimock = "0.6.8"
secp256k1 = { version = "0.28.0", features = ["rand", "global-context", "recovery"] }
url = { version = "2.5.8" }
semver = "1.0.15"
+shlex = "1.3.0"
sha1 = "0.10.1"
sha2 = "0.10.6"
sha3 = "0.10.1"
+self-replace = "1.5.0"
spinoff = "0.8.0"
syn = { version = "2.0", features = ["full", "extra-traits"] }
tracing-stackdriver = { version = "0.7.2", features = ["valuable"] }
@@ -328,6 +334,7 @@ user-error = "1.2.8"
uuid = { version = "1.18.1", features = ["v4", "serde"] }
web3 = "0.19.0"
which = "4.4"
+rustyline = "17.0.2"
serial_test = { version = "3.0.0", features = ["file_locks"] }
# the `de_strict_order` flag is important for maintaining bijection
borsh = { version = "1", features = ["derive", "de_strict_order", "rc"] }
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1503950
--- /dev/null
+++ b/README.md
@@ -0,0 +1,389 @@
+
+
+
+
+# Payy - ZK Rollup
+
+An Ethereum L2 zk-rollup for privacy preserving and regulatory compliant transactions.
+
+Here are some highlights:
+
+- Fast - runs in under 3 seconds on an iPhone
+- Tiny - UTXO proofs are under 2.8KB
+- EVM-compatible - proofs can be verified on Ethereum
+
+For a detailed description of the architecture, download the [whitepaper](https://polybase.github.io/zk-rollup/whitepaper.pdf) or visit the [docs](https://payy.network/docs).
+
+
+| Module | Path | Desc |
+|--------------------|-----------------------------------------|-----------------------------------------------------------------|
+| Frontends / TypeScript | [app](/app) | Frontend applications and TypeScript packages |
+| Ethereum Contracts | [eth](/eth) | Ethereum smart contracts to verify state transitions and proofs |
+| Noir | [noir](/noir) | Noir circuits and related tooling |
+| Aggregator | [pkg/aggregator](/pkg/aggregator) | Rollup aggregation services and supporting logic |
+| Node | [pkg/node](/pkg/node) | Core node implementation for the Payy network |
+| Prover | [pkg/prover](/pkg/prover) | Core prover logic |
+| RPC | [pkg/rpc](/pkg/rpc) | RPC common utilities shared across all RPC services |
+| Smirk | [pkg/smirk](/pkg/smirk) | Sparse merkle tree |
+| ZK-Primitives | [pkg/zk-primitives](/pkg/zk-primitives) | ZK primitives used across multiple modules |
+
+
+## Git LFS
+
+We use [Git LFS](https://git-lfs.com/) for large files such as proving parameters.
+
+A one-time setup is required for local development:
+
+1. Install `git lfs` by following the instructions at https://git-lfs.com/.
+2. From the repository root, run:
+
+```bash
+git lfs install
+git lfs pull
+```
+
+## Get Started
+
+There are two core services needed to run the zk rollup stack, and you should start them in order:
+
+1. Eth (with contracts deployed)
+2. Node
+
+### Prerequisites
+
+ - [Rust/Cargo](https://doc.rust-lang.org/cargo/getting-started/installation.html)
+ - [Go](https://go.dev/doc/install)
+ - [Node/nvm/yarn](https://github.com/nvm-sh/nvm?tab=readme-ov-file#installing-and-updating)
+ - [Postgres](https://www.postgresql.org/download/)
+
+### Get Started (using VS Code)
+
+You can run all of the services using the VSCode dev container:
+
+Cmd-Shift-P -> "Dev Containers: Reopen in Container"
+
+
+### Get Started (using docker)
+
+You can run all of the services using docker
+
+Run
+```bash
+docker compose -f ./docker/docker-compose.yml up -d
+```
+
+To only run services that are needed for a dev environment
+
+Run
+```bash
+docker compose -f ./docker/docker-compose.yml --profile dev up -d
+```
+
+
+To only run services that are needed for integration tests
+
+Run
+```bash
+docker compose -f ./docker/docker-compose.yml --profile test up -d
+```
+
+
+To only run services that are needed for CI workflows
+
+Run
+```bash
+docker compose -f ./docker/docker-compose.yml --profile ci up -d
+```
+
+
+To run the prover (optional) to enable withdrawals
+
+Run
+```bash
+docker compose -f ./docker/docker-compose.yml --profile prover up -d
+```
+
+
+### Automated Setup
+
+Once the prerequisites above are installed you can bootstrap the local tooling with:
+
+```bash
+eval "$(cargo xtask setup)"
+```
+
+**What this does:** The `cargo xtask setup` command installs the bb and nargo toolchains, ensures the `polybase-pg` Postgres container is running with the latest migrations, and installs the Ethereum workspace dependencies under `eth/`. It prints shell `export` commands to stdout, and wrapping it in `eval "$(...)"` executes those exports in your current shell so `DATABASE_URL` and any `PATH` updates take effect.
+
+**Environment variables set:**
+- `DATABASE_URL` - Connection string for the local Postgres database
+
+**Important:** These exports only persist for the current terminal session. For convenience, consider integrating this command into a repo-specific development shell (for example: direnv, nix shell, guix container) rather than global shell profiles like `.bashrc` or `.zshrc`, because the setup is too heavyweight for global profiles.
+
+Re-run the command whenever you need to refresh the development environment; it is safe and idempotent.
+
+### Targeted Tests
+
+Run the fast test wrapper during development to avoid rebuilding unaffected crates:
+
+```bash
+cargo xtask test
+```
+
+The command detects workspace crates with local changes (and any dependents), builds tests once via `cargo test --workspace --no-run`, then runs only the compiled test binaries for the affected crates (changed first, then their dependents), exiting early if nothing relevant changed.
+
+### Revi
+
+Download and run the `revi` helper with any arguments (cached under `~/.polybase/revi`):
+
+```bash
+cargo xtask revi -- <args>
+```
+
+### Local Binaries (debian only)
+
+You will need to install the following packages:
+
+```
+apt install libglib2.0-dev libssl-dev libclang-dev python3
+```
+
+
+### Protobuf
+
+Install protobuf
+
+debian:
+
+```
+apt install protobuf-compiler libprotobuf-dev
+```
+
+macos:
+
+```
+brew install protobuf
+```
+
+### Fixture Params
+
+Download the proving params before building or running Docker images. This caches the file in
+`~/.polybase/fixtures/params` (override with `POLYBASE_PARAMS_DIR`):
+
+```bash
+./scripts/download-fixtures-params.sh
+```
+
+### Postgres
+
+Install/run postgres and create a db called `guild`.
+
+docker (recommended):
+
+```bash
+docker run -it --rm -e POSTGRES_HOST_AUTH_METHOD=trust -e POSTGRES_DB=guild -e POSTGRES_USER=$USER -p 5432:5432 postgres:18
+```
+
+macos:
+
+```bash
+brew install postgresql
+brew services start postgresql
+createdb guild
+```
+
+debian:
+
+```bash
+sudo apt install postgresql postgresql-contrib
+sudo systemctl start postgresql
+sudo systemctl enable postgresql
+sudo -i -u postgres
+createdb guild
+```
+
+You should be able to connect to the db using:
+
+```bash
+psql postgres://localhost/guild
+```
+
+(if you're using mac, recommend using [Postico](https://eggerapps.at/postico/v1.php))
+
+
+### Diesel (for postgres schema setup)
+
+Install diesel CLI:
+
+```bash
+cargo install diesel_cli --no-default-features --features postgres
+```
+
+Setup the tables in the postgres database:
+
+```bash
+$ cd pkg/database
+$ diesel migration run
+```
+
+
+### TOML Formatting with Taplo
+
+This repository uses [taplo](https://taplo.tamasfe.dev/) to standardize TOML file formatting across all configuration files, including Cargo.toml, Nargo.toml, and other TOML files.
+
+#### CI Validation
+
+A GitHub Action automatically checks TOML formatting on:
+- Pull requests (when TOML files are modified)
+- Pushes to `main` branches
+- Manual workflow dispatch
+
+The CI will fail if any TOML files don't meet the formatting standards.
+
+#### Installation
+
+Install taplo CLI:
+
+```bash
+cargo install taplo-cli --locked
+# or
+curl -fsSL https://github.com/tamasfe/taplo/releases/latest/download/taplo-full-linux-x86_64.gz | gzip -d - | install -m 755 /dev/stdin /usr/local/bin/taplo
+```
+
+#### Usage
+
+Format all TOML files in the repository:
+
+```bash
+taplo fmt
+```
+
+Check formatting without making changes:
+
+```bash
+taplo fmt --check
+```
+
+Validate all TOML files for syntax errors:
+
+```bash
+taplo check
+```
+
+The formatting configuration is defined in `taplo.toml` at the repository root. The configuration ensures consistent formatting with:
+- 2-space indentation
+- Multi-line arrays for better readability
+- Preserved dependency and key ordering
+- Trailing newlines at end of files
+- Node modules directories are excluded from checks
+
+
+### Eth (Ethereum Node)
+
+Setup the [eth node](eth/README.md):
+
+```bash
+$ cd eth
+$ yarn install
+$ yarn eth-node --hostname 0.0.0.0
+```
+
+Then deploy the smart contracts to your eth node (in another terminal):
+
+```bash
+$ cd eth
+$ DEV_USE_NOOP_VERIFIER=1 yarn deploy:local
+```
+
+> [!IMPORTANT]
+> if you stop the `eth-node` server, you will need to redeploy the contracts again.
+
+
+### Node (Payy Network)
+
+Run [node](pkg/node/README.md):
+
+```bash
+$ cargo run --bin node
+```
+
+Run node in prover mode (optional, enables withdrawals):
+
+```bash
+$ cargo run --bin node -- --mode mock-prover --db-path ~/.polybase-prover/db --smirk-path ~/.polybase-prover/smirk --rpc-laddr 0.0.0.0:8092 --p2p-laddr /ip4/127.0.0.1/tcp/5001
+```
+
+> [!IMPORTANT]
+> `eth-node` must be running before starting `node`.
+
+### Guild (API server)
+Run [guild](pkg/guild/README.md):
+
+```bash
+$ cargo run --bin guild -- --firebase-service-account-path=payy-prenet-firebase.json
+```
+
+> [!IMPORTANT]
+> `node` must be running before starting `guild`.
+
+### Give yourself some funds
+
+Get the deposit address from the app (Menu -> Deposit -> Deposit Address)
+
+```bash
+cargo run --bin wallet transfer 100
+```
+
+
+## Tests
+
+
+### Integration tests
+
+```
+cargo test integration_test
+```
+
+### Rust
+
+```
+docker build -f ./docker/Dockerfile.node --target tester .
+```
+
+### Workspace hack crate
+
+We use [`cargo-hakari`](https://docs.rs/cargo-hakari) to keep a unified `workspace-hack` crate in sync across all `Cargo.toml` files. Run the following after adding or modifying workspace dependencies and before opening a pull request:
+
+```
+cargo hakari generate
+cargo hakari manage-deps --yes
+```
+
+The `Rust / Hakari Check` GitHub workflow enforces that the crate stays synchronized; if it fails, re-run the commands above and commit the resulting changes.
+
+## Contributing
+
+We welcome contributions that improve the project for everyone.
+
+### Security vulnerabilities
+
+If you discover a security issue, do not report it publicly. Send a full report to [hello@polybaselabs.com](mailto:hello@polybaselabs.com) so it can be handled responsibly.
+
+### Reporting bugs
+
+If you find a bug, open an issue at [github.com/polybase/payy/issues](https://github.com/polybase/payy/issues) with reproduction steps, environment details, and any relevant logs or screenshots.
+
+### Suggesting enhancements
+
+To propose a feature or improvement, open an issue at [github.com/polybase/payy/issues](https://github.com/polybase/payy/issues) and explain the problem, the proposed change, and why it is useful.
+
+### Submitting pull requests
+
+1. Fork the repository.
+2. Create a feature branch.
+3. Make and test your changes.
+4. Commit and push the branch.
+5. Open a pull request at [github.com/polybase/payy/pulls](https://github.com/polybase/payy/pulls).
diff --git a/app/packages/payy/assets/img/payy-logo-wordmark.png b/app/packages/payy/assets/img/payy-logo-wordmark.png
new file mode 100644
index 0000000..2a12e5b
Binary files /dev/null and b/app/packages/payy/assets/img/payy-logo-wordmark.png differ
diff --git a/app/packages/payy/src/ts-rs-bindings/tsconfig.tsrs.json b/app/packages/payy/src/ts-rs-bindings/tsconfig.tsrs.json
index 30b282c..282a92f 100644
--- a/app/packages/payy/src/ts-rs-bindings/tsconfig.tsrs.json
+++ b/app/packages/payy/src/ts-rs-bindings/tsconfig.tsrs.json
@@ -3,9 +3,10 @@
"strict": true,
"noEmit": true,
"skipLibCheck": true,
- "moduleResolution": "node",
+ "module": "Node16",
+ "moduleResolution": "node16",
"allowSyntheticDefaultImports": true,
"esModuleInterop": true
},
"include": ["*.ts"]
-}
\ No newline at end of file
+}
diff --git a/docker/Dockerfile.aggregator b/docker/Dockerfile.aggregator
new file mode 100644
index 0000000..f9b18c2
--- /dev/null
+++ b/docker/Dockerfile.aggregator
@@ -0,0 +1,126 @@
+# Build aggregator CLI binary
+FROM rust:1-bookworm AS workspace
+
+ARG SCCACHE_GCS_BUCKET
+ARG SCCACHE_GCS_KEY_PREFIX
+
+RUN rustup component add rustfmt && \
+ apt update && apt install -y libglib2.0-dev libssl-dev libclang-dev libpq-dev pkg-config python3 protobuf-compiler libprotobuf-dev cmake ninja-build
+
+# Ensure the toolchain specified in rust-toolchain.toml is installed
+RUN rustup show
+
+# Force static link preference for pkg-config targets
+ENV SYSROOT=/dummy
+
+# Optional sccache setup backed by GCS, matching the other barretenberg-enabled images
+RUN --mount=type=secret,id=gcs_sa_key_base64,required=false \
+ if [ -n "$SCCACHE_GCS_BUCKET" ] && [ -f /run/secrets/gcs_sa_key_base64 ]; then \
+ cat /run/secrets/gcs_sa_key_base64 | base64 -d > /gcs_key.json && \
+ wget https://github.com/mozilla/sccache/releases/download/v0.10.0/sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ tar -xzf sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ mv sccache-v0.10.0-x86_64-unknown-linux-musl/sccache /usr/local/cargo/bin/sccache && \
+ rm -rf sccache-v0.10.0-x86_64-unknown-linux-musl sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ chmod +x /usr/local/cargo/bin/sccache; \
+ fi
+ENV SCCACHE_GCS_KEY_PATH=/gcs_key.json
+ENV SCCACHE_GCS_BUCKET=$SCCACHE_GCS_BUCKET
+ENV SCCACHE_GCS_KEY_PREFIX=$SCCACHE_GCS_KEY_PREFIX
+ENV SCCACHE_GCS_RW_MODE=READ_WRITE
+
+WORKDIR /build
+
+FROM workspace AS builder
+
+ARG RELEASE=1
+ENV RELEASE=$RELEASE
+
+COPY rust-toolchain.toml ./
+COPY .cargo/config.toml .cargo/config.toml
+COPY Cargo.lock ./
+COPY Cargo.toml ./
+COPY scripts ./scripts
+COPY pkg ./pkg
+
+# Remove the RN bridge package which is unused for CLI builds
+RUN sed 's|, "app/packages/react-native-rust-bridge/cpp/rustbridge"||g' Cargo.toml > Cargo.toml.tmp \
+ && mv Cargo.toml.tmp Cargo.toml
+
+# Add fixtures and artifacts required by workspace members
+COPY eth/artifacts/contracts ./eth/artifacts/contracts
+COPY fixtures/circuits ./fixtures/circuits
+COPY fixtures/params ./fixtures/params
+
+RUN --mount=type=cache,target=/usr/local/cargo/registry \
+ build_flags=$([ "$RELEASE" = "1" ] && echo "--release"); \
+ if [ -f /gcs_key.json ]; then \
+ echo "Using sccache with GCS"; \
+ if RUSTC_WRAPPER=/usr/local/cargo/bin/sccache cargo build -p aggregator-cli --bin aggregator-cli ${build_flags}; then \
+ sccache --show-stats && \
+ sccache --stop-server; \
+ else \
+ status=$?; \
+ echo "sccache-backed build failed (exit ${status}); retrying without sccache"; \
+ sccache --stop-server || true; \
+ cargo build -p aggregator-cli --bin aggregator-cli ${build_flags}; \
+ fi; \
+ else \
+ echo "Skipping sccache (missing required vars)"; \
+ cargo build -p aggregator-cli --bin aggregator-cli ${build_flags}; \
+ fi
+
+RUN mkdir -p /build/bin && \
+ cp /build/target/$([ "$RELEASE" = "1" ] && echo "release" || echo "debug")/aggregator-cli /build/bin/aggregator-cli
+
+# Runtime image with barretenberg CLI installed
+FROM debian:bookworm-slim AS runtime
+
+ENV ROOT_DIR=/polybase
+WORKDIR $ROOT_DIR
+
+USER root
+
+RUN groupadd -g 1001 --system spaceman && \
+ useradd -u 1001 --system --gid spaceman --home "$ROOT_DIR" spaceman && \
+ chown -R spaceman:spaceman "$ROOT_DIR"
+
+RUN apt update && apt install -y curl nano libpq-dev postgresql wget tar ca-certificates
+
+# Download and install barretenberg CLI so the aggregator can spawn proofs
+RUN wget https://storage.googleapis.com/payy-public-fixtures/bb/v3.0.0-manual.20251030/barretenberg-amd64-linux.tar.gz -O barretenberg.tar.gz && \
+ echo "88586691621fdbf6105e064aca1b6e4f1f5345f2e75560d1d385693019480697 barretenberg.tar.gz" | sha256sum -c - && \
+ tar -xzf barretenberg.tar.gz && \
+ mv bb /usr/local/bin/bb && \
+ rm barretenberg.tar.gz
+
+# Fetch modern libc/libstdc++ plus jq which bb expects
+RUN echo 'deb http://deb.debian.org/debian testing main' \
+ > /etc/apt/sources.list.d/testing.list && \
+ echo 'APT::Default-Release "stable";' \
+ > /etc/apt/apt.conf.d/99defaultrelease && \
+ apt-get update && \
+ DEBIAN_FRONTEND=noninteractive \
+ apt-get install -y -t testing libc6 libstdc++6 jq
+
+# Create directories required by bb; /tmp keeps the standard sticky bit (1777)
+RUN mkdir -p /tmp /.bb-crs && \
+ chown spaceman:spaceman /tmp /.bb-crs && \
+ chmod 1777 /tmp && chmod 755 /.bb-crs
+
+ARG WAIT_SECONDS=0
+ENV WAIT_SECONDS=$WAIT_SECONDS
+
+RUN echo '#!/bin/bash\n\
+ if [ "$WAIT_SECONDS" -gt 0 ]; then\n\
+ echo "Waiting $WAIT_SECONDS seconds before starting..."\n\
+ sleep $WAIT_SECONDS\n\
+ fi\n\
+ exec "$@"' > /entrypoint-wrapper.sh && chmod +x /entrypoint-wrapper.sh
+
+USER spaceman
+
+COPY --from=builder /build/bin/aggregator-cli /usr/bin/aggregator-cli
+
+STOPSIGNAL SIGTERM
+
+ENTRYPOINT ["/entrypoint-wrapper.sh", "/usr/bin/aggregator-cli"]
diff --git a/docker/Dockerfile.barretenberg-api-server b/docker/Dockerfile.barretenberg-api-server
new file mode 100644
index 0000000..f9cf234
--- /dev/null
+++ b/docker/Dockerfile.barretenberg-api-server
@@ -0,0 +1,204 @@
+# Build binary
+FROM rust:1-bookworm AS workspace
+
+ARG SCCACHE_GCS_BUCKET
+ARG SCCACHE_GCS_KEY_PREFIX
+
+RUN rustup component add rustfmt && \
+ apt update && apt install -y libglib2.0-dev libssl-dev libclang-dev libpq-dev pkg-config python3 protobuf-compiler libprotobuf-dev cmake ninja-build curl
+
+# Ensure the toolchain specified in rust-toolchain.toml is installed
+RUN rustup show
+
+# Set `SYSROOT` to a dummy path (default is /usr) because pkg-config-rs *always*
+# links those located in that path dynamically but we want static linking, c.f.
+# https://github.com/rust-lang/pkg-config-rs/blob/54325785816695df031cef3b26b6a9a203bbc01b/src/lib.rs#L613
+ENV SYSROOT=/dummy
+
+
+# Conditional sccache setup: Only if bucket and key are provided
+RUN --mount=type=secret,id=gcs_sa_key_base64,required=false \
+ if [ -n "$SCCACHE_GCS_BUCKET" ] && [ -f /run/secrets/gcs_sa_key_base64 ]; then \
+ cat /run/secrets/gcs_sa_key_base64 | base64 -d > /gcs_key.json && \
+ wget https://github.com/mozilla/sccache/releases/download/v0.10.0/sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ tar -xzf sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ mv sccache-v0.10.0-x86_64-unknown-linux-musl/sccache /usr/local/cargo/bin/sccache && \
+ rm -rf sccache-v0.10.0-x86_64-unknown-linux-musl sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ chmod +x /usr/local/cargo/bin/sccache; \
+ fi
+ENV SCCACHE_GCS_KEY_PATH=/gcs_key.json
+ENV SCCACHE_GCS_BUCKET=$SCCACHE_GCS_BUCKET
+ENV SCCACHE_GCS_KEY_PREFIX=$SCCACHE_GCS_KEY_PREFIX
+ENV SCCACHE_GCS_RW_MODE=READ_WRITE
+
+
+WORKDIR /build
+
+
+FROM workspace AS tester
+
+SHELL ["/bin/bash", "--login", "-c"]
+
+RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash
+RUN nvm install 20 \
+ && ln -s "$(which node)" /usr/bin/node \
+ && ln -s "$(which npm)" /usr/bin/npm \
+ && npm install --global yarn \
+ && ln -s "$(which yarn)" /usr/bin/yarn
+
+# Download and install barretenberg
+RUN wget https://storage.googleapis.com/payy-public-fixtures/bb/v3.0.0-manual.20251030/barretenberg-amd64-linux.tar.gz -O barretenberg.tar.gz && \
+ echo "88586691621fdbf6105e064aca1b6e4f1f5345f2e75560d1d385693019480697 barretenberg.tar.gz" | sha256sum -c - && \
+ tar -xzf barretenberg.tar.gz && \
+ mv bb /usr/local/bin/bb && \
+ rm barretenberg.tar.gz
+
+# bb requires a recent glibcxx version
+# Enable backports and pull libstdc++ 13.x (exports GLIBCXX_3.4.31)
+# also installs jq, some bb commands require jq
+RUN echo 'deb http://deb.debian.org/debian testing main' \
+ > /etc/apt/sources.list.d/testing.list && \
+ echo 'APT::Default-Release "stable";' \
+ > /etc/apt/apt.conf.d/99defaultrelease && \
+ apt-get update && \
+ # pull only the two runtime libs from testing
+ DEBIAN_FRONTEND=noninteractive \
+ apt-get install -y -t testing libc6 libstdc++6 jq
+
+SHELL ["sh", "-c"]
+
+ARG RELEASE=1
+ENV RELEASE=$RELEASE
+
+COPY rust-toolchain.toml ./
+
+COPY . .
+
+# Run tests in RUN (not CMD); no `exec` here so the sccache stats/stop steps after && still run
+RUN --mount=type=cache,target=/usr/local/cargo/registry \
+ chmod +x ./docker/test.sh && \
+ if [ -f /gcs_key.json ]; then \
+ echo "Using sccache with GCS for tests"; \
+ RUSTC_WRAPPER=/usr/local/cargo/bin/sccache ./docker/test.sh && \
+ sccache --show-stats && \
+ sccache --stop-server; \
+ else \
+ echo "Skipping sccache for tests"; \
+ ./docker/test.sh; \
+ fi
+
+CMD ["sh", "-c", "echo 'This image is not meant to be run, only built.' && exit 1"]
+
+
+# Build binary
+FROM workspace AS builder
+
+ARG RELEASE=1
+
+COPY rust-toolchain.toml ./
+
+
+# Workspace manifests and sources needed for the build
+COPY .cargo/config.toml .cargo/config.toml
+COPY Cargo.lock ./
+COPY Cargo.toml ./
+COPY scripts ./scripts
+COPY pkg ./pkg
+
+# Remove app package as it's not needed
+RUN sed 's|, "app/packages/react-native-rust-bridge/cpp/rustbridge"||g' Cargo.toml > Cargo.toml.tmp \
+ && mv Cargo.toml.tmp Cargo.toml
+
+# Add fixtures
+COPY eth/artifacts/contracts ./eth/artifacts/contracts
+COPY fixtures/circuits ./fixtures/circuits
+COPY fixtures/params ./fixtures/params
+
+RUN ./scripts/download-fixtures-params.sh
+
+RUN --mount=type=cache,target=/usr/local/cargo/registry \
+ build_flags=$([ "$RELEASE" = "1" ] && echo "--release"); \
+ if [ -f /gcs_key.json ]; then \
+ echo "Using sccache with GCS"; \
+ if RUSTC_WRAPPER=/usr/local/cargo/bin/sccache cargo build -p barretenberg-api-bin --bin barretenberg-api-bin ${build_flags}; then \
+ sccache --show-stats && \
+ sccache --stop-server; \
+ else \
+ status=$?; \
+ echo "sccache-backed build failed (exit ${status}); retrying without sccache"; \
+ sccache --stop-server || true; \
+ cargo build -p barretenberg-api-bin --bin barretenberg-api-bin ${build_flags}; \
+ fi; \
+ else \
+ echo "Skipping sccache (missing required vars)"; \
+ cargo build -p barretenberg-api-bin --bin barretenberg-api-bin ${build_flags}; \
+ fi
+
+RUN cp /build/target/$([ "$RELEASE" = "1" ] && echo "release" || echo "debug")/barretenberg-api-bin /build/target/barretenberg-api-server
+
+
+# Runtime stage dedicated to barretenberg-api-server
+FROM debian:bookworm-slim AS runtime
+
+ENV ROOT_DIR=/polybase
+WORKDIR $ROOT_DIR
+
+USER root
+
+RUN groupadd -g 1001 --system spaceman && \
+ useradd -u 1001 --system --gid spaceman --home "$ROOT_DIR" spaceman && \
+ chown -R spaceman:spaceman "$ROOT_DIR"
+
+RUN apt update && apt install -y curl nano libpq-dev postgresql wget tar
+
+# Download and install barretenberg
+RUN wget https://storage.googleapis.com/payy-public-fixtures/bb/v3.0.0-manual.20251030/barretenberg-amd64-linux.tar.gz -O barretenberg.tar.gz && \
+ echo "88586691621fdbf6105e064aca1b6e4f1f5345f2e75560d1d385693019480697 barretenberg.tar.gz" | sha256sum -c - && \
+ tar -xzf barretenberg.tar.gz && \
+ mv bb /usr/local/bin/bb && \
+ rm barretenberg.tar.gz
+
+# Enable backports and pull libstdc++ 13.x (exports GLIBCXX_3.4.31)
+# also installs jq, some bb commands require jq
+RUN echo 'deb http://deb.debian.org/debian testing main' \
+ > /etc/apt/sources.list.d/testing.list && \
+ echo 'APT::Default-Release "stable";' \
+ > /etc/apt/apt.conf.d/99defaultrelease && \
+ apt-get update && \
+ # pull only the two runtime libs from testing
+ DEBIAN_FRONTEND=noninteractive \
+ apt-get install -y -t testing libc6 libstdc++6 jq
+
+# Create directories for spaceman; /tmp keeps the standard sticky bit (1777)
+RUN mkdir -p /tmp /.bb-crs /polybase/.bb-crs /polybase/.polybase/fixtures/params && \
+ chown spaceman:spaceman /tmp /.bb-crs /polybase/.bb-crs /polybase/.polybase/fixtures/params && \
+ chmod 1777 /tmp && chmod 755 /.bb-crs /polybase/.bb-crs /polybase/.polybase/fixtures/params
+
+COPY --from=builder /build/scripts/download-fixtures-params.sh /usr/local/bin/download-fixtures-params.sh
+RUN chmod +x /usr/local/bin/download-fixtures-params.sh
+
+ARG WAIT_SECONDS=0
+
+ENV WAIT_SECONDS=$WAIT_SECONDS
+
+RUN echo '#!/bin/bash\n\
+ if [ "$WAIT_SECONDS" -gt 0 ]; then\n\
+ echo "Waiting $WAIT_SECONDS seconds before starting..."\n\
+ sleep $WAIT_SECONDS\n\
+ fi\n\
+ exec "$@"' > /entrypoint-wrapper.sh && chmod +x /entrypoint-wrapper.sh
+
+USER spaceman
+
+RUN /usr/local/bin/download-fixtures-params.sh
+
+COPY --from=builder /build/target/barretenberg-api-server /usr/bin/barretenberg-api-server
+
+RUN touch /polybase/.bb-crs/crs.lock && \
+ chown spaceman:spaceman /polybase/.bb-crs/crs.lock
+
+STOPSIGNAL SIGTERM
+
+EXPOSE 9444
+
+ENTRYPOINT ["/entrypoint-wrapper.sh", "/usr/bin/barretenberg-api-server"]
diff --git a/docker/Dockerfile.burn-substitutor b/docker/Dockerfile.burn-substitutor
new file mode 100644
index 0000000..0f626c9
--- /dev/null
+++ b/docker/Dockerfile.burn-substitutor
@@ -0,0 +1,61 @@
+FROM rust:1-bookworm AS builder
+ARG RUST_GIT_FETCH_CLI
+ARG SCCACHE_GCS_BUCKET
+ARG SCCACHE_GCS_KEY_PREFIX
+ENV CARGO_NET_GIT_FETCH_WITH_CLI=${RUST_GIT_FETCH_CLI:-false}
+
+RUN rustup component add rustfmt && \
+ apt update && apt install -y libglib2.0-dev libssl-dev libclang-dev python3 protobuf-compiler libprotobuf-dev
+
+# Conditional sccache setup: only if the GCS bucket arg and the mounted key secret are both provided
+RUN --mount=type=secret,id=gcs_sa_key_base64,required=false \
+ if [ -n "$SCCACHE_GCS_BUCKET" ] && [ -f /run/secrets/gcs_sa_key_base64 ]; then \
+ cat /run/secrets/gcs_sa_key_base64 | base64 -d > /gcs_key.json && \
+ wget https://github.com/mozilla/sccache/releases/download/v0.10.0/sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ tar -xzf sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ mv sccache-v0.10.0-x86_64-unknown-linux-musl/sccache /usr/local/cargo/bin/sccache && \
+ rm -rf sccache-v0.10.0-x86_64-unknown-linux-musl sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ chmod +x /usr/local/cargo/bin/sccache; \
+ fi
+ENV SCCACHE_GCS_KEY_PATH=/gcs_key.json
+ENV SCCACHE_GCS_BUCKET=$SCCACHE_GCS_BUCKET
+ENV SCCACHE_GCS_KEY_PREFIX=$SCCACHE_GCS_KEY_PREFIX
+ENV SCCACHE_GCS_RW_MODE=READ_WRITE
+
+WORKDIR /build
+
+COPY rust-toolchain.toml ./
+COPY .cargo/config.toml .cargo/config.toml
+COPY Cargo.toml ./
+COPY Cargo.lock ./
+COPY scripts ./scripts
+COPY pkg ./pkg
+
+# Add fixtures (contract artifacts and proving params) needed by workspace crates
+COPY eth/artifacts/contracts ./eth/artifacts/contracts
+COPY fixtures/params ./fixtures/params
+
+# Remove the app package from the workspace members as it's not needed for this build
+RUN sed 's|, "app/packages/react-native-rust-bridge/cpp/rustbridge"||g' Cargo.toml > Cargo.toml.tmp \
+ && mv Cargo.toml.tmp Cargo.toml
+
+RUN if [ -f /gcs_key.json ]; then \
+ echo "Using sccache with GCS"; \
+ RUSTC_WRAPPER=/usr/local/cargo/bin/sccache cargo build --release --bin burn-substitutor; \
+ else \
+ echo "Skipping sccache (missing required vars)"; \
+ cargo build --release --bin burn-substitutor; \
+ fi
+
+FROM debian:bookworm-slim
+
+RUN apt-get update && apt-get install -y openssl ca-certificates libpq-dev postgresql curl
+
+COPY --from=builder /build/scripts/download-fixtures-params.sh /usr/local/bin/download-fixtures-params.sh
+RUN chmod +x /usr/local/bin/download-fixtures-params.sh
+
+COPY --from=builder /build/target/release/burn-substitutor /usr/local/bin/burn-substitutor
+
+RUN /usr/local/bin/download-fixtures-params.sh
+
+CMD ["burn-substitutor"]
diff --git a/docker/Dockerfile.db-migrations b/docker/Dockerfile.db-migrations
new file mode 100644
index 0000000..6386dd1
--- /dev/null
+++ b/docker/Dockerfile.db-migrations
@@ -0,0 +1,37 @@
+FROM rust:1-bookworm
+ARG SCCACHE_GCS_BUCKET
+ARG SCCACHE_GCS_KEY_PREFIX
+ENV HOME=${HOME:-/root}
+
+RUN apt update && apt install -y postgresql
+
+# Conditional sccache setup: only if the GCS bucket arg and the mounted key secret are both provided
+RUN --mount=type=secret,id=gcs_sa_key_base64,required=false \
+ if [ -n "$SCCACHE_GCS_BUCKET" ] && [ -f /run/secrets/gcs_sa_key_base64 ]; then \
+ cat /run/secrets/gcs_sa_key_base64 | base64 -d > /gcs_key.json && \
+ wget https://github.com/mozilla/sccache/releases/download/v0.10.0/sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ tar -xzf sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ mv sccache-v0.10.0-x86_64-unknown-linux-musl/sccache /usr/local/cargo/bin/sccache && \
+ rm -rf sccache-v0.10.0-x86_64-unknown-linux-musl sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ chmod +x /usr/local/cargo/bin/sccache; \
+ fi
+ENV SCCACHE_GCS_KEY_PATH=/gcs_key.json
+ENV SCCACHE_GCS_BUCKET=$SCCACHE_GCS_BUCKET
+ENV SCCACHE_GCS_KEY_PREFIX=$SCCACHE_GCS_KEY_PREFIX
+ENV SCCACHE_GCS_RW_MODE=READ_WRITE
+
+RUN if [ -f /gcs_key.json ]; then \
+ echo "Using sccache with GCS"; \
+ RUSTC_WRAPPER=/usr/local/cargo/bin/sccache cargo install diesel_cli --no-default-features --features postgres; \
+ else \
+ echo "Skipping sccache (missing required vars)"; \
+ cargo install diesel_cli --no-default-features --features postgres; \
+ fi
+
+COPY pkg/database/diesel.toml ./pkg/database/diesel.toml
+COPY pkg/database/migrations ./pkg/database/migrations
+
+WORKDIR /pkg/database
+
+# Run migrations; assumes postgres is already reachable via DATABASE_URL (no built-in wait/retry here)
+CMD ["sh", "-c", "diesel migration run"]
diff --git a/docker/Dockerfile.eth-node b/docker/Dockerfile.eth-node
new file mode 100644
index 0000000..1c65e28
--- /dev/null
+++ b/docker/Dockerfile.eth-node
@@ -0,0 +1,17 @@
+FROM node:18-bookworm
+
+WORKDIR /eth
+
+# Copy only the manifests first so the install layer is cached until they change
+COPY eth/package.json eth/yarn.lock ./
+
+# --frozen-lockfile makes the install reproducible and fails fast on lockfile drift
+RUN yarn install --frozen-lockfile
+
+COPY eth/ ./
+COPY pkg/contracts /pkg/contracts
+
+EXPOSE 8545
+
+# Start the local eth node in the background, give it a moment to boot, deploy contracts, then keep the node in the foreground
+CMD ["sh", "-c", "yarn eth-node --hostname 0.0.0.0 & sleep 3 && yarn deploy:local && wait"]
diff --git a/docker/Dockerfile.faucet b/docker/Dockerfile.faucet
new file mode 100644
index 0000000..0da78be
--- /dev/null
+++ b/docker/Dockerfile.faucet
@@ -0,0 +1,42 @@
+FROM rust:1-bookworm AS builder
+ARG RUST_GIT_FETCH_CLI
+ENV HOME=/root
+
+ENV CARGO_NET_GIT_FETCH_WITH_CLI=${RUST_GIT_FETCH_CLI:-false}
+
+RUN rustup component add rustfmt && \
+ apt update && apt install -y libglib2.0-dev libssl-dev libclang-dev libpq-dev pkg-config python3 protobuf-compiler libprotobuf-dev cmake ninja-build
+
+WORKDIR /build
+
+COPY rust-toolchain.toml ./
+COPY .cargo/config.toml .cargo/config.toml
+COPY Cargo.toml ./
+COPY Cargo.lock ./
+COPY pkg ./pkg
+
+# Ensure the toolchain specified in rust-toolchain.toml is installed
+RUN rustup show
+
+# Add fixtures and contract artifacts required by workspace crates
+COPY eth/artifacts/contracts ./eth/artifacts/contracts
+COPY fixtures/circuits ./fixtures/circuits
+COPY fixtures/params ./fixtures/params
+
+# Remove the RN bridge package from the workspace as it's not needed for this build
+RUN sed 's|, "app/packages/react-native-rust-bridge/cpp/rustbridge"||g' Cargo.toml > Cargo.toml.tmp \
+ && mv Cargo.toml.tmp Cargo.toml
+
+# Cache the cargo registry between builds, matching the other images in this repo
+RUN --mount=type=cache,target=/usr/local/cargo/registry \
+ cargo build --release --bin faucet
+
+FROM debian:bookworm-slim
+
+RUN apt-get update && apt-get install -y ca-certificates curl && rm -rf /var/lib/apt/lists/*
+
+COPY --from=builder /build/target/release/faucet /usr/local/bin/faucet
+
+EXPOSE 8080
+
+ENTRYPOINT ["faucet"]
diff --git a/docker/Dockerfile.guild b/docker/Dockerfile.guild
new file mode 100644
index 0000000..d972c00
--- /dev/null
+++ b/docker/Dockerfile.guild
@@ -0,0 +1,104 @@
+FROM rust:1-bookworm AS builder
+ARG RUST_GIT_FETCH_CLI
+ARG SCCACHE_GCS_BUCKET
+ARG SCCACHE_GCS_KEY_PREFIX
+ENV HOME=${HOME:-/root}
+
+ENV CARGO_NET_GIT_FETCH_WITH_CLI=${RUST_GIT_FETCH_CLI:-false}
+
+RUN rustup component add rustfmt && \
+ apt update && apt install -y libglib2.0-dev libssl-dev libclang-dev python3 protobuf-compiler libprotobuf-dev cmake ninja-build
+
+# Conditional sccache setup: only if the GCS bucket arg and the mounted key secret are both provided
+RUN --mount=type=secret,id=gcs_sa_key_base64,required=false \
+ if [ -n "$SCCACHE_GCS_BUCKET" ] && [ -f /run/secrets/gcs_sa_key_base64 ]; then \
+ cat /run/secrets/gcs_sa_key_base64 | base64 -d > /gcs_key.json && \
+ wget https://github.com/mozilla/sccache/releases/download/v0.10.0/sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ tar -xzf sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ mv sccache-v0.10.0-x86_64-unknown-linux-musl/sccache /usr/local/cargo/bin/sccache && \
+ rm -rf sccache-v0.10.0-x86_64-unknown-linux-musl sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ chmod +x /usr/local/cargo/bin/sccache; \
+ fi
+ENV SCCACHE_GCS_KEY_PATH=/gcs_key.json
+ENV SCCACHE_GCS_BUCKET=$SCCACHE_GCS_BUCKET
+ENV SCCACHE_GCS_KEY_PREFIX=$SCCACHE_GCS_KEY_PREFIX
+ENV SCCACHE_GCS_RW_MODE=READ_WRITE
+
+WORKDIR /build
+
+COPY rust-toolchain.toml ./
+COPY .cargo/config.toml .cargo/config.toml
+COPY Cargo.toml ./
+COPY Cargo.lock ./
+COPY scripts ./scripts
+COPY pkg ./pkg
+
+# Ensure the toolchain specified in rust-toolchain.toml is installed
+RUN rustup show
+
+# Add fixtures (contract artifacts, circuits and proving params) needed by workspace crates
+COPY eth/artifacts/contracts ./eth/artifacts/contracts
+COPY fixtures/circuits ./fixtures/circuits
+COPY fixtures/params ./fixtures/params
+
+# Remove the app package from the workspace members as it's not needed for this build
+RUN sed 's|, "app/packages/react-native-rust-bridge/cpp/rustbridge"||g' Cargo.toml > Cargo.toml.tmp \
+ && mv Cargo.toml.tmp Cargo.toml
+
+RUN if [ -f /gcs_key.json ]; then \
+ echo "Using sccache with GCS"; \
+ RUSTC_WRAPPER=/usr/local/cargo/bin/sccache cargo build --release --bin guild; \
+ else \
+ echo "Skipping sccache (missing required vars)"; \
+ cargo build --release --bin guild; \
+ fi
+
+FROM debian:bookworm-slim
+
+# Add a dedicated non-root user for runtime
+RUN adduser --disabled-password --gecos "" --uid 1001 polybase
+
+RUN apt-get update && apt-get install -y openssl ca-certificates libpq-dev postgresql wget tar curl build-essential pkg-config
+
+# Download and install barretenberg
+RUN wget https://storage.googleapis.com/payy-public-fixtures/bb/v3.0.0-manual.20251030/barretenberg-amd64-linux.tar.gz -O barretenberg.tar.gz && \
+ echo "88586691621fdbf6105e064aca1b6e4f1f5345f2e75560d1d385693019480697 barretenberg.tar.gz" | sha256sum -c - && \
+ tar -xzf barretenberg.tar.gz && \
+ mv bb /usr/local/bin/bb && \
+ rm barretenberg.tar.gz
+
+# Enable backports and pull libstdc++ 13.x (exports GLIBCXX_3.4.31)
+# also installs jq, some bb commands require jq
+RUN echo 'deb http://deb.debian.org/debian testing main' \
+ > /etc/apt/sources.list.d/testing.list && \
+ echo 'APT::Default-Release "stable";' \
+ > /etc/apt/apt.conf.d/99defaultrelease && \
+ apt-get update && \
+ # pull only the two runtime libs from testing
+ DEBIAN_FRONTEND=noninteractive \
+ apt-get install -y -t testing libc6 libstdc++6 jq
+
+RUN mkdir -p /tmp /.bb-crs && \
+ chmod 1777 /tmp && \
+ chmod 755 /.bb-crs
+
+COPY --from=builder /build/scripts/download-fixtures-params.sh /usr/local/bin/download-fixtures-params.sh
+RUN chmod +x /usr/local/bin/download-fixtures-params.sh
+
+# Add migrations
+COPY pkg/database/diesel.toml ./pkg/database/diesel.toml
+COPY pkg/database/migrations ./pkg/database/migrations
+RUN chown -R polybase ./pkg/
+
+USER polybase
+
+RUN /usr/local/bin/download-fixtures-params.sh
+# Install rust & cargo
+RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --profile minimal --default-toolchain stable --no-modify-path
+ENV PATH="/home/polybase/.cargo/bin:${PATH}"
+# Install diesel for migrations
+RUN cargo install diesel_cli --no-default-features --features postgres
+
+COPY --from=builder /build/target/release/guild /usr/local/bin/guild
+
+CMD ["guild"]
diff --git a/docker/Dockerfile.merge-cli b/docker/Dockerfile.merge-cli
new file mode 100644
index 0000000..f67bede
--- /dev/null
+++ b/docker/Dockerfile.merge-cli
@@ -0,0 +1,90 @@
+FROM rust:1-bookworm AS builder
+ARG RUST_GIT_FETCH_CLI
+ARG SCCACHE_GCS_BUCKET
+ARG SCCACHE_GCS_KEY_PREFIX
+ENV CARGO_NET_GIT_FETCH_WITH_CLI=${RUST_GIT_FETCH_CLI:-false}
+
+RUN rustup component add rustfmt && \
+ apt update && apt install -y libglib2.0-dev libssl-dev libclang-dev python3 protobuf-compiler libprotobuf-dev
+
+# Conditional sccache setup: Only if bucket and key are provided
+RUN --mount=type=secret,id=gcs_sa_key_base64,required=false \
+ if [ -n "$SCCACHE_GCS_BUCKET" ] && [ -f /run/secrets/gcs_sa_key_base64 ]; then \
+ cat /run/secrets/gcs_sa_key_base64 | base64 -d > /gcs_key.json && \
+ wget https://github.com/mozilla/sccache/releases/download/v0.10.0/sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ tar -xzf sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ mv sccache-v0.10.0-x86_64-unknown-linux-musl/sccache /usr/local/cargo/bin/sccache && \
+ rm -rf sccache-v0.10.0-x86_64-unknown-linux-musl sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ chmod +x /usr/local/cargo/bin/sccache; \
+ fi
+ENV SCCACHE_GCS_KEY_PATH=/gcs_key.json
+ENV SCCACHE_GCS_BUCKET=$SCCACHE_GCS_BUCKET
+ENV SCCACHE_GCS_KEY_PREFIX=$SCCACHE_GCS_KEY_PREFIX
+ENV SCCACHE_GCS_RW_MODE=READ_WRITE
+
+WORKDIR /build
+
+COPY rust-toolchain.toml ./
+COPY .cargo/config.toml .cargo/config.toml
+COPY Cargo.toml ./
+COPY Cargo.lock ./
+COPY scripts ./scripts
+COPY pkg ./pkg
+
+# Add fixtures needed for building dependencies
+COPY eth/artifacts/contracts ./eth/artifacts/contracts
+COPY fixtures/circuits ./fixtures/circuits
+COPY fixtures/params ./fixtures/params
+
+# Remove app package as it's not needed
+RUN sed 's|, "app/packages/react-native-rust-bridge/cpp/rustbridge"||g' Cargo.toml > Cargo.toml.tmp \
+ && mv Cargo.toml.tmp Cargo.toml
+
+RUN if [ -f /gcs_key.json ]; then \
+ echo "Using sccache with GCS"; \
+ RUSTC_WRAPPER=/usr/local/cargo/bin/sccache cargo build --release --bin merge-cli; \
+ else \
+ echo "Skipping sccache (missing required vars)"; \
+ cargo build --release --bin merge-cli; \
+ fi
+
+FROM debian:bookworm-slim
+
+RUN apt-get update && apt-get install -y openssl ca-certificates libpq-dev postgresql wget tar curl
+
+# Download and install barretenberg
+RUN wget https://storage.googleapis.com/payy-public-fixtures/bb/v3.0.0-manual.20251030/barretenberg-amd64-linux.tar.gz -O barretenberg.tar.gz && \
+ echo "88586691621fdbf6105e064aca1b6e4f1f5345f2e75560d1d385693019480697 barretenberg.tar.gz" | sha256sum -c - && \
+ tar -xzf barretenberg.tar.gz && \
+ mv bb /usr/local/bin/bb && \
+ rm barretenberg.tar.gz
+
+# Enable backports and pull libstdc++ 13.x (exports GLIBCXX_3.4.31)
+# also installs jq, some bb commands require jq
+RUN echo 'deb http://deb.debian.org/debian testing main' \
+ > /etc/apt/sources.list.d/testing.list && \
+ echo 'APT::Default-Release "stable";' \
+ > /etc/apt/apt.conf.d/99defaultrelease && \
+ apt-get update && \
+ # pull only the two runtime libs from testing
+ DEBIAN_FRONTEND=noninteractive \
+ apt-get install -y -t testing libc6 libstdc++6 jq
+
+
+RUN mkdir -p /tmp /.bb-crs && \
+ chmod 1777 /tmp && chmod 755 /.bb-crs
+
+COPY --from=builder /build/scripts/download-fixtures-params.sh /usr/local/bin/download-fixtures-params.sh
+RUN chmod +x /usr/local/bin/download-fixtures-params.sh
+
+COPY --from=builder /build/target/release/merge-cli /usr/local/bin/merge-cli
+
+RUN /usr/local/bin/download-fixtures-params.sh
+
+# Set default environment variables that can be overridden
+ENV DATABASE_URL=postgres://localhost/guild
+ENV NODE_URL=http://localhost:8091/v0
+ENV BURN_EVM_ADDR=0x9A4ebe49A963D3BC5f16639A0ABFF093CA0b040D
+ENV BATCH=10
+
+CMD merge-cli merge-ramps --batch ${BATCH} --burn-evm-address ${BURN_EVM_ADDR}
diff --git a/docker/Dockerfile.observer b/docker/Dockerfile.observer
new file mode 100644
index 0000000..73b8b2b
--- /dev/null
+++ b/docker/Dockerfile.observer
@@ -0,0 +1,70 @@
+FROM rust:1-bookworm AS builder
+ARG RUST_GIT_FETCH_CLI
+ARG SCCACHE_GCS_BUCKET
+ARG SCCACHE_GCS_KEY_PREFIX
+ENV CARGO_NET_GIT_FETCH_WITH_CLI=${RUST_GIT_FETCH_CLI:-false}
+
+RUN rustup component add rustfmt && \
+ apt update && apt install -y libglib2.0-dev libssl-dev libclang-dev python3 protobuf-compiler libprotobuf-dev
+
+RUN rustup toolchain add nightly-2022-12-10
+RUN rustup component add clippy --toolchain nightly-2022-12-10
+
+# Install Go
+RUN apt-get update && \
+ apt-get install -y wget
+RUN wget https://go.dev/dl/go1.18.linux-amd64.tar.gz
+RUN tar -xvf go1.18.linux-amd64.tar.gz
+RUN mv go /usr/local
+
+# Set Go env vars. NOTE(review): $HOME may be empty at ENV-eval time, making GOPATH "/go" - confirm
+ENV GOROOT=/usr/local/go
+ENV GOPATH=$HOME/go
+ENV PATH=$GOPATH/bin:$GOROOT/bin:$PATH
+
+# Conditional sccache setup: only if the GCS bucket arg and the mounted key secret are both provided
+RUN --mount=type=secret,id=gcs_sa_key_base64,required=false \
+ if [ -n "$SCCACHE_GCS_BUCKET" ] && [ -f /run/secrets/gcs_sa_key_base64 ]; then \
+ cat /run/secrets/gcs_sa_key_base64 | base64 -d > /gcs_key.json && \
+ wget https://github.com/mozilla/sccache/releases/download/v0.10.0/sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ tar -xzf sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ mv sccache-v0.10.0-x86_64-unknown-linux-musl/sccache /usr/local/cargo/bin/sccache && \
+ rm -rf sccache-v0.10.0-x86_64-unknown-linux-musl sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ chmod +x /usr/local/cargo/bin/sccache; \
+ fi
+ENV SCCACHE_GCS_KEY_PATH=/gcs_key.json
+ENV SCCACHE_GCS_BUCKET=$SCCACHE_GCS_BUCKET
+ENV SCCACHE_GCS_KEY_PREFIX=$SCCACHE_GCS_KEY_PREFIX
+ENV SCCACHE_GCS_RW_MODE=READ_WRITE
+
+WORKDIR /build
+
+COPY rust-toolchain.toml ./
+COPY .cargo/config.toml .cargo/config.toml
+COPY Cargo.toml ./
+COPY Cargo.lock ./
+COPY pkg ./pkg
+
+# Add fixtures (contract artifacts and proving params) needed by workspace crates
+COPY eth/artifacts/contracts ./eth/artifacts/contracts
+COPY fixtures/params ./fixtures/params
+
+# Remove the app package from the workspace members as it's not needed for this build
+RUN sed 's|, "app/packages/react-native-rust-bridge/cpp/rustbridge"||g' Cargo.toml > Cargo.toml.tmp \
+ && mv Cargo.toml.tmp Cargo.toml
+
+RUN if [ -f /gcs_key.json ]; then \
+ echo "Using sccache with GCS"; \
+ RUSTC_WRAPPER=/usr/local/cargo/bin/sccache cargo build --release --bin observer; \
+ else \
+ echo "Skipping sccache (missing required vars)"; \
+ cargo build --release --bin observer; \
+ fi
+
+FROM debian:bookworm-slim
+
+RUN apt-get update && apt-get install -y openssl ca-certificates
+
+COPY --from=builder /build/target/release/observer /usr/local/bin/observer
+
+CMD ["observer"]
diff --git a/docker/Dockerfile.payy-evm b/docker/Dockerfile.payy-evm
new file mode 100644
index 0000000..ed0bb37
--- /dev/null
+++ b/docker/Dockerfile.payy-evm
@@ -0,0 +1,104 @@
+FROM rust:1-bookworm AS builder
+
+ARG SCCACHE_GCS_BUCKET
+ARG SCCACHE_GCS_KEY_PREFIX
+
+RUN rustup component add rustfmt && \
+ apt update && apt install -y libglib2.0-dev libssl-dev libclang-dev protobuf-compiler libprotobuf-dev cmake ninja-build pkg-config curl
+
+# Conditional sccache setup: Only if bucket and key are provided
+RUN --mount=type=secret,id=gcs_sa_key_base64,required=false \
+ if [ -n "$SCCACHE_GCS_BUCKET" ] && [ -f /run/secrets/gcs_sa_key_base64 ]; then \
+ cat /run/secrets/gcs_sa_key_base64 | base64 -d > /gcs_key.json && \
+ wget https://github.com/mozilla/sccache/releases/download/v0.10.0/sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ tar -xzf sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ mv sccache-v0.10.0-x86_64-unknown-linux-musl/sccache /usr/local/cargo/bin/sccache && \
+ rm -rf sccache-v0.10.0-x86_64-unknown-linux-musl sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz && \
+ chmod +x /usr/local/cargo/bin/sccache; \
+ fi
+ENV SCCACHE_GCS_KEY_PATH=/gcs_key.json
+ENV SCCACHE_GCS_BUCKET=$SCCACHE_GCS_BUCKET
+ENV SCCACHE_GCS_KEY_PREFIX=$SCCACHE_GCS_KEY_PREFIX
+ENV SCCACHE_GCS_RW_MODE=READ_WRITE
+
+WORKDIR /build
+
+COPY rust-toolchain.toml ./
+COPY .cargo/config.toml .cargo/config.toml
+COPY Cargo.toml ./
+COPY Cargo.lock ./
+COPY scripts ./scripts
+COPY pkg ./pkg
+
+# Ensure the toolchain specified in rust-toolchain.toml is installed
+RUN rustup show
+
+# Add fixtures
+COPY eth/artifacts/contracts ./eth/artifacts/contracts
+COPY fixtures/circuits ./fixtures/circuits
+COPY fixtures/params ./fixtures/params
+
+# Remove the app package as it's not needed
+RUN sed 's|, "app/packages/react-native-rust-bridge/cpp/rustbridge"||g' Cargo.toml > Cargo.toml.tmp \
+ && mv Cargo.toml.tmp Cargo.toml
+
+RUN --mount=type=cache,target=/usr/local/cargo/registry \
+ if [ -f /gcs_key.json ]; then \
+ echo "Using sccache with GCS"; \
+ RUSTC_WRAPPER=/usr/local/cargo/bin/sccache cargo build --release --bin payy-evm; \
+ sccache --show-stats && \
+ sccache --stop-server; \
+ else \
+ echo "Skipping sccache (missing required vars)"; \
+ cargo build --release --bin payy-evm; \
+ fi
+
+FROM debian:bookworm-slim AS runtime
+
+ENV ROOT_DIR=/data
+ENV HOME=/data
+WORKDIR $ROOT_DIR
+
+RUN groupadd -g 1001 --system payy && \
+ useradd -u 1001 --system --gid payy --home "$ROOT_DIR" payy && \
+ mkdir -p "$ROOT_DIR" && \
+ chown -R payy:payy "$ROOT_DIR"
+
+RUN apt-get update && apt-get install -y curl libpq-dev wget tar ca-certificates
+
+# Download and install barretenberg
+RUN wget https://storage.googleapis.com/payy-public-fixtures/bb/v3.0.0-manual.20251030/barretenberg-amd64-linux.tar.gz -O barretenberg.tar.gz && \
+ echo "88586691621fdbf6105e064aca1b6e4f1f5345f2e75560d1d385693019480697 barretenberg.tar.gz" | sha256sum -c - && \
+ tar -xzf barretenberg.tar.gz && \
+ mv bb /usr/local/bin/bb && \
+ rm barretenberg.tar.gz
+
+# Add the Debian testing repo (kept non-default via Default-Release) and pull libstdc++ 13.x (exports GLIBCXX_3.4.31).
+# Also installs jq, since some bb commands require jq.
+RUN echo 'deb http://deb.debian.org/debian testing main' \
+ > /etc/apt/sources.list.d/testing.list && \
+ echo 'APT::Default-Release "stable";' \
+ > /etc/apt/apt.conf.d/99defaultrelease && \
+ apt-get update && \
+ # pull only the two runtime libs from testing
+ DEBIAN_FRONTEND=noninteractive \
+ apt-get install -y -t testing libc6 libstdc++6 jq
+
+RUN mkdir -p /tmp && \
+ chmod 1777 /tmp
+
+COPY --from=builder /build/scripts/download-fixtures-params.sh /usr/local/bin/download-fixtures-params.sh
+RUN chmod +x /usr/local/bin/download-fixtures-params.sh
+
+COPY --from=builder /build/target/release/payy-evm /usr/bin/payy-evm
+
+USER payy
+
+RUN /usr/local/bin/download-fixtures-params.sh
+
+EXPOSE 8545 8546 30303
+
+ENTRYPOINT ["/usr/bin/payy-evm"]
+CMD ["run"]
+
+VOLUME ["/data"]
diff --git a/docker/Dockerfile.price-cache b/docker/Dockerfile.price-cache
new file mode 100644
index 0000000..2df53b0
--- /dev/null
+++ b/docker/Dockerfile.price-cache
@@ -0,0 +1,37 @@
+FROM rust:1-bookworm AS builder
+ARG RUST_GIT_FETCH_CLI
+ENV HOME=/root
+
+ENV CARGO_NET_GIT_FETCH_WITH_CLI=${RUST_GIT_FETCH_CLI:-false}
+
+RUN rustup component add rustfmt && \
+ apt update && apt install -y libglib2.0-dev libssl-dev libclang-dev libpq-dev pkg-config python3 protobuf-compiler libprotobuf-dev cmake ninja-build
+
+WORKDIR /build
+
+COPY rust-toolchain.toml ./
+COPY .cargo/config.toml .cargo/config.toml
+COPY Cargo.toml ./
+COPY Cargo.lock ./
+COPY pkg ./pkg
+
+RUN rustup show
+
+COPY eth/artifacts/contracts ./eth/artifacts/contracts
+COPY fixtures/circuits ./fixtures/circuits
+COPY fixtures/params ./fixtures/params
+
+RUN sed 's|, "app/packages/react-native-rust-bridge/cpp/rustbridge"||g' Cargo.toml > Cargo.toml.tmp \
+ && mv Cargo.toml.tmp Cargo.toml
+
+RUN cargo build --release --bin price-cache
+
+FROM debian:bookworm-slim
+
+RUN apt-get update && apt-get install -y ca-certificates libpq-dev curl && rm -rf /var/lib/apt/lists/*
+
+COPY --from=builder /build/target/release/price-cache /usr/local/bin/price-cache
+
+EXPOSE 8073
+
+ENTRYPOINT ["price-cache"]
diff --git a/docker/Dockerfile.providers b/docker/Dockerfile.providers
new file mode 100644
index 0000000..b2a9224
--- /dev/null
+++ b/docker/Dockerfile.providers
@@ -0,0 +1,39 @@
+FROM rust:1-bookworm AS builder
+ARG RUST_GIT_FETCH_CLI
+ENV HOME=/root
+
+ENV CARGO_NET_GIT_FETCH_WITH_CLI=${RUST_GIT_FETCH_CLI:-false}
+
+RUN rustup component add rustfmt && \
+ apt update && apt install -y libglib2.0-dev libssl-dev libclang-dev python3 protobuf-compiler libprotobuf-dev cmake ninja-build
+
+WORKDIR /build
+
+COPY rust-toolchain.toml ./
+COPY .cargo/config.toml .cargo/config.toml
+COPY Cargo.toml ./
+COPY Cargo.lock ./
+COPY pkg ./pkg
+
+# Ensure the toolchain specified in rust-toolchain.toml is installed
+RUN rustup show
+
+# Add fixtures
+COPY eth/artifacts/contracts ./eth/artifacts/contracts
+COPY fixtures/circuits ./fixtures/circuits
+COPY fixtures/params ./fixtures/params
+
+# Remove the app package as it's not needed
+RUN sed 's|, "app/packages/react-native-rust-bridge/cpp/rustbridge"||g' Cargo.toml > Cargo.toml.tmp \
+ && mv Cargo.toml.tmp Cargo.toml
+
+RUN cargo build --bin providers
+
+FROM debian:bookworm-slim
+
+RUN apt-get update && apt-get install -y openssl ca-certificates libpq-dev postgresql wget tar curl
+
+
+COPY --from=builder /build/target/debug/providers /usr/local/bin/providers
+
+CMD ["providers", "server"]
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
new file mode 100644
index 0000000..18954b9
--- /dev/null
+++ b/docker/docker-compose.yml
@@ -0,0 +1,234 @@
+networks:
+ zk-rollup:
+ driver: bridge
+
+volumes:
+ node-db:
+ smirk-data:
+ prover-db:
+ prover-smirk:
+ pgdata:
+ payy-evm-data:
+
+services:
+ postgres:
+ image: postgres:18
+ restart: unless-stopped
+ environment:
+ - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-postgres}
+ - PGDATA=/var/lib/postgresql/data/pgdata
+ volumes:
+ - pgdata:/var/lib/postgresql
+ networks:
+ - zk-rollup
+ ports:
+ - "5432:5432"
+ profiles:
+ - dev
+ - test
+ - ci
+
+ db-migrations:
+ build:
+ context: ..
+ dockerfile: docker/Dockerfile.db-migrations
+ depends_on:
+ - postgres
+ environment:
+ - DATABASE_URL=postgres://postgres:${POSTGRES_PASSWORD:-postgres}@postgres:5432/postgres
+ networks:
+ - zk-rollup
+ restart: "no"
+ profiles:
+ - dev
+ - ci
+
+ eth-node:
+ build:
+ context: ..
+ dockerfile: docker/Dockerfile.eth-node
+ restart: unless-stopped
+ ports:
+ - "8545:8545"
+ networks:
+ - zk-rollup
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:8545"]
+ interval: 5s
+ timeout: 3s
+ retries: 60
+ start_period: 5s
+ profiles:
+ - dev
+ - test
+ - ci
+
+ providers:
+ build:
+ context: ..
+ dockerfile: docker/Dockerfile.providers
+ restart: unless-stopped
+ ports:
+ - "8072:8072"
+ networks:
+ - zk-rollup
+ profiles:
+ - dev
+ - test
+
+ guild:
+ build:
+ context: ..
+ dockerfile: docker/Dockerfile.guild
+ restart: unless-stopped
+ depends_on:
+ db-migrations:
+ condition: service_completed_successfully
+ postgres:
+ condition: service_started
+ ports:
+ - "8071:8071"
+ environment:
+ - DATABASE_URL=postgres://postgres:${POSTGRES_PASSWORD:-postgres}@postgres:5432/postgres
+ - HOST=0.0.0.0
+ - PORT=8071
+ - BASE_EVM_RPC_URL=http://eth-node:8545
+ - ROLLUP_CONTRACT_ADDRESS=${ROLLUP_CONTRACT_ADDRESS:-0xdc64a140aa3e981100a9beca4e685f962f0cf6c9}
+ - USDC_CONTRACT_ADDRESS=${USDC_CONTRACT_ADDRESS:-0x5fbdb2315678afecb367f032d93f642f64180aa3}
+ - ACROSS_WITH_AUTHORIZATION_ADDRESS=${ACROSS_WITH_AUTHORIZATION_ADDRESS:-0xb7f8bc63bbcad18155201308c8f3540b07f84f5e}
+ - NODE_RPC_URL=http://node:8091
+ - SKIP_PROOF=true
+ - LOG_LEVEL=INFO
+ - ALFRED_URL=http://providers:8072/alfred
+ - MANTECA_URL=http://providers:8072/manteca
+ - RAIN_URL=http://providers:8072/rain
+ - SUMSUB_URL=http://providers:8072/sumsub
+ networks:
+ - zk-rollup
+ profiles:
+ - dev
+
+ price-cache:
+ build:
+ context: ..
+ dockerfile: docker/Dockerfile.price-cache
+ restart: unless-stopped
+ depends_on:
+ db-migrations:
+ condition: service_completed_successfully
+ postgres:
+ condition: service_started
+ ports:
+ - "8073:8073"
+ environment:
+ - DATABASE_URL=postgres://postgres:${POSTGRES_PASSWORD:-postgres}@postgres:5432/postgres
+ - HOST=0.0.0.0
+ - PORT=8073
+ - ALCHEMY_API_KEY=${ALCHEMY_API_KEY:-demo}
+ - TOKEN_WHITELIST=${TOKEN_WHITELIST:-ETH}
+ - CURRENCIES=${CURRENCIES:-usd}
+ networks:
+ - zk-rollup
+ profiles:
+ - dev
+
+ node:
+ build:
+ context: ..
+ dockerfile: docker/Dockerfile.node
+ args:
+ - WAIT_SECONDS=180
+ restart: unless-stopped
+ depends_on:
+ eth-node:
+ condition: service_healthy
+ ports:
+ - "8091:8091"
+ - "26656:26656"
+ volumes:
+ - node-db:/data/db
+ - smirk-data:/data/smirk
+ environment:
+ POLY_ETH_RPC_URL: "http://eth-node:8545"
+ networks:
+ - zk-rollup
+ profiles:
+ - dev
+ - test
+
+ prover:
+ build:
+ context: ..
+ dockerfile: docker/Dockerfile.node
+ args:
+ - WAIT_SECONDS=180
+ restart: unless-stopped
+ depends_on:
+ eth-node:
+ condition: service_healthy
+ command:
+ [
+ "--mode",
+ "mock-prover",
+ "--db-path",
+ "/polybase/.polybase-prover/db",
+ "--smirk-path",
+ "/polybase/.polybase-prover/smirk",
+ "--rpc-laddr",
+ "0.0.0.0:8092",
+ "--p2p-laddr",
+ "/ip4/0.0.0.0/tcp/5001",
+ ]
+ ports:
+ - "8092:8092"
+ - "5001:5001"
+ volumes:
+ - prover-db:/polybase/.polybase-prover/db
+ - prover-smirk:/polybase/.polybase-prover/smirk
+ environment:
+ POLY_ETH_RPC_URL: "http://eth-node:8545"
+ networks:
+ - zk-rollup
+ profiles:
+ - prover
+
+ payy-evm:
+ build:
+ context: ..
+ dockerfile: docker/Dockerfile.payy-evm
+ restart: unless-stopped
+ command:
+ [
+ "run",
+ "--mode",
+ "sequencer",
+ "--network",
+ "dev",
+ "--block-time",
+ "300ms",
+ "--datadir",
+ "/data",
+ "--http",
+ "--http.addr",
+ "0.0.0.0",
+ "--http.port",
+ "8545",
+ "--http.corsdomain",
+ "*",
+ "--ws",
+ "--ws.addr",
+ "0.0.0.0",
+ "--ws.port",
+ "8546",
+ ]
+ ports:
+ - "18545:8545"
+ - "18546:8546"
+ volumes:
+ - payy-evm-data:/data
+ environment:
+ - LOG_LEVEL=INFO
+ networks:
+ - zk-rollup
+ profiles:
+ - dev
diff --git a/docs/public/protocol/privacy-layer/zk-circuits.md b/docs/public/protocol/privacy-layer/zk-circuits.md
index cb5b097..f2e29a7 100644
--- a/docs/public/protocol/privacy-layer/zk-circuits.md
+++ b/docs/public/protocol/privacy-layer/zk-circuits.md
@@ -1,15 +1,14 @@
# ZK Circuits
-There are three privacy ZK circuits:
+The [PrivacyBridge](../privacybridge.md) interface methods accept the following ZK circuits as proofs:
-1. [**Utxo proof**](https://github.com/polybase/payy/tree/next/noir/utxo) (client/Privacy Vault) - runs on the client or [Privacy Vault](../privacy-vault.md) and proves that a user has permission to spend an input note and generate an output note.
-2. [**Utxo aggregation and inclusion proof**](https://github.com/polybase/payy/tree/next/noir/agg_utxo) (prover) - aggregates Utxo proofs and verifies the new merkle root state
-3. [**Aggregation proof**](https://github.com/polybase/payy/tree/next/noir/agg_agg) (prover) - aggregates Utxo aggregation and inclusion proofs recursively to the required depth to include all Utxo proofs from a single block
-
-The privacy aggregation proof is then combined with the EVM Layer ZK verifier proof, ready for rollup submission to Ethereum.
+- [`transfer`](https://github.com/polybase/payy/tree/main/noir/evm/transfer) - internal transfer within the privacy pool
+- [`burn`](https://github.com/polybase/payy/tree/main/noir/evm/burn) - withdraw from the privacy pool
+- [`mint`](https://github.com/polybase/payy/tree/main/noir/evm/mint) - deposit into the privacy pool
+- [`erc20_transfer`](https://github.com/polybase/payy/tree/main/noir/evm/erc20_transfer) - ERC-20 transfer proof (transparent upgrade using a standard ERC-20 transfer signature)
{% include "../../../../.gitbook/includes/zk-framework.md" %}
-The following diagram shows the ZK circuits used by the Privacy Layer:
+## Manual proof construction
-
+When using the [@payy/client](../../build-on-payy/get-started.md), the client will construct the proofs for you. If you are constructing PrivacyBridge ZK proofs client-side without the Payy SDK, you must use [`@aztec/bb.js` version `3.0.0-manual.20251030`](https://www.npmjs.com/package/@aztec/bb.js/v/3.0.0-manual.20251030) for manual proof generation, with the above ZK circuits.
diff --git a/docs/public/protocol/privacybridge.md b/docs/public/protocol/privacybridge.md
index cd1a2cc..192c65a 100644
--- a/docs/public/protocol/privacybridge.md
+++ b/docs/public/protocol/privacybridge.md
@@ -8,6 +8,8 @@ The bridge verifies privacy proofs through the [Privacy Proof Verify](precompile
All calls to the PrivacyBridge are gas zero rated to enable [zero fee private payments](../stablecoins/zero-fee-payments.md).
{% endhint %}
+If you need to construct PrivacyBridge ZK proofs manually outside the Payy SDK, see the [Manual proof construction](privacy-layer/zk-circuits.md#manual-proof-construction) section in [ZK Circuits](privacy-layer/zk-circuits.md) for the required `@aztec/bb.js` version and circuit source links.
+
```solidity
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;
diff --git a/pkg/beam-cli/Cargo.toml b/pkg/beam-cli/Cargo.toml
new file mode 100644
index 0000000..5a099b5
--- /dev/null
+++ b/pkg/beam-cli/Cargo.toml
@@ -0,0 +1,46 @@
+[package]
+name = "beam-cli"
+version = "0.1.0"
+edition = "2024"
+publish = false
+
+[[bin]]
+name = "beam"
+path = "src/main.rs"
+
+[dependencies]
+argon2 = { workspace = true }
+async-trait = { workspace = true }
+clap = { workspace = true }
+contextful = { workspace = true }
+contracts = { workspace = true }
+dirs = { workspace = true }
+encrypt = { workspace = true }
+eth-util = { workspace = true }
+futures = { workspace = true }
+hex = { workspace = true }
+json-store = { workspace = true }
+num-bigint = { workspace = true }
+rand = { workspace = true }
+reqwest = { workspace = true }
+rlp = { workspace = true }
+rpassword = { workspace = true }
+rustyline = { workspace = true }
+secp256k1 = { workspace = true }
+self-replace = { workspace = true }
+semver = { workspace = true }
+serde = { workspace = true }
+serde_json = { workspace = true }
+serde_yaml = { workspace = true }
+shlex = { workspace = true }
+sha2 = { workspace = true }
+thiserror = { workspace = true }
+tempfile = { workspace = true }
+tokio = { workspace = true }
+web3 = { workspace = true }
+workspace-hack.workspace = true
+
+[dev-dependencies]
+insta = { workspace = true }
+mockito = { workspace = true }
+serial_test = { workspace = true }
diff --git a/pkg/beam-cli/README.md b/pkg/beam-cli/README.md
new file mode 100644
index 0000000..26cee80
--- /dev/null
+++ b/pkg/beam-cli/README.md
@@ -0,0 +1,540 @@
+# beam
+
+`beam` is a Rust CLI for day-to-day EVM wallet work. It covers encrypted local wallets,
+multi-chain RPC defaults, native asset transfers, ERC20 operations, arbitrary contract
+calls, an interactive REPL, and GitHub Releases based self-updates.
+
+The defaults and chain presets are tuned for Payy workflows.
+
+## Install
+
+Install the latest public release:
+
+```bash
+curl -L https://beam.payy.network | bash
+```
+
+Install a specific version:
+
+```bash
+curl -L https://beam.payy.network | bash -s -- 0.1.0
+```
+
+The installer downloads the correct binary for:
+
+- Linux `x86_64`
+- macOS `x86_64`
+- macOS `aarch64`
+
+Before installing, the script selects the newest stable release that includes the current
+platform asset with a valid GitHub Release SHA-256 digest, then verifies the downloaded
+binary against that digest and aborts on any mismatch.
+
+Local development install:
+
+```bash
+cargo run -p beam-cli -- --help
+```
+
+## Quick Start
+
+Create a wallet and make it the default sender:
+
+```bash
+beam wallets create
+beam wallets list
+```
+
+Check tracked balances for your default wallet on Ethereum:
+
+```bash
+beam balance
+```
+
+`beam balance` always lists the native token first and then every tracked ERC20 for the
+selected chain. Use `--from <wallet|address|ens>` to change which owner address the
+balances are loaded from.
+
+Wallet/address selectors accept a stored wallet name, a raw `0x...` address, or an ENS name
+such as `alice.eth`. Beam first checks stored wallet names, then resolves `.eth` inputs
+through ENS.
+
+Switch to Base for a single command:
+
+```bash
+beam --chain base balance
+```
+
+Send native gas token:
+
+```bash
+beam --chain sepolia --from alice transfer 0x1111111111111111111111111111111111111111 0.01
+```
+
+Check an ERC20 balance:
+
+```bash
+beam --chain base balance USDC
+beam --chain base balance 0x833589fcd6edb6e08f4c7c32d4f71b54bda02913
+```
+
+List and manage tracked tokens:
+
+```bash
+beam tokens
+beam tokens add 0x833589fcd6edb6e08f4c7c32d4f71b54bda02913
+beam tokens add 0x0000000000000000000000000000000000000bee BEAMUSD
+beam tokens remove USDC
+```
+
+Run an arbitrary contract call:
+
+```bash
+beam call 0xA0b86991c6218b36c1d19d4a2e9eb0ce3606eb48 "balanceOf(address):(uint256)" 0x1111111111111111111111111111111111111111
+```
+
+Inspect a transaction or block:
+
+```bash
+beam txn 0xabc123...
+beam block latest
+```
+
+Start the interactive REPL:
+
+```bash
+beam
+```
+
+Commands that hit the network show a loading spinner in the default terminal output. In the
+REPL, press `Ctrl-C` to cancel an in-flight request and return to the prompt without exiting
+the session.
+
+Write commands stop waiting automatically and return a `dropped` state if the active RPC stops
+reporting the submitted transaction for roughly 60 seconds.
+
+## Wallets
+
+Wallets are stored in an encrypted local keystore at `~/.beam/wallets.json`.
+
+Supported wallet commands:
+
+```bash
+beam wallets create [name]
+beam wallets import [--name ] [--private-key-stdin | --private-key-fd ]
+beam wallets list
+beam wallets rename
+beam wallets address [--private-key-stdin | --private-key-fd ]
+beam wallets use
+```
+
+Notes:
+
+- Private keys are encrypted before they are written to disk.
+- Each wallet record stores its KDF metadata alongside the encrypted key so future beam releases can keep decrypting older wallets after Argon2 tuning changes.
+- `beam wallets import` and `beam wallets address` read the private key from a hidden prompt by default.
+- Use `--private-key-stdin` for pipelines and `--private-key-fd ` for redirected file descriptors.
+- `beam wallets create` prompts for a wallet name when you omit `[name]`, suggesting the next available `wallet-N` alias and accepting it when you press Enter.
+- `beam wallets import` uses a verified ENS reverse record as the default wallet name when one resolves back to the imported address; otherwise it falls back to the next `wallet-N` alias.
+- The CLI prompts for a password when creating/importing a wallet and rejects empty or whitespace-only values.
+- Beam trims surrounding whitespace and sanitizes terminal control characters in wallet names, rejecting aliases that become empty after normalization.
+- Commands that need signing prompt for the keystore password again before decrypting.
+- If `wallets.json` contains invalid JSON, `beam` fails closed and will not rewrite the file until you repair or restore it.
+- Before signing, Beam re-derives the decrypted wallet address and rejects any keystore entry whose key does not match the stored address.
+- Wallet names cannot start with `0x`, because that prefix is reserved for raw addresses.
+- Wallet names ending in `.eth` must resolve through ENS to that wallet's address before beam accepts them.
+- ENS lookups always use the configured Ethereum RPC, and beam rejects that endpoint for ENS if it does not report chain id `1`.
+- `--from <wallet|address|ens>` selects a sender for a single command.
+- For signing commands, `--from` must still resolve to a wallet stored in the local keystore, even when you pass a raw address or ENS name.
+
+Examples:
+
+```bash
+beam wallets import --name alice
+beam wallets rename alice primary
+printf '%s\n' "$BEAM_PRIVATE_KEY" | beam wallets import --private-key-stdin --name alice
+beam wallets address --private-key-fd 3 3< ~/.config/beam/private-key.txt
+```
+
+The signing flow is built on a `Signer` abstraction so hardware-wallet implementations can
+be added later without changing command handlers.
+
+## Chains
+
+`beam` ships with built-in presets for:
+
+- Ethereum (`1`)
+- Base (`8453`)
+- Polygon (`137`)
+- BNB (`56`)
+- Arbitrum (`42161`)
+- Payy Testnet (`7298`)
+- Payy Dev (`7297`)
+- Sepolia (`11155111`)
+- Hardhat (`1337`)
+
+The built-in mainnet and testnet presets default to public RPC endpoints that do not require
+an API key. You can still override them per command with `--rpc` or persist a different
+default with `beam rpc use`.
+
+Select a chain by name or chain id:
+
+```bash
+beam --chain base balance
+beam --chain 8453 balance
+```
+
+Per-invocation overrides:
+
+- `--chain <name|chain-id>`
+- `--rpc <url>`
+- `--from <wallet|address|ens>`
+
+List chains and RPCs:
+
+```bash
+beam chains list
+beam rpc list
+beam --chain base rpc list
+```
+
+Set the default chain:
+
+```bash
+beam chains use base
+```
+
+Add a custom chain:
+
+```bash
+beam chains add "Beam Dev" https://beam.example/dev --chain-id 31337 --native-symbol BEAM
+```
+
+If you omit the chain name or RPC URL, `beam chains add` prompts for them interactively. When
+`--chain-id` is omitted, beam reads the chain id from the RPC endpoint before saving the chain.
+When `--chain-id` is provided, beam still verifies that the RPC endpoint reports the same
+chain id before persisting the chain. Custom names are trimmed and sanitized for terminal
+control characters before they are stored, and they must not reuse an existing selector,
+including builtin aliases like `eth`/`bsc` or numeric ids like `1`.
+
+Manage RPCs for the selected chain (either `--chain ` or the configured default chain):
+
+```bash
+beam --chain base rpc add https://beam.example/base-backup
+beam --chain base rpc use https://beam.example/base-backup
+beam --chain base rpc remove https://beam.example/base-backup
+```
+
+Custom chain metadata is stored in `~/.beam/chains.json`. Global defaults and per-chain RPC
+configuration live in `~/.beam/config.json`.
+
+Beam validates RPC URLs before running a command, so malformed values from `--rpc`,
+`config.json`, or `beam chains add` fail with a normal CLI error instead of crashing.
+
+## ERC20 Defaults
+
+`beam` preloads known token metadata into `~/.beam/config.json` on first run and also keeps a
+per-chain tracked-token list for `beam balance` and `beam tokens`.
+
+Built-in labels:
+
+- `USDC`
+- `USDT`
+
+You can use a label or a raw token address with balance and ERC20 commands:
+
+```bash
+beam --chain base balance USDC
+beam erc20 transfer 0xTokenAddress 0xRecipient 25
+beam erc20 approve USDT 0xSpender 1000
+beam tokens add 0xTokenAddress
+```
+
+Beam rejects decimal precisions above `77` when converting human-readable values into
+on-chain integer units, so hostile token metadata or oversized manual `--decimals`
+input fails with a normal CLI validation error instead of crashing.
+
+## Utility Commands
+
+`beam util` exposes the pure/local cast-style helpers that do not require Beam config,
+wallets, RPCs, OpenChain, or Etherscan. The command runs as a standalone path, so it works
+even when `~/.beam` has not been initialized.
+
+Examples:
+
+```bash
+beam util sig "transfer(address,uint256)"
+beam util calldata "transfer(address,uint256)" 0x1111111111111111111111111111111111111111 5
+beam util abi-encode-event "Transfer(address indexed,address indexed,uint256)" \
+ 0x1111111111111111111111111111111111111111 \
+ 0x2222222222222222222222222222222222222222 \
+ 5
+beam util to-wei 1 gwei
+beam util from-wei 1000000000 gwei
+beam util index address 0x1111111111111111111111111111111111111111 1
+beam util create2 --deployer 0x0000000000000000000000000000000000000000 \
+ --salt 0x0000000000000000000000000000000000000000000000000000000000000000 \
+ --init-code 0x00
+```
+
+Supported `beam util` subcommands:
+
+- ABI and calldata: `abi-encode`, `abi-encode-event`, `calldata`, `decode-abi`,
+ `decode-calldata`, `decode-error`, `decode-event`, `decode-string`, `pretty-calldata`,
+ `sig`, `sig-event`
+- Bytes and text: `address-zero`, `concat-hex`, `format-bytes32-string`, `from-bin`,
+ `from-utf8`, `hash-zero`, `pad`, `parse-bytes32-address`, `parse-bytes32-string`,
+ `to-ascii`, `to-bytes32`, `to-check-sum-address`, `to-hexdata`, `to-utf8`
+- Units and number transforms: `format-units`, `from-fixed-point`, `from-wei`, `max-int`,
+ `max-uint`, `min-int`, `parse-units`, `shl`, `shr`, `to-base`, `to-dec`,
+ `to-fixed-point`, `to-hex`, `to-int256`, `to-uint256`, `to-unit`, `to-wei`
+- Hashing, storage, and address derivation: `compute-address`, `create2`, `hash-message`,
+ `index`, `index-erc7201`, `keccak`, `namehash`
+- RLP: `from-rlp`, `to-rlp`
+
+Several helpers also accept stdin when you omit the positional value, so shell pipelines map
+cleanly onto `beam util`.
+
+## Command Reference
+
+Top-level commands:
+
+```bash
+beam wallets
+beam util
+beam chains list
+beam chains add [name] [rpc] [--chain-id ] [--native-symbol ]
+beam chains remove
+beam chains use
+beam rpc list [--chain ]
+beam [--chain ] rpc add [rpc]
+beam [--chain ] rpc remove
+beam [--chain ] rpc use
+beam [--chain ] tokens [list]
+beam [--chain ] tokens add [token|token-address] [label] [--decimals ]
+beam [--chain ] tokens remove
+beam [--chain ] [--from ] balance [token|token-address]
+beam transfer
+beam txn
+beam block [latest|pending|safe|finalized||]
+beam erc20 balance [name|address|ens]
+beam erc20 transfer
+beam erc20 approve
+beam call [args...]
+beam send [--value ] [args...]
+beam update
+```
+
+Useful examples:
+
+```bash
+beam --output json balance
+beam --from alice balance USDC
+beam tokens
+beam --chain base tokens add 0xTokenAddress
+beam chains list
+beam --chain base rpc list
+beam --chain arbitrum erc20 balance USDT
+beam txn 0xTransactionHash
+beam block 21000000
+beam send 0xContract "approve(address,uint256)" 0xSpender 1000000
+beam send --value 0.01 0xContract "deposit()"
+beam call 0xContract "symbol():(string)"
+```
+
+Function signatures use standard ABI signature syntax. For read-only calls, include output
+types when you want decoded output, for example:
+
+```bash
+beam call 0xContract "name():(string)"
+beam call 0xContract "getReserves():(uint112,uint112,uint32)"
+```
+
+Write commands wait indefinitely for a mined receipt by default. After Beam has submitted the
+transaction, the default terminal loader updates with the transaction hash and pending/mined
+status. Press `Ctrl-C` to stop waiting without losing the transaction hash; Beam prints the
+submitted hash (and any known block number) so you can keep tracking it with `beam txn` or
+`beam block`.
+
+Use `--value` with `beam send` to attach native token to payable contract methods, for
+example `beam send --value 0.01 0xContract "deposit()"`.
+
+In the default terminal output mode, RPC-backed commands show a loader while requests are in
+flight. Press `Ctrl-C` during a read-only RPC loader to cancel the in-flight request; in the
+REPL Beam returns to the prompt, and in one-shot CLI invocations Beam exits with the standard
+interrupt status. Successful write commands print the confirmed transaction hash and block so
+you can verify the result immediately, while interrupted waits still print the submitted hash.
+
+## Interactive Mode
+
+Running `beam` with no args opens a REPL with history, faded autosuggestions, and tab
+completion.
+
+Interactive commands:
+
+```text
+wallets
+chains
+rpc
+balance
+tokens
+help
+exit
+```
+
+Slash-prefixed REPL aliases are not supported. Use bare shortcuts like `wallets ` or
+the normal clap command forms such as `wallets create ...` / `beam wallets create ...`.
+
+The REPL also accepts the normal `beam` command set, including flags, nested subcommands,
+and clap help output. You can enter those commands either as `transfer ...` / `wallets create`
+or with a leading `beam`, and the current wallet, chain, and RPC selections are used as
+defaults unless you override them on that command. Interactive startup flags such as
+`--chain`, `--from`, and `--rpc` only seed that initial session state. If you later change
+the selected wallet, chain, or current chain RPC through a normal CLI subcommand, Beam
+reconciles the in-memory REPL selection before rendering the next prompt so renamed or
+removed selectors fall back cleanly instead of killing the session. If you later change
+chains, Beam falls back to the newly selected chain's configured RPC unless you also choose
+another RPC for that chain. The `help` shortcut prints the full CLI help text plus the
+REPL-only `exit` command, and both tab completion and inline suggestions follow the same
+command tree while also surfacing matching history values. When you have typed part of a
+command, `Up` / `Down` search only history entries with that prefix; on an empty prompt they
+cycle through previously submitted commands.
+The `balance` shortcut prints the full tracked-token report for the current session owner, and
+the regular CLI form still handles one-off selectors such as `balance USDC` or `tokens add ...`.
+When a write command is waiting on-chain, `Ctrl-C` stops the wait, prints the submitted
+transaction hash, and returns you to the REPL instead of exiting Beam. Use `Ctrl-D` or `exit`
+to leave interactive mode.
+
+The prompt shows the active wallet alias (or raw address override), a shortened address,
+the active chain, and the current RPC endpoint.
+The chain segment is tinted per network brand in color-capable terminals, and all Payy
+networks use `#E0FF32`.
+
+Sensitive wallet commands are never written to REPL history, and startup immediately rewrites
+`~/.beam/history.txt` after scrubbing previously persisted `wallets import` / `wallets address`
+entries, including mistyped slash-prefixed variants such as `/wallets import`.
+
+Interactive startup only reads the cached update status. If a previous background refresh
+found a newer GitHub Release, `beam` prints a warning before entering the REPL and refreshes
+that cache again in the background when the last GitHub check is older than 24 hours.
+
+If you run `update` from the REPL, beam always relaunches itself after a successful
+self-update so you are immediately running the new binary. When the current session still
+matches the original startup flags, beam reuses them; otherwise it falls back to a plain
+`beam` restart.
+
+## Configuration
+
+Default files:
+
+- `~/.beam/config.json`
+- `~/.beam/chains.json`
+- `~/.beam/wallets.json`
+- `~/.beam/history.txt`
+- `~/.beam/update-status.json`
+
+To relocate all beam state, set `BEAM_HOME`:
+
+```bash
+BEAM_HOME=/tmp/beam beam wallets list
+```
+
+`config.json` fields:
+
+- `default_chain`
+- `default_wallet`
+- `known_tokens`
+- `tracked_tokens`
+- `rpc_configs` with the configured RPC URLs and default RPC for each chain
+
+`chains.json` stores custom chain metadata added through `beam chains add`.
+
+Selecting a different chain uses that chain's configured RPC unless you also pass `--rpc`
+or set `rpc` in the REPL. In interactive mode, changing the session chain clears any prior
+session RPC override so the prompt and subsequent commands stay on the selected network.
+
+`beam` also supports structured output modes for scripting:
+
+- `--output default`
+- `--output json`
+- `--output yaml`
+- `--output markdown`
+- `--output compact`
+- `--output quiet`
+
+Human-facing warnings, errors, and the interactive prompt use color automatically when beam is
+writing to a terminal. Override that behavior with `--color auto`, `--color always`, or
+`--color never`.
+
+Non-interactive update notices are only printed in `default` output mode and use the cached
+update status instead of waiting on GitHub before the command runs.
+
+## Self-Updates
+
+Use:
+
+```bash
+beam update
+```
+
+The command checks the public `polybase/payy` GitHub Releases feed, selects the newest
+stable release that includes a matching binary for the current platform with a valid
+GitHub Release SHA-256 digest, downloads that asset, verifies the digest, and only then
+swaps the running executable in place.
+
+`beam update` bypasses the normal Beam state bootstrap, so it still reaches the public
+GitHub Releases feed even when local `config.json`, `chains.json`, or `wallets.json` need
+repair.
+
+Normal startup and non-update commands do not wait on GitHub. They refresh
+`update-status.json` asynchronously at most once every 24 hours, and `beam update` remains
+the only command that requires the live release check to finish before proceeding.
+
+Release tags use the `beam-v<version>` format and publish assets named:
+
+- `beam-x86_64-unknown-linux-gnu`
+- `beam-x86_64-apple-darwin`
+- `beam-aarch64-apple-darwin`
+
+The public installer and `beam update` only consider non-draft, non-prerelease
+`beam-v<version>` releases from `polybase/payy`, and they only select a release when it
+contains the current platform asset with a valid `sha256:` digest. Other repository release
+trains do not affect Beam downloads.
+
+The release workflow only publishes a given `beam-v<version>` tag once. If that tag already
+exists, reruns skip publication rather than replacing assets, so cut a new Beam version
+before triggering another public release.
+
+## Serving `beam.payy.network`
+
+`beam.payy.network` should serve `scripts/install-beam.sh` as the public installer entrypoint.
+
+One straightforward setup is:
+
+1. Publish `scripts/install-beam.sh` to a static host such as GitHub Pages.
+2. Configure the host to serve the script at `/`.
+3. Point the `beam.payy.network` DNS record at that static host.
+4. Keep the script in sync with the current public GitHub Releases asset naming scheme.
+
+The release workflow lives in the internal repo but is mirrored into `polybase/payy` via
+Copybara so the public repo can publish the assets that `beam update` and the installer
+consume.
+
+If you use GitHub Pages, a simple `CNAME` record from `beam.payy.network` to the Pages host
+is enough as long as the root URL responds with the installer script body.
+
+## Development
+
+From the repository root:
+
+```bash
+cargo check -p beam-cli
+cargo test -p beam-cli
+```
+
+Full workspace verification is still required before merging:
+
+```bash
+cargo xtask lint
+cargo xtask test
+```
diff --git a/pkg/beam-cli/src/abi.rs b/pkg/beam-cli/src/abi.rs
new file mode 100644
index 0000000..1dcfcc4
--- /dev/null
+++ b/pkg/beam-cli/src/abi.rs
@@ -0,0 +1,230 @@
+// lint-long-file-override allow-max-lines=260
+use contextful::ResultContextExt;
+use serde_json::{Value, json};
+use web3::ethabi::{
+ Function, Param, ParamType, StateMutability, Token,
+ ethereum_types::U256,
+ param_type::Reader,
+ token::{LenientTokenizer, Tokenizer},
+};
+
+use crate::error::{Error, Result};
+
+pub fn parse_function(signature: &str, state_mutability: StateMutability) -> Result {
+ let signature = signature.trim();
+ let (input_signature, output_signature) = split_signature(signature)?;
+ let open = input_signature
+ .find('(')
+ .ok_or_else(|| Error::InvalidFunctionSignature {
+ signature: signature.to_string(),
+ })?;
+ let name = input_signature[..open].trim();
+
+ if name.is_empty() {
+ return Err(Error::InvalidFunctionSignature {
+ signature: signature.to_string(),
+ });
+ }
+
+ let inputs = param_list(
+ &input_signature[open + 1..input_signature.len() - 1],
+ "arg",
+ signature,
+ )?;
+ let outputs = match output_signature {
+ Some(output_signature) => param_list(
+ &output_signature[1..output_signature.len() - 1],
+ "out",
+ signature,
+ )?,
+ None => Vec::new(),
+ };
+
+ #[allow(deprecated)]
+ let function = Function {
+ name: name.to_string(),
+ inputs,
+ outputs,
+ constant: None,
+ state_mutability,
+ };
+
+ Ok(function)
+}
+
+pub fn encode_input(function: &Function, args: &[String]) -> Result> {
+ let tokens = tokenize_args(function, args)?;
+ let data = function
+ .encode_input(&tokens)
+ .context("encode beam abi input")?;
+ Ok(data)
+}
+
+pub fn decode_output(function: &Function, data: &[u8]) -> Result> {
+ if function.outputs.is_empty() {
+ return Ok(Vec::new());
+ }
+
+ let tokens = function
+ .decode_output(data)
+ .context("decode beam abi output")?;
+ Ok(tokens)
+}
+
+pub fn tokens_to_json(tokens: &[Token]) -> Value {
+ Value::Array(tokens.iter().map(token_to_json).collect())
+}
+
+fn split_signature(signature: &str) -> Result<(String, Option)> {
+ let input_close = input_close_index(signature)?;
+ let input = signature[..=input_close].to_string();
+ let rest = signature[input_close + 1..].trim();
+
+ if rest.is_empty() {
+ return Ok((input, None));
+ }
+
+ let rest = rest.strip_prefix(':').unwrap_or(rest).trim();
+ if rest.starts_with('(') && rest.ends_with(')') {
+ return Ok((input, Some(rest.to_string())));
+ }
+
+ Err(Error::InvalidFunctionSignature {
+ signature: signature.to_string(),
+ })
+}
+
+fn input_close_index(signature: &str) -> Result {
+ let open = signature
+ .find('(')
+ .ok_or_else(|| Error::InvalidFunctionSignature {
+ signature: signature.to_string(),
+ })?;
+ let mut depth = 0usize;
+
+ for (index, ch) in signature.char_indices().skip(open) {
+ match ch {
+ '(' => depth += 1,
+ ')' => {
+ depth -= 1;
+ if depth == 0 {
+ return Ok(index);
+ }
+ }
+ _ => {}
+ }
+ }
+
+ Err(Error::InvalidFunctionSignature {
+ signature: signature.to_string(),
+ })
+}
+
+fn param_list(list: &str, prefix: &str, signature: &str) -> Result> {
+ let types = if list.trim().is_empty() {
+ Vec::new()
+ } else {
+ tuple_items(list, signature)?
+ };
+
+ Ok(types
+ .into_iter()
+ .enumerate()
+ .map(|(index, kind)| Param {
+ name: format!("{prefix}{index}"),
+ kind,
+ internal_type: None,
+ })
+ .collect())
+}
+
+fn tokenize_args(function: &Function, args: &[String]) -> Result> {
+ if function.inputs.len() != args.len() {
+ return Err(Error::InvalidArgumentCount {
+ expected: function.inputs.len(),
+ got: args.len(),
+ });
+ }
+
+ function
+ .inputs
+ .iter()
+ .zip(args)
+ .map(|(param, arg)| tokenize_param(¶m.kind, arg))
+ .collect()
+}
+
+fn tuple_items(list: &str, signature: &str) -> Result> {
+ let type_string = format!("({})", list.replace(' ', ""));
+ let tuple = read_param_type(&type_string, signature)?;
+
+ match tuple {
+ ParamType::Tuple(items) => Ok(items),
+ _ => unreachable!(),
+ }
+}
+
+pub(crate) fn read_param_type(kind: &str, signature: &str) -> Result {
+ Reader::read(kind).map_err(|_| Error::InvalidFunctionSignature {
+ signature: signature.to_string(),
+ })
+}
+
+pub(crate) fn tokenize_param(kind: &ParamType, value: &str) -> Result {
+ LenientTokenizer::tokenize(kind, value).map_err(|_| invalid_abi_argument(kind, value))
+}
+
+fn invalid_abi_argument(kind: &ParamType, value: &str) -> Error {
+ match kind {
+ ParamType::Address => Error::InvalidAddress {
+ value: value.to_string(),
+ },
+ ParamType::Uint(_) | ParamType::Int(_) => Error::InvalidNumber {
+ value: value.to_string(),
+ },
+ ParamType::Bytes | ParamType::FixedBytes(_) => Error::InvalidHexData {
+ value: value.to_string(),
+ },
+ ParamType::Bool => Error::InvalidAbiArgument {
+ kind: "bool".to_string(),
+ value: value.to_string(),
+ },
+ ParamType::String => Error::InvalidAbiArgument {
+ kind: "string".to_string(),
+ value: value.to_string(),
+ },
+ ParamType::Array(_) | ParamType::FixedArray(_, _) => Error::InvalidAbiArgument {
+ kind: "array".to_string(),
+ value: value.to_string(),
+ },
+ ParamType::Tuple(_) => Error::InvalidAbiArgument {
+ kind: "tuple".to_string(),
+ value: value.to_string(),
+ },
+ }
+}
+
+pub fn token_to_json(token: &Token) -> Value {
+ match token {
+ Token::Address(address) => json!(format!("{address:#x}")),
+ Token::FixedBytes(bytes) | Token::Bytes(bytes) => {
+ json!(format!("0x{}", hex::encode(bytes)))
+ }
+ Token::Int(value) => json!(format_signed_int(value)),
+ Token::Uint(value) => json!(value.to_string()),
+ Token::Bool(value) => json!(value),
+ Token::String(value) => json!(value),
+ Token::FixedArray(items) | Token::Array(items) | Token::Tuple(items) => {
+ Value::Array(items.iter().map(token_to_json).collect())
+ }
+ }
+}
+
+fn format_signed_int(value: &U256) -> String {
+ if !value.bit(255) {
+ return value.to_string();
+ }
+
+ let magnitude = (!*value).overflowing_add(U256::from(1u8)).0;
+ format!("-{magnitude}")
+}
diff --git a/pkg/beam-cli/src/chains.rs b/pkg/beam-cli/src/chains.rs
new file mode 100644
index 0000000..ac9e3ae
--- /dev/null
+++ b/pkg/beam-cli/src/chains.rs
@@ -0,0 +1,236 @@
+// lint-long-file-override allow-max-lines=300
+use std::{collections::BTreeMap, path::Path};
+
+use contextful::ResultContextExt;
+use contracts::Client;
+use json_store::{FileAccess, InvalidJsonBehavior, JsonStore};
+use serde::{Deserialize, Serialize};
+
+use crate::error::{Error, Result};
+
// Default public RPC endpoints for the built-in chains. The
// publicnode.com URLs are free, unauthenticated gateways; the payy and
// hardhat entries point at the project's own networks or a local node.
const ETHEREUM_RPC_URL: &str = "https://ethereum-rpc.publicnode.com";
const BASE_RPC_URL: &str = "https://base-rpc.publicnode.com";
const POLYGON_RPC_URL: &str = "https://polygon-bor-rpc.publicnode.com";
const BNB_RPC_URL: &str = "https://bsc-rpc.publicnode.com";
const ARBITRUM_RPC_URL: &str = "https://arbitrum-one-rpc.publicnode.com";
const PAYY_TESTNET_RPC_URL: &str = "https://rpc.testnet.payy.network";
const PAYY_DEV_RPC_URL: &str = "http://127.0.0.1:8546";
const SEPOLIA_RPC_URL: &str = "https://ethereum-sepolia-rpc.publicnode.com";

// A built-in chain description:
// (key, display name, chain id, native symbol, default RPC URL, aliases).
type BuiltinChainSpec = (
    &'static str,
    &'static str,
    u64,
    &'static str,
    &'static str,
    &'static [&'static str],
);
+
// Chains shipped with beam out of the box. Entries follow the
// `BuiltinChainSpec` field order; aliases are matched after
// canonicalization (see `canonicalize`).
const BUILTIN_CHAINS: [BuiltinChainSpec; 9] = [
    ("ethereum", "Ethereum", 1, "ETH", ETHEREUM_RPC_URL, &["eth"]),
    ("base", "Base", 8453, "ETH", BASE_RPC_URL, &[]),
    ("polygon", "Polygon", 137, "MATIC", POLYGON_RPC_URL, &[]),
    ("bnb", "BNB", 56, "BNB", BNB_RPC_URL, &["bsc", "binance"]),
    (
        "arbitrum",
        "Arbitrum",
        42161,
        "ETH",
        ARBITRUM_RPC_URL,
        &["arb"],
    ),
    (
        "payy-testnet",
        "Payy Testnet",
        7298,
        "PUSD",
        PAYY_TESTNET_RPC_URL,
        &["payy", "payytestnet"],
    ),
    (
        "payy-dev",
        "Payy Dev",
        7297,
        "PUSD",
        PAYY_DEV_RPC_URL,
        &["payydev"],
    ),
    ("sepolia", "Sepolia", 11155111, "ETH", SEPOLIA_RPC_URL, &[]),
    (
        "hardhat",
        "Hardhat",
        1337,
        "ETH",
        "http://127.0.0.1:8545",
        &["local"],
    ),
];
+
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+pub struct ChainEntry {
+ pub aliases: Vec,
+ pub chain_id: u64,
+ pub display_name: String,
+ pub is_builtin: bool,
+ pub key: String,
+ pub native_symbol: String,
+}
+
+#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
+pub struct BeamChains {
+ pub chains: Vec,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+pub struct ConfiguredChain {
+ #[serde(default)]
+ pub aliases: Vec,
+ pub chain_id: u64,
+ pub name: String,
+ #[serde(default = "default_native_symbol")]
+ pub native_symbol: String,
+}
+
+pub async fn load_chains(root: &Path) -> Result> {
+ let store = JsonStore::new_with_invalid_json_behavior_and_access(
+ root,
+ "chains.json",
+ InvalidJsonBehavior::Error,
+ FileAccess::OwnerOnly,
+ )
+ .await
+ .context("load beam chains store")?;
+ Ok(store)
+}
+
+pub fn all_chains(configured: &BeamChains) -> Vec {
+ let mut chains = builtin_chains();
+ chains.extend(configured.chains.iter().map(custom_chain_entry));
+ chains
+}
+
+pub fn find_chain(selection: &str, configured: &BeamChains) -> Result {
+ let chains = all_chains(configured);
+ if let Ok(chain_id) = selection.parse::()
+ && let Some(chain) = chains.iter().find(|entry| entry.chain_id == chain_id)
+ {
+ return Ok(chain.clone());
+ }
+
+ let needle = canonicalize(selection);
+ let chain = chains
+ .into_iter()
+ .find(|entry| entry.key == needle || entry.aliases.iter().any(|alias| alias == &needle))
+ .ok_or_else(|| Error::UnknownChain {
+ chain: selection.to_string(),
+ })?;
+
+ Ok(chain)
+}
+
+pub fn builtin_rpc_url(chain_key: &str) -> Option<&'static str> {
+ BUILTIN_CHAINS
+ .iter()
+ .find_map(|spec| (spec.0 == chain_key).then_some(spec.4))
+}
+
/// Public wrapper exposing the canonical storage key for a chain name
/// (see `canonicalize` for the normalization rules).
pub fn chain_key(name: &str) -> String {
    canonicalize(name)
}
+
+pub async fn resolve_rpc_chain_id(rpc_url: &str) -> Result {
+ let client = client_for_rpc(rpc_url)?;
+ resolve_client_chain_id(&client).await
+}
+
+pub async fn resolve_client_chain_id(client: &Client) -> Result {
+ let chain_id = client
+ .chain_id_contracts()
+ .await
+ .context("fetch beam chain id from rpc")?;
+
+ Ok(chain_id.low_u64())
+}
+
+pub async fn ensure_client_matches_chain_id(
+ chain_key: &str,
+ expected_chain_id: u64,
+ client: &Client,
+) -> Result<()> {
+ let actual_chain_id = resolve_client_chain_id(client).await?;
+ if actual_chain_id != expected_chain_id {
+ return Err(Error::RpcChainIdMismatch {
+ actual: actual_chain_id,
+ chain: chain_key.to_string(),
+ expected: expected_chain_id,
+ });
+ }
+
+ Ok(())
+}
+
/// Connect to `rpc_url` and verify it serves `expected_chain_id`;
/// convenience wrapper over `ensure_client_matches_chain_id`.
pub async fn ensure_rpc_matches_chain_id(
    chain_key: &str,
    expected_chain_id: u64,
    rpc_url: &str,
) -> Result<()> {
    let client = client_for_rpc(rpc_url)?;
    ensure_client_matches_chain_id(chain_key, expected_chain_id, &client).await
}
+
+pub fn default_rpc_urls() -> BTreeMap {
+ BUILTIN_CHAINS
+ .iter()
+ .map(|spec| (spec.0.to_string(), spec.4.to_string()))
+ .collect()
+}
+
/// Serde default for `ConfiguredChain::native_symbol`.
fn default_native_symbol() -> String {
    String::from("ETH")
}
+
+fn builtin_chains() -> Vec {
+ BUILTIN_CHAINS.iter().map(builtin_entry).collect()
+}
+
+fn client_for_rpc(rpc_url: &str) -> Result {
+ Client::try_new(rpc_url, None).map_err(|_| Error::InvalidRpcUrl {
+ value: rpc_url.to_string(),
+ })
+}
+
+fn builtin_entry(spec: &BuiltinChainSpec) -> ChainEntry {
+ ChainEntry {
+ aliases: spec.5.iter().map(|alias| canonicalize(alias)).collect(),
+ chain_id: spec.2,
+ display_name: spec.1.to_string(),
+ is_builtin: true,
+ key: spec.0.to_string(),
+ native_symbol: spec.3.to_string(),
+ }
+}
+
+fn custom_chain_entry(chain: &ConfiguredChain) -> ChainEntry {
+ ChainEntry {
+ aliases: chain
+ .aliases
+ .iter()
+ .map(|alias| canonicalize(alias))
+ .collect(),
+ chain_id: chain.chain_id,
+ display_name: chain.name.clone(),
+ is_builtin: false,
+ key: canonicalize(&chain.name),
+ native_symbol: chain.native_symbol.clone(),
+ }
+}
+
/// Canonical chain key: trimmed, underscores treated as spaces, then
/// whitespace-separated segments lowercased and joined with hyphens.
fn canonicalize(value: &str) -> String {
    value
        .trim()
        .replace('_', " ")
        .split_whitespace()
        .map(|segment| segment.to_ascii_lowercase())
        .collect::<Vec<_>>()
        .join("-")
}
diff --git a/pkg/beam-cli/src/cli.rs b/pkg/beam-cli/src/cli.rs
new file mode 100644
index 0000000..482eef6
--- /dev/null
+++ b/pkg/beam-cli/src/cli.rs
@@ -0,0 +1,275 @@
+// lint-long-file-override allow-max-lines=280
+pub mod util;
+
+use clap::{Args, Parser, Subcommand};
+
+use crate::{display::ColorMode, output::OutputMode, runtime::InvocationOverrides};
+use util::UtilAction;
+
+#[derive(Debug, Parser)]
+#[command(name = "beam", version, about = "Ethereum wallet CLI")]
+pub struct Cli {
+ #[command(subcommand)]
+ pub command: Option,
+
+ #[arg(long, global = true)]
+ pub rpc: Option,
+
+ #[arg(long, global = true)]
+ pub from: Option,
+
+ #[arg(long, global = true)]
+ pub chain: Option,
+
+ #[arg(long, global = true, value_enum, default_value_t = OutputMode::Default)]
+ pub output: OutputMode,
+
+ #[arg(
+ long,
+ global = true,
+ value_enum,
+ default_value_t = ColorMode::Auto,
+ help = "The color of the log messages"
+ )]
+ pub color: ColorMode,
+
+ #[arg(long, global = true, hide = true, default_value_t = false)]
+ pub no_update_check: bool,
+}
+
+#[derive(Debug, Subcommand)]
+pub enum Command {
+ /// Manage stored wallets
+ #[command(name = "wallets")]
+ Wallet {
+ #[command(subcommand)]
+ action: WalletAction,
+ },
+ /// Run standalone utility commands
+ Util {
+ #[command(subcommand)]
+ action: UtilAction,
+ },
+ /// Manage chain presets
+ #[command(name = "chains")]
+ Chain {
+ #[command(subcommand)]
+ action: ChainAction,
+ },
+ /// Manage RPC endpoints for the active chain
+ Rpc {
+ #[command(subcommand)]
+ action: RpcAction,
+ },
+ /// Manage tracked tokens for the active chain
+ Tokens {
+ #[command(subcommand)]
+ action: Option,
+ },
+ /// Show balances for tracked tokens or a specific token
+ Balance(BalanceArgs),
+ /// Send the native token
+ Transfer(TransferArgs),
+ /// Inspect a transaction
+ #[command(name = "txn", visible_alias = "tx")]
+ Txn(TxnArgs),
+ /// Inspect a block
+ Block(BlockArgs),
+ /// Work with ERC20 tokens
+ Erc20 {
+ #[command(subcommand)]
+ action: Erc20Action,
+ },
+ /// Run a read-only contract call
+ Call(CallArgs),
+ /// Send a contract transaction
+ Send(SendArgs),
+ /// Check for beam updates
+ Update,
+ #[command(name = "__refresh-update-status", hide = true)]
+ RefreshUpdateStatus,
+}
+
+#[derive(Debug, Subcommand)]
+pub enum WalletAction {
+ /// Create a new wallet
+ Create { name: Option },
+ /// Import a wallet from a private key
+ Import {
+ #[command(flatten)]
+ private_key_source: PrivateKeySourceArgs,
+ #[arg(long)]
+ name: Option,
+ },
+ /// List stored wallets
+ List,
+ /// Rename a stored wallet
+ Rename { name: String, new_name: String },
+ /// Derive an address from a private key
+ Address {
+ #[command(flatten)]
+ private_key_source: PrivateKeySourceArgs,
+ },
+ /// Set the default wallet
+ Use { name: String },
+}
+
// Subcommands under `beam chains`; only custom (non-builtin) chains can
// be removed.
#[derive(Debug, Subcommand)]
pub enum ChainAction {
    /// List available chains
    List,
    /// Add a custom chain
    Add(ChainAddArgs),
    /// Remove a custom chain
    Remove { chain: String },
    /// Set the default chain
    Use { chain: String },
}
+
+#[derive(Clone, Debug, Args)]
+pub struct ChainAddArgs {
+ pub name: Option,
+ pub rpc: Option,
+ #[arg(long)]
+ pub chain_id: Option,
+ #[arg(long)]
+ pub native_symbol: Option,
+}
+
+#[derive(Clone, Debug, Default, Args, PartialEq, Eq)]
+pub struct PrivateKeySourceArgs {
+ #[arg(
+ long,
+ default_value_t = false,
+ conflicts_with = "private_key_fd",
+ help = "Read the private key from stdin instead of prompting"
+ )]
+ pub private_key_stdin: bool,
+
+ #[arg(
+ long,
+ value_name = "FD",
+ conflicts_with = "private_key_stdin",
+ help = "Read the private key from an already-open file descriptor"
+ )]
+ pub private_key_fd: Option,
+}
+
// Subcommands under `beam rpc`; all operate on the active chain's
// endpoint list.
#[derive(Debug, Subcommand)]
pub enum RpcAction {
    /// List RPC endpoints for the active chain
    List,
    /// Add an RPC endpoint to the active chain
    Add(RpcAddArgs),
    /// Remove an RPC endpoint from the active chain
    Remove { rpc: String },
    /// Set the default RPC endpoint for the active chain
    Use { rpc: String },
}
+
+#[derive(Clone, Debug, Args)]
+pub struct RpcAddArgs {
+ pub rpc: Option,
+}
+
+#[derive(Debug, Subcommand)]
+pub enum Erc20Action {
+ /// Show an ERC20 token balance
+ Balance {
+ token: String,
+ address: Option,
+ },
+ /// Send ERC20 tokens
+ Transfer {
+ token: String,
+ to: String,
+ amount: String,
+ },
+ /// Approve an ERC20 spender
+ Approve {
+ token: String,
+ spender: String,
+ amount: String,
+ },
+}
+
// Subcommands under `beam tokens`; operate on the active chain's
// tracked-token list.
#[derive(Debug, Subcommand)]
pub enum TokenAction {
    /// List tracked tokens and their balances
    List,
    /// Add a token to the tracked list
    Add(TokenAddArgs),
    /// Remove a token from the tracked list
    Remove { token: String },
}
+
+#[derive(Clone, Debug, Args)]
+pub struct TokenAddArgs {
+ pub token: Option,
+ pub label: Option,
+ #[arg(long)]
+ pub decimals: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct BalanceArgs {
+ pub token: Option,
+}
+
// Positional arguments for `beam transfer`: recipient and a decimal
// amount of the native token (both parsed later by the handler).
#[derive(Clone, Debug, Args)]
pub struct TransferArgs {
    pub to: String,
    pub amount: String,
}
+
// Positional argument for `beam txn`: the transaction hash to inspect.
#[derive(Clone, Debug, Args)]
pub struct TxnArgs {
    pub tx_hash: String,
}
+
+#[derive(Clone, Debug, Args)]
+pub struct BlockArgs {
+ pub block: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct CallArgs {
+ pub contract: String,
+ pub function_sig: String,
+ pub args: Vec,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct SendArgs {
+ #[command(flatten)]
+ pub call: CallArgs,
+
+ #[arg(long, help = "Amount of native token to attach to the contract call")]
+ pub value: Option,
+}
+
+impl Cli {
+ pub fn overrides(&self) -> InvocationOverrides {
+ InvocationOverrides {
+ chain: self.chain.clone(),
+ from: self.from.clone(),
+ rpc: self.rpc.clone(),
+ }
+ }
+
+ pub fn is_interactive(&self) -> bool {
+ self.command.is_none()
+ }
+}
+
+impl Command {
+ pub(crate) fn is_sensitive(&self) -> bool {
+ matches!(self, Self::Wallet { action } if action.is_sensitive())
+ }
+}
+
+impl WalletAction {
+ pub(crate) fn is_sensitive(&self) -> bool {
+ matches!(self, Self::Import { .. } | Self::Address { .. })
+ }
+}
diff --git a/pkg/beam-cli/src/cli/util.rs b/pkg/beam-cli/src/cli/util.rs
new file mode 100644
index 0000000..f2305d1
--- /dev/null
+++ b/pkg/beam-cli/src/cli/util.rs
@@ -0,0 +1,263 @@
+// lint-long-file-override allow-max-lines=280
+use clap::{Args, Subcommand};
+
// Standalone utility subcommands (`beam util ...`) covering ABI
// encoding/decoding, hashing, and numeric/string conversions. Variant
// `///` comments are clap help text and are preserved verbatim.
#[derive(Debug, Subcommand)]
pub enum UtilAction {
    /// Encode ABI arguments
    AbiEncode(AbiSignatureArgs),
    /// Encode ABI event arguments
    AbiEncodeEvent(AbiSignatureArgs),
    /// Print the zero address
    AddressZero,
    /// Build calldata from a function signature
    Calldata(AbiSignatureArgs),
    /// Compute a contract deployment address
    ComputeAddress(ComputeAddressArgs),
    /// Concatenate hex values
    ConcatHex(MultiValueArgs),
    /// Compute a CREATE2 contract address
    Create2(Create2Args),
    /// Decode ABI input or output data
    DecodeAbi(DecodeAbiArgs),
    /// Decode function calldata
    DecodeCalldata(SignatureDataArgs),
    /// Decode custom error data
    DecodeError(DecodeErrorArgs),
    /// Decode event data and topics
    DecodeEvent(DecodeEventArgs),
    /// Decode an ABI-encoded string
    DecodeString(InputValueArgs),
    /// Convert text to a bytes32 string
    FormatBytes32String(InputValueArgs),
    /// Format a value using a unit scale
    FormatUnits(ValueUnitArgs),
    /// Convert binary input to hex
    FromBin(InputValueArgs),
    /// Convert a fixed-point value to an integer
    FromFixedPoint(DecimalsValueArgs),
    /// Decode an RLP value
    FromRlp(FromRlpArgs),
    /// Convert UTF-8 text to hex
    FromUtf8(InputValueArgs),
    /// Format wei as decimal units
    FromWei(ValueUnitArgs),
    /// Hash a message with the Ethereum prefix
    HashMessage(InputValueArgs),
    /// Print the zero hash
    HashZero,
    /// Compute a mapping storage slot
    Index(IndexArgs),
    /// Compute an ERC-7201 storage slot
    IndexErc7201(InputValueArgs),
    /// Compute a keccak256 hash
    Keccak(InputValueArgs),
    /// Print the maximum signed integer
    MaxInt(IntegerTypeArgs),
    /// Print the maximum unsigned integer
    MaxUint(IntegerTypeArgs),
    /// Print the minimum signed integer
    MinInt(IntegerTypeArgs),
    /// Compute an ENS namehash
    Namehash(InputValueArgs),
    /// Pad hex data to a fixed length
    Pad(PadArgs),
    /// Decode an address from bytes32
    ParseBytes32Address(InputValueArgs),
    /// Decode text from bytes32
    ParseBytes32String(InputValueArgs),
    /// Parse decimal units into an integer
    ParseUnits(ValueUnitArgs),
    /// Pretty-print calldata
    PrettyCalldata(InputValueArgs),
    /// Shift a value left
    Shl(ShiftArgs),
    /// Shift a value right
    Shr(ShiftArgs),
    /// Compute a function selector
    Sig(InputValueArgs),
    /// Compute an event topic selector
    SigEvent(InputValueArgs),
    /// Convert hex data to ASCII
    ToAscii(InputValueArgs),
    /// Convert a value to another base
    ToBase(BaseConvertArgs),
    /// Convert a value to bytes32
    ToBytes32(InputValueArgs),
    /// Format an address with checksum casing
    ToCheckSumAddress(ChecksumArgs),
    /// Convert a value to decimal
    ToDec(BaseValueArgs),
    /// Convert an integer to fixed-point form
    ToFixedPoint(DecimalsValueArgs),
    /// Convert a value to hex
    ToHex(BaseValueArgs),
    /// Normalize input as hex data
    ToHexdata(InputValueArgs),
    /// Convert a value to int256
    ToInt256(SignedInputValueArgs),
    /// Encode a value as RLP
    ToRlp(InputValueArgs),
    /// Convert a value to uint256
    ToUint256(InputValueArgs),
    /// Convert a value to a named unit
    ToUnit(ValueUnitArgs),
    /// Convert hex data to UTF-8
    ToUtf8(InputValueArgs),
    /// Parse decimal units into wei
    ToWei(ValueUnitArgs),
}
+
+#[derive(Clone, Debug, Args)]
+pub struct AbiSignatureArgs {
+ pub sig: String,
+ pub args: Vec,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct BaseConvertArgs {
+ #[arg(allow_hyphen_values = true)]
+ pub value: Option,
+ pub base: Option,
+ #[arg(long = "base-in")]
+ pub base_in: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct BaseValueArgs {
+ #[arg(allow_hyphen_values = true)]
+ pub value: Option,
+ #[arg(long = "base-in")]
+ pub base_in: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct ChecksumArgs {
+ pub address: Option,
+ pub chain_id: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct ComputeAddressArgs {
+ pub address: Option,
+ #[arg(long)]
+ pub nonce: Option,
+ #[arg(long)]
+ pub salt: Option,
+ #[arg(long = "init-code", conflicts_with = "init_code_hash")]
+ pub init_code: Option,
+ #[arg(long = "init-code-hash", conflicts_with = "init_code")]
+ pub init_code_hash: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct Create2Args {
+ #[arg(long)]
+ pub deployer: Option,
+ #[arg(long)]
+ pub salt: String,
+ #[arg(long = "init-code", conflicts_with = "init_code_hash")]
+ pub init_code: Option,
+ #[arg(long = "init-code-hash", conflicts_with = "init_code")]
+ pub init_code_hash: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct DecodeAbiArgs {
+ pub sig: String,
+ pub calldata: Option,
+ #[arg(short, long, default_value_t = false)]
+ pub input: bool,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct DecodeErrorArgs {
+ #[arg(long)]
+ pub sig: String,
+ pub data: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct DecodeEventArgs {
+ #[arg(long)]
+ pub sig: String,
+ pub data: Option,
+ #[arg(long = "topic")]
+ pub topics: Vec,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct DecimalsValueArgs {
+ pub decimals: Option,
+ #[arg(allow_hyphen_values = true)]
+ pub value: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct FromRlpArgs {
+ pub value: Option,
+ #[arg(long, default_value_t = false)]
+ pub as_int: bool,
+}
+
// Inputs for `util index`: the mapping key's ABI type, the key value,
// and the mapping's declared storage slot number.
#[derive(Clone, Debug, Args)]
pub struct IndexArgs {
    pub key_type: String,
    pub key: String,
    pub slot_number: String,
}
+
+#[derive(Clone, Debug, Args)]
+pub struct InputValueArgs {
+ pub value: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct IntegerTypeArgs {
+ pub ty: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct MultiValueArgs {
+ pub values: Vec,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct PadArgs {
+ pub data: Option,
+ #[arg(long, default_value_t = 32)]
+ pub len: usize,
+ #[arg(long, default_value_t = false, conflicts_with = "left")]
+ pub right: bool,
+ #[arg(long, default_value_t = false, conflicts_with = "right")]
+ pub left: bool,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct ShiftArgs {
+ #[arg(allow_hyphen_values = true)]
+ pub value: String,
+ pub bits: String,
+ #[arg(long = "base-in")]
+ pub base_in: Option,
+ #[arg(long = "base-out")]
+ pub base_out: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct SignatureDataArgs {
+ pub sig: String,
+ pub data: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct SignedInputValueArgs {
+ #[arg(allow_hyphen_values = true)]
+ pub value: Option,
+}
+
+#[derive(Clone, Debug, Args)]
+pub struct ValueUnitArgs {
+ #[arg(allow_hyphen_values = true)]
+ pub value: Option,
+ pub unit: Option,
+}
diff --git a/pkg/beam-cli/src/commands/balance.rs b/pkg/beam-cli/src/commands/balance.rs
new file mode 100644
index 0000000..21e0124
--- /dev/null
+++ b/pkg/beam-cli/src/commands/balance.rs
@@ -0,0 +1,93 @@
+use serde_json::json;
+
+use crate::{
+ cli::BalanceArgs,
+ commands::{erc20, tokens},
+ error::{Error, Result},
+ evm::{erc20_balance, format_units, native_balance},
+ human_output::sanitize_control_chars,
+ output::{CommandOutput, balance_message, with_loading},
+ runtime::BeamApp,
+};
+
/// `beam balance [TOKEN]`: show the native balance, an ERC20 balance
/// when a token selector is given, or the full tracked-token listing
/// when no selector is supplied.
pub async fn run(app: &BeamApp, args: BalanceArgs) -> Result<()> {
    // No token argument: fall through to the tracked-token listing.
    let Some(token_selector) = args.token else {
        return tokens::list_tokens(app).await;
    };
    let (chain, client) = app.active_chain_client().await?;
    let address = app.active_address().await?;
    if !tokens::is_native_selector(&token_selector, &chain.entry.native_symbol) {
        // ERC20 path: resolve the tracked token for the active chain.
        let token = app
            .token_for_chain(&token_selector, &chain.entry.key)
            .await?;
        // Labels come from user config, so strip control characters
        // before echoing them to the terminal.
        let display_label = sanitize_control_chars(&token.label);
        let (label, decimals, balance) = with_loading(
            app.output_mode,
            format!("Fetching {display_label} balance for {address:#x}..."),
            async {
                let (label, decimals) = tokens::resolve_erc20_metadata(&client, &token).await?;
                let balance = erc20_balance(&client, token.address, address).await?;
                Ok::<_, Error>((label, decimals, balance))
            },
        )
        .await?;
        let formatted = format_units(balance, decimals);

        return erc20::render_balance_output(
            &chain.entry.key,
            &label,
            &format!("{:#x}", token.address),
            &format!("{address:#x}"),
            &formatted,
            decimals,
            &balance.to_string(),
        )
        .print(app.output_mode);
    }

    // Native path: fetch the chain's base-asset balance.
    let balance = with_loading(
        app.output_mode,
        format!("Fetching balance for {address:#x}..."),
        async { native_balance(&client, address).await },
    )
    .await?;
    // assumes the native asset uses 18 decimals — TODO confirm for
    // chains whose base asset differs.
    let formatted = format_units(balance, 18);
    let address = format!("{address:#x}");
    let wei = balance.to_string();

    render_balance_output(
        &chain.entry.key,
        &chain.entry.native_symbol,
        &chain.rpc_url,
        &address,
        &formatted,
        &wei,
    )
    .print(app.output_mode)
}
+
+pub(crate) fn render_balance_output(
+ chain_key: &str,
+ native_symbol: &str,
+ rpc_url: &str,
+ address: &str,
+ formatted: &str,
+ wei: &str,
+) -> CommandOutput {
+ CommandOutput::new(
+ balance_message(format!("{formatted} {native_symbol}"), address),
+ json!({
+ "address": address,
+ "balance": formatted,
+ "chain": chain_key,
+ "native_symbol": native_symbol,
+ "rpc_url": rpc_url,
+ "wei": wei,
+ }),
+ )
+ .compact(formatted.to_string())
+ .markdown(format!(
+ "- Chain: `{}`\n- Address: `{address}`\n- Balance: `{formatted} {}`",
+ chain_key, native_symbol,
+ ))
+}
diff --git a/pkg/beam-cli/src/commands/block.rs b/pkg/beam-cli/src/commands/block.rs
new file mode 100644
index 0000000..2a6bfa2
--- /dev/null
+++ b/pkg/beam-cli/src/commands/block.rs
@@ -0,0 +1,149 @@
+use contextful::ResultContextExt;
+use serde_json::json;
+use web3::types::{BlockId, BlockNumber, H256};
+
+use crate::{
+ cli::BlockArgs,
+ error::{Error, Result},
+ output::{CommandOutput, with_loading},
+ runtime::BeamApp,
+};
+
/// `beam block [SELECTOR]`: fetch a block (defaulting to "latest") and
/// print it in the active output format.
pub async fn run(app: &BeamApp, args: BlockArgs) -> Result<()> {
    let (chain, client) = app.active_chain_client().await?;
    let selector = args.block.unwrap_or_else(|| "latest".to_string());
    let block_id = parse_block_id(&selector)?;
    let block = with_loading(
        app.output_mode,
        format!("Fetching block {selector}..."),
        async {
            client
                .block(block_id)
                .await
                .context("fetch beam block")?
                // A successful RPC call can still return no block
                // (e.g. unknown hash or future number).
                .ok_or_else(|| Error::BlockNotFound {
                    block: selector.clone(),
                })
        },
    )
    .await?;
    // Hash/number are absent on pending blocks, hence Option.
    let block_hash = block.hash.map(|value| format!("{value:#x}"));
    let block_number = block.number.map(|value| value.as_u64());
    let json_block = serde_json::to_value(&block).context("serialize beam block output")?;

    CommandOutput::new(
        render_block_default(&chain.entry.key, &selector, &block),
        json!({
            "block": json_block,
            "chain": chain.entry.key,
            "selector": selector,
        }),
    )
    // Compact output prefers the hash, then the number, then "unknown".
    .compact(
        block_hash
            .clone()
            .or_else(|| block_number.map(|value| value.to_string()))
            .unwrap_or_else(|| "unknown".to_string()),
    )
    .markdown(render_block_markdown(&chain.entry.key, &selector, &block))
    .print(app.output_mode)
}
+
+pub(crate) fn parse_block_id(value: &str) -> Result {
+ let value = value.trim();
+ let block = match value {
+ "latest" => BlockId::Number(BlockNumber::Latest),
+ "earliest" => BlockId::Number(BlockNumber::Earliest),
+ "pending" => BlockId::Number(BlockNumber::Pending),
+ "safe" => BlockId::Number(BlockNumber::Safe),
+ "finalized" | "finalised" => BlockId::Number(BlockNumber::Finalized),
+ value if value.starts_with("0x") && value.len() == 66 => {
+ BlockId::Hash(parse_hash(value).map_err(|_| Error::InvalidBlockSelector {
+ value: value.to_string(),
+ })?)
+ }
+ value if value.starts_with("0x") => {
+ let block_number =
+ u64::from_str_radix(value.trim_start_matches("0x"), 16).map_err(|_| {
+ Error::InvalidBlockSelector {
+ value: value.to_string(),
+ }
+ })?;
+ BlockId::Number(BlockNumber::Number(block_number.into()))
+ }
+ value => {
+ let block_number = value
+ .parse::()
+ .map_err(|_| Error::InvalidBlockSelector {
+ value: value.to_string(),
+ })?;
+ BlockId::Number(BlockNumber::Number(block_number.into()))
+ }
+ };
+
+ Ok(block)
+}
+
+fn parse_hash(value: &str) -> std::result::Result {
+ value.parse::().map_err(|_| ())
+}
+
+fn render_block_default(chain: &str, selector: &str, block: &web3::types::Block) -> String {
+ let number = block
+ .number
+ .map_or_else(|| "unknown".to_string(), |value| value.as_u64().to_string());
+ let hash = block
+ .hash
+ .map_or_else(|| "unknown".to_string(), |value| format!("{value:#x}"));
+ let base_fee = block
+ .base_fee_per_gas
+ .map_or_else(|| "unknown".to_string(), |value| value.to_string());
+ let miner = format!("{:#x}", block.author);
+ let size = block
+ .size
+ .map_or_else(|| "unknown".to_string(), |value| value.to_string());
+
+ format!(
+ "Chain: {chain}\nSelector: {selector}\nNumber: {}\nHash: {}\nParent: {:#x}\nTimestamp: {}\nTransactions: {}\nGas used: {}\nGas limit: {}\nBase fee: {}\nMiner: {}\nSize: {}",
+ number,
+ hash,
+ block.parent_hash,
+ block.timestamp,
+ block.transactions.len(),
+ block.gas_used,
+ block.gas_limit,
+ base_fee,
+ miner,
+ size,
+ )
+}
+
+fn render_block_markdown(chain: &str, selector: &str, block: &web3::types::Block) -> String {
+ let number = block
+ .number
+ .map_or_else(|| "unknown".to_string(), |value| value.as_u64().to_string());
+ let hash = block
+ .hash
+ .map_or_else(|| "unknown".to_string(), |value| format!("{value:#x}"));
+ let base_fee = block
+ .base_fee_per_gas
+ .map_or_else(|| "unknown".to_string(), |value| value.to_string());
+ let miner = format!("{:#x}", block.author);
+ let size = block
+ .size
+ .map_or_else(|| "unknown".to_string(), |value| value.to_string());
+
+ format!(
+ "- Chain: `{chain}`\n- Selector: `{selector}`\n- Number: `{}`\n- Hash: `{}`\n- Parent: `{:#x}`\n- Timestamp: `{}`\n- Transactions: `{}`\n- Gas used: `{}`\n- Gas limit: `{}`\n- Base fee: `{}`\n- Miner: `{}`\n- Size: `{}`",
+ number,
+ hash,
+ block.parent_hash,
+ block.timestamp,
+ block.transactions.len(),
+ block.gas_used,
+ block.gas_limit,
+ base_fee,
+ miner,
+ size,
+ )
+}
diff --git a/pkg/beam-cli/src/commands/call.rs b/pkg/beam-cli/src/commands/call.rs
new file mode 100644
index 0000000..0ff5176
--- /dev/null
+++ b/pkg/beam-cli/src/commands/call.rs
@@ -0,0 +1,199 @@
+use contracts::U256;
+use serde_json::json;
+use web3::ethabi::{Function, ParamType, StateMutability};
+
+use crate::{
+ abi::parse_function,
+ cli::{CallArgs, SendArgs},
+ commands::signing::prompt_active_signer,
+ error::Result,
+ evm::{FunctionCall, call_function, parse_units, send_function},
+ output::{
+ CommandOutput, confirmed_transaction_message, dropped_transaction_message,
+ pending_transaction_message, with_loading, with_loading_handle,
+ },
+ runtime::{BeamApp, parse_address},
+ transaction::{TransactionExecution, loading_message},
+};
+
+/// Execute `beam call`: run a read-only (view) contract call against the
+/// active chain and print the raw and, when available, decoded result.
+pub async fn run_read(app: &BeamApp, args: CallArgs) -> Result<()> {
+ let (chain, client) = app.active_chain_client().await?;
+ let contract = parse_address(&args.contract)?;
+ let function = parse_function(&args.function_sig, StateMutability::View)?;
+ // Address-typed arguments may be wallet names; resolve them first.
+ let call_args = resolve_address_args(app, &function, &args.args).await?;
+ // A caller address is optional for reads; some contracts vary by caller.
+ let from = app.active_optional_address().await?;
+ let outcome = with_loading(
+ app.output_mode,
+ format!("Calling {contract:#x}..."),
+ async { call_function(&client, from, contract, &function, &call_args).await },
+ )
+ .await?;
+ // Only show the decoded form when ABI decoding succeeded.
+ let default = match &outcome.decoded {
+ Some(decoded) => format!("Raw: {}\nDecoded: {decoded}", outcome.raw),
+ None => outcome.raw.clone(),
+ };
+
+ CommandOutput::new(
+ default,
+ json!({
+ "chain": chain.entry.key,
+ "contract": format!("{contract:#x}"),
+ "decoded": outcome.decoded,
+ "raw": outcome.raw,
+ "signature": args.function_sig,
+ }),
+ )
+ .compact(outcome.raw)
+ .print(app.output_mode)
+}
+
+/// Execute `beam send`: sign and submit a state-changing contract call, then
+/// report one of three terminal outcomes (confirmed, pending, dropped).
+pub async fn run_write(app: &BeamApp, args: SendArgs) -> Result<()> {
+ let (chain, client) = app.active_chain_client().await?;
+ let chain_key = chain.entry.key.clone();
+ let native_symbol = chain.entry.native_symbol.clone();
+ // Keep the user's original value string for display; parse separately.
+ let value_display = args.value.clone().unwrap_or_else(|| "0".to_string());
+ let value = parse_transaction_value(args.value.as_deref())?;
+ let contract = parse_address(&args.call.contract)?;
+ let function = parse_function(&args.call.function_sig, StateMutability::NonPayable)?;
+ let call_args = resolve_address_args(app, &function, &args.call.args).await?;
+ let signer = prompt_active_signer(app).await?;
+ // Human-readable description reused for loading/progress messages.
+ let action = if value.is_zero() {
+ format!("transaction to {contract:#x}")
+ } else {
+ format!("transaction to {contract:#x} with {value_display} {native_symbol}")
+ };
+ // Ctrl-C stops waiting for confirmation; it does not cancel the tx.
+ let execution = with_loading_handle(
+ app.output_mode,
+ format!("Sending {action} and waiting for confirmation..."),
+ |loading| async move {
+ send_function(
+ &client,
+ &signer,
+ FunctionCall {
+ args: &call_args,
+ contract,
+ function: &function,
+ value,
+ },
+ move |update| loading.set_message(loading_message(&action, &update)),
+ tokio::signal::ctrl_c(),
+ )
+ .await
+ },
+ )
+ .await?;
+
+ // Each terminal state renders a matching summary plus a JSON payload with
+ // a shared shape ("state" distinguishes the variants).
+ match execution {
+ TransactionExecution::Confirmed(outcome) => {
+ let tx_hash = outcome.tx_hash.clone();
+ let block_number = outcome.block_number;
+ let summary = if value.is_zero() {
+ format!("Confirmed transaction to {contract:#x}")
+ } else {
+ format!(
+ "Confirmed transaction to {contract:#x} with {value_display} {native_symbol}"
+ )
+ };
+
+ CommandOutput::new(
+ confirmed_transaction_message(summary, &tx_hash, block_number),
+ json!({
+ "block_number": block_number,
+ "chain": chain_key,
+ "contract": format!("{contract:#x}"),
+ "native_symbol": native_symbol,
+ "signature": args.call.function_sig,
+ "state": "confirmed",
+ "status": outcome.status,
+ "tx_hash": tx_hash,
+ "value": value_display,
+ }),
+ )
+ .compact(outcome.tx_hash.clone())
+ .print(app.output_mode)
+ }
+ TransactionExecution::Pending(pending) => {
+ let tx_hash = pending.tx_hash.clone();
+ let summary = if value.is_zero() {
+ format!(
+ "Submitted transaction to {contract:#x} and stopped waiting for confirmation"
+ )
+ } else {
+ format!(
+ "Submitted transaction to {contract:#x} with {value_display} {native_symbol} and stopped waiting for confirmation"
+ )
+ };
+
+ CommandOutput::new(
+ pending_transaction_message(summary, &tx_hash, pending.block_number),
+ json!({
+ "block_number": pending.block_number,
+ "chain": chain_key,
+ "contract": format!("{contract:#x}"),
+ "native_symbol": native_symbol,
+ "signature": args.call.function_sig,
+ "state": "pending",
+ "status": null,
+ "tx_hash": tx_hash,
+ "value": value_display,
+ }),
+ )
+ .compact(tx_hash)
+ .print(app.output_mode)
+ }
+ TransactionExecution::Dropped(dropped) => {
+ let tx_hash = dropped.tx_hash.clone();
+ let summary = if value.is_zero() {
+ format!(
+ "Submitted transaction to {contract:#x}, but the node no longer reports the transaction"
+ )
+ } else {
+ format!(
+ "Submitted transaction to {contract:#x} with {value_display} {native_symbol}, but the node no longer reports the transaction"
+ )
+ };
+
+ CommandOutput::new(
+ dropped_transaction_message(summary, &tx_hash, dropped.block_number),
+ json!({
+ "block_number": dropped.block_number,
+ "chain": chain_key,
+ "contract": format!("{contract:#x}"),
+ "native_symbol": native_symbol,
+ "signature": args.call.function_sig,
+ "state": "dropped",
+ "status": null,
+ "tx_hash": tx_hash,
+ "value": value_display,
+ }),
+ )
+ .compact(dropped.tx_hash)
+ .print(app.output_mode)
+ }
+ }
+}
+
+pub(crate) async fn resolve_address_args(
+ app: &BeamApp,
+ function: &Function,
+ args: &[String],
+) -> Result> {
+ if function.inputs.len() != args.len() {
+ return Ok(args.to_vec());
+ }
+
+ let mut resolved = Vec::with_capacity(args.len());
+ for (param, arg) in function.inputs.iter().zip(args) {
+ if matches!(param.kind, ParamType::Address) {
+ resolved.push(format!("{:#x}", app.resolve_wallet_or_address(arg).await?));
+ } else {
+ resolved.push(arg.clone());
+ }
+ }
+
+ Ok(resolved)
+}
+
+pub(crate) fn parse_transaction_value(value: Option<&str>) -> Result {
+ value.map_or(Ok(U256::zero()), |value| parse_units(value, 18))
+}
diff --git a/pkg/beam-cli/src/commands/chain.rs b/pkg/beam-cli/src/commands/chain.rs
new file mode 100644
index 0000000..af717ca
--- /dev/null
+++ b/pkg/beam-cli/src/commands/chain.rs
@@ -0,0 +1,289 @@
+// lint-long-file-override allow-max-lines=300
+use contextful::ResultContextExt;
+use serde_json::json;
+
+use crate::{
+ chains::{
+ BeamChains, ConfiguredChain, all_chains, chain_key, ensure_rpc_matches_chain_id,
+ find_chain, resolve_rpc_chain_id,
+ },
+ cli::{ChainAction, ChainAddArgs},
+ config::ChainRpcConfig,
+ error::{Error, Result},
+ human_output::{sanitize_control_chars, sanitize_control_chars_trimmed},
+ output::{CommandOutput, with_loading},
+ prompts::{prompt_required, prompt_with_default},
+ runtime::BeamApp,
+ table::{render_markdown_table, render_table},
+};
+
+// Chain key restored as the default when the current default chain is removed.
+const DEFAULT_CHAIN_KEY: &str = "ethereum";
+// Native-currency symbol used when the user provides none.
+const DEFAULT_NATIVE_SYMBOL: &str = "ETH";
+
+pub async fn run(app: &BeamApp, action: ChainAction) -> Result<()> {
+ match action {
+ ChainAction::List => list_chains(app).await,
+ ChainAction::Add(args) => add_chain(app, args).await,
+ ChainAction::Remove { chain } => remove_chain(app, &chain).await,
+ ChainAction::Use { chain } => use_chain(app, &chain).await,
+ }
+}
+
+/// Add a custom chain: collect missing fields interactively, validate the
+/// RPC, then persist both the chain entry and its default RPC config.
+pub(crate) async fn add_chain(app: &BeamApp, args: ChainAddArgs) -> Result<()> {
+ let ChainAddArgs {
+ name,
+ rpc,
+ chain_id,
+ native_symbol,
+ } = args;
+ // Only prompt for a native symbol when the invocation is already
+ // interactive (name or rpc missing); fully scripted calls stay silent.
+ let interactive_native_symbol = native_symbol.is_none() && (name.is_none() || rpc.is_none());
+ let name = normalize_chain_name(match name {
+ Some(name) => name,
+ None => prompt_required("beam chain name")?,
+ })?;
+ let rpc_url = match rpc {
+ Some(rpc_url) => rpc_url,
+ None => prompt_required("beam chain rpc")?,
+ };
+ let mut beam_chains = app.chain_store.get().await;
+ validate_new_chain_name(&name, &beam_chains)?;
+ let key = chain_key(&name);
+ // Either verify the user-supplied chain id against the RPC, or ask the
+ // RPC for its chain id.
+ let chain_id = with_loading(
+ app.output_mode,
+ format!("Validating RPC {rpc_url}..."),
+ async {
+ match chain_id {
+ Some(chain_id) => {
+ ensure_rpc_matches_chain_id(&key, chain_id, &rpc_url).await?;
+ Ok(chain_id)
+ }
+ None => resolve_rpc_chain_id(&rpc_url).await,
+ }
+ },
+ )
+ .await?;
+
+ let native_symbol = normalize_native_symbol(match native_symbol {
+ Some(native_symbol) => Some(native_symbol),
+ None if interactive_native_symbol => Some(prompt_with_default(
+ "beam chain native symbol",
+ DEFAULT_NATIVE_SYMBOL,
+ )?),
+ None => None,
+ });
+ let configured_chain = ConfiguredChain {
+ aliases: Vec::new(),
+ chain_id,
+ name: name.clone(),
+ native_symbol: native_symbol.clone(),
+ };
+
+ // Reject duplicates by chain id across builtin and custom chains.
+ let existing = all_chains(&beam_chains);
+ if existing.iter().any(|chain| chain.chain_id == chain_id) {
+ return Err(Error::ChainIdAlreadyExists { chain_id });
+ }
+
+ let mut config = app.config_store.get().await;
+ config
+ .rpc_configs
+ .insert(key.clone(), ChainRpcConfig::new(rpc_url.clone()));
+ beam_chains.chains.push(configured_chain);
+
+ app.config_store
+ .set(config)
+ .await
+ .context("persist beam chain rpc config")?;
+ app.chain_store
+ .set(beam_chains)
+ .await
+ .context("persist beam chains")?;
+
+ CommandOutput::new(
+ format!(
+ "Added chain {} ({}, id {chain_id}) with default RPC {rpc_url}",
+ sanitize_control_chars(&name),
+ sanitize_control_chars(&key)
+ ),
+ json!({
+ "chain": key,
+ "chain_id": chain_id,
+ "default_rpc": rpc_url,
+ "name": name,
+ "native_symbol": native_symbol,
+ }),
+ )
+ .compact(format!("{} {chain_id}", sanitize_control_chars(&key)))
+ .print(app.output_mode)
+}
+
+/// Remove a custom chain plus every piece of config that references it
+/// (RPCs, known/tracked tokens, default-chain selection).
+pub(crate) async fn remove_chain(app: &BeamApp, selection: &str) -> Result<()> {
+ let mut beam_chains = app.chain_store.get().await;
+ let chain = find_chain(selection, &beam_chains)?;
+ // Builtin chains are not removable.
+ if chain.is_builtin {
+ return Err(Error::BuiltinChainRemovalNotAllowed {
+ chain: chain.key.clone(),
+ });
+ }
+
+ beam_chains
+ .chains
+ .retain(|configured| chain_key(&configured.name) != chain.key);
+
+ let mut config = app.config_store.get().await;
+ config.rpc_configs.remove(&chain.key);
+ config.known_tokens.remove(&chain.key);
+ config.tracked_tokens.remove(&chain.key);
+ // If the removed chain was the default, fall back to the builtin default.
+ if config.default_chain == chain.key {
+ config.default_chain = DEFAULT_CHAIN_KEY.to_string();
+ }
+
+ app.chain_store
+ .set(beam_chains)
+ .await
+ .context("persist beam chains")?;
+ app.config_store
+ .set(config)
+ .await
+ .context("persist beam chain removal config")?;
+
+ CommandOutput::new(
+ format!(
+ "Removed chain {} ({})",
+ sanitize_control_chars(&chain.display_name),
+ sanitize_control_chars(&chain.key)
+ ),
+ json!({
+ "chain": chain.key,
+ "chain_id": chain.chain_id,
+ "name": chain.display_name,
+ }),
+ )
+ .compact(sanitize_control_chars(&chain.key))
+ .print(app.output_mode)
+}
+
+pub(crate) async fn use_chain(app: &BeamApp, selection: &str) -> Result<()> {
+ let beam_chains = app.chain_store.get().await;
+ let chain = find_chain(selection, &beam_chains)?;
+ let config = app.config_store.get().await;
+ if config.rpc_config_for_chain(&chain).is_none() {
+ return Err(Error::NoRpcConfigured {
+ chain: chain.key.clone(),
+ });
+ }
+
+ let mut config = config;
+ config.default_chain = chain.key.clone();
+
+ app.config_store
+ .set(config)
+ .await
+ .context("persist beam default chain")?;
+
+ CommandOutput::new(
+ format!(
+ "Default chain set to {} ({})",
+ sanitize_control_chars(&chain.display_name),
+ chain.chain_id
+ ),
+ json!({
+ "chain": chain.key,
+ "chain_id": chain.chain_id,
+ "name": chain.display_name,
+ }),
+ )
+ .compact(sanitize_control_chars(&chain.key))
+ .print(app.output_mode)
+}
+
+async fn list_chains(app: &BeamApp) -> Result<()> {
+ let beam_chains = app.chain_store.get().await;
+ let chains = all_chains(&beam_chains);
+ let config = app.config_store.get().await;
+ let rows = chains
+ .iter()
+ .map(|chain| {
+ vec![
+ marker(config.default_chain == chain.key),
+ chain.key.clone(),
+ chain.display_name.clone(),
+ chain.chain_id.to_string(),
+ chain.native_symbol.clone(),
+ config
+ .rpc_config_for_chain(chain)
+ .map(|rpc_config| rpc_config.rpc_urls().len())
+ .unwrap_or_default()
+ .to_string(),
+ if chain.is_builtin {
+ "builtin".to_string()
+ } else {
+ "custom".to_string()
+ },
+ ]
+ })
+ .collect::>();
+ let headers = ["default", "chain", "name", "id", "symbol", "rpcs", "source"];
+
+ CommandOutput::new(
+ render_table(&headers, &rows),
+ json!({
+ "chains": chains.iter().map(|chain| {
+ json!({
+ "chain": chain.key,
+ "chain_id": chain.chain_id,
+ "is_builtin": chain.is_builtin,
+ "is_default": config.default_chain == chain.key,
+ "name": chain.display_name,
+ "native_symbol": chain.native_symbol,
+ "rpc_count": config
+ .rpc_config_for_chain(chain)
+ .map(|rpc_config| rpc_config.rpc_urls().len())
+ .unwrap_or_default(),
+ })
+ }).collect::>()
+ }),
+ )
+ .markdown(render_markdown_table(&headers, &rows))
+ .print(app.output_mode)
+}
+
/// Table-cell marker: `"*"` flags the active/default row, empty otherwise.
fn marker(active: bool) -> String {
    (if active { "*" } else { "" }).to_string()
}
+
+fn normalize_chain_name(name: String) -> Result {
+ let name = sanitize_control_chars_trimmed(&name);
+ if name.is_empty() {
+ return Err(Error::InvalidChainName { name });
+ }
+
+ Ok(name)
+}
+
+/// Reject a new chain name whose derived key collides with an existing chain
+/// key or with another chain's selector (alias/id).
+fn validate_new_chain_name(name: &str, configured: &BeamChains) -> Result<()> {
+ let key = chain_key(name);
+ // `find_chain` succeeding means something already answers to this key.
+ if let Ok(existing_chain) = find_chain(&key, configured) {
+ return Err(if existing_chain.key == key {
+ Error::ChainNameAlreadyExists {
+ name: name.to_string(),
+ }
+ } else {
+ Error::ChainNameConflictsWithSelector {
+ name: name.to_string(),
+ }
+ });
+ }
+
+ Ok(())
+}
+
+fn normalize_native_symbol(native_symbol: Option) -> String {
+ native_symbol
+ .map(|value| sanitize_control_chars_trimmed(&value).to_ascii_uppercase())
+ .filter(|value| !value.is_empty())
+ .unwrap_or_else(|| DEFAULT_NATIVE_SYMBOL.to_string())
+}
diff --git a/pkg/beam-cli/src/commands/erc20.rs b/pkg/beam-cli/src/commands/erc20.rs
new file mode 100644
index 0000000..7f2027f
--- /dev/null
+++ b/pkg/beam-cli/src/commands/erc20.rs
@@ -0,0 +1,293 @@
+// lint-long-file-override allow-max-lines=300
+use serde_json::{Value, json};
+use web3::ethabi::StateMutability;
+
+use crate::{
+ abi::parse_function,
+ cli::Erc20Action,
+ commands::signing::prompt_active_signer,
+ error::{Error, Result},
+ evm::{FunctionCall, erc20_balance, erc20_decimals, format_units, parse_units, send_function},
+ human_output::sanitize_control_chars,
+ output::{
+ CommandOutput, OutputMode, confirmed_transaction_message, dropped_transaction_message,
+ pending_transaction_message, with_loading, with_loading_handle,
+ },
+ runtime::BeamApp,
+ transaction::{TransactionExecution, loading_message},
+};
+
+pub async fn run(app: &BeamApp, action: Erc20Action) -> Result<()> {
+ match action {
+ Erc20Action::Balance { token, address } => balance(app, &token, address.as_deref()).await,
+ Erc20Action::Transfer { token, to, amount } => transfer(app, &token, &to, &amount).await,
+ Erc20Action::Approve {
+ token,
+ spender,
+ amount,
+ } => approve(app, &token, &spender, &amount).await,
+ }
+}
+
+/// Show an ERC-20 balance for the given address (or the active wallet).
+async fn balance(app: &BeamApp, token: &str, address: Option<&str>) -> Result<()> {
+ let (chain, client) = app.active_chain_client().await?;
+ let token = app.token_for_chain(token, &chain.entry.key).await?;
+ let display_label = sanitize_control_chars(&token.label);
+ let owner = match address {
+ Some(address) => app.resolve_wallet_or_address(address).await?,
+ None => app.active_address().await?,
+ };
+ let (decimals, balance) = with_loading(
+ app.output_mode,
+ format!("Fetching {display_label} balance for {owner:#x}..."),
+ async {
+ // Use configured decimals when known; otherwise query the contract.
+ let decimals = token
+ .decimals
+ .unwrap_or(erc20_decimals(&client, token.address).await?);
+ let balance = erc20_balance(&client, token.address, owner).await?;
+ Ok::<_, Error>((decimals, balance))
+ },
+ )
+ .await?;
+ let formatted = format_units(balance, decimals);
+ let owner = format!("{owner:#x}");
+ let token_address = format!("{:#x}", token.address);
+ let value = balance.to_string();
+
+ render_balance_output(
+ &chain.entry.key,
+ &token.label,
+ &token_address,
+ &owner,
+ &formatted,
+ decimals,
+ &value,
+ )
+ .print(app.output_mode)
+}
+
+/// Build the human/JSON/compact output for an ERC-20 balance query.
+/// `formatted` is the decimal-adjusted balance; `value` is the raw integer.
+pub(crate) fn render_balance_output(
+ chain_key: &str,
+ token_label: &str,
+ token_address: &str,
+ owner: &str,
+ formatted: &str,
+ decimals: u8,
+ value: &str,
+) -> CommandOutput {
+ CommandOutput::new(
+ format!(
+ "{formatted} {}\nAddress: {owner}\nToken: {token_address}",
+ sanitize_control_chars(token_label)
+ ),
+ json!({
+ "address": owner,
+ "balance": formatted,
+ "chain": chain_key,
+ "decimals": decimals,
+ "token": token_label,
+ "token_address": token_address,
+ "value": value,
+ }),
+ )
+ .compact(formatted.to_string())
+}
+
+/// Transfer ERC-20 tokens: resolve recipient and decimals, sign, submit, and
+/// report the outcome via the shared token-write renderer.
+async fn transfer(app: &BeamApp, token: &str, to: &str, amount: &str) -> Result<()> {
+ let (chain, client) = app.active_chain_client().await?;
+ let token = app.token_for_chain(token, &chain.entry.key).await?;
+ let token_label = sanitize_control_chars(&token.label);
+ let to = app.resolve_wallet_or_address(to).await?;
+ // Only hit the chain for decimals when the config does not know them.
+ let decimals = match token.decimals {
+ Some(decimals) => decimals,
+ None => {
+ with_loading(
+ app.output_mode,
+ format!("Fetching {token_label} token metadata..."),
+ async { erc20_decimals(&client, token.address).await },
+ )
+ .await?
+ }
+ };
+ let amount_value = parse_units(amount, usize::from(decimals))?;
+ let signer = prompt_active_signer(app).await?;
+ let function = parse_function("transfer(address,uint256)", StateMutability::NonPayable)?;
+ let action = format!("transfer of {amount} {token_label} to {to:#x}");
+ let execution = with_loading_handle(
+ app.output_mode,
+ format!("Sending {action} and waiting for confirmation..."),
+ |loading| async move {
+ send_function(
+ &client,
+ &signer,
+ FunctionCall {
+ args: &[format!("{to:#x}"), amount_value.to_string()],
+ contract: token.address,
+ function: &function,
+ value: 0u8.into(),
+ },
+ move |update| loading.set_message(loading_message(&action, &update)),
+ tokio::signal::ctrl_c(),
+ )
+ .await
+ },
+ )
+ .await?;
+
+ print_token_write_output(
+ app.output_mode,
+ execution,
+ TokenWriteOutputConfig {
+ amount: amount.to_string(),
+ chain_key: chain.entry.key.clone(),
+ confirmed_summary: format!("Confirmed transfer of {amount} {token_label} to {to:#x}"),
+ dropped_summary: format!(
+ "Submitted transfer of {amount} {token_label} to {to:#x}, but the node no longer reports the transaction"
+ ),
+ pending_summary: format!(
+ "Submitted transfer of {amount} {token_label} to {to:#x} and stopped waiting for confirmation"
+ ),
+ target_key: "to",
+ target_value: format!("{to:#x}"),
+ token_address: format!("{:#x}", token.address),
+ token_label: token.label.clone(),
+ },
+ )
+}
+
+/// Approve an ERC-20 spender: resolve spender and decimals, sign, submit, and
+/// report the outcome via the shared token-write renderer.
+async fn approve(app: &BeamApp, token: &str, spender: &str, amount: &str) -> Result<()> {
+ let (chain, client) = app.active_chain_client().await?;
+ let token = app.token_for_chain(token, &chain.entry.key).await?;
+ let token_label = sanitize_control_chars(&token.label);
+ let spender = app.resolve_wallet_or_address(spender).await?;
+ // Only hit the chain for decimals when the config does not know them.
+ let decimals = match token.decimals {
+ Some(decimals) => decimals,
+ None => {
+ with_loading(
+ app.output_mode,
+ format!("Fetching {token_label} token metadata..."),
+ async { erc20_decimals(&client, token.address).await },
+ )
+ .await?
+ }
+ };
+ let amount_value = parse_units(amount, usize::from(decimals))?;
+ let signer = prompt_active_signer(app).await?;
+ let function = parse_function("approve(address,uint256)", StateMutability::NonPayable)?;
+ let action = format!("approval of {amount} {token_label} for {spender:#x}");
+ let execution = with_loading_handle(
+ app.output_mode,
+ format!("Sending {action} and waiting for confirmation..."),
+ |loading| async move {
+ send_function(
+ &client,
+ &signer,
+ FunctionCall {
+ args: &[format!("{spender:#x}"), amount_value.to_string()],
+ contract: token.address,
+ function: &function,
+ value: 0u8.into(),
+ },
+ move |update| loading.set_message(loading_message(&action, &update)),
+ tokio::signal::ctrl_c(),
+ )
+ .await
+ },
+ )
+ .await?;
+
+ print_token_write_output(
+ app.output_mode,
+ execution,
+ TokenWriteOutputConfig {
+ amount: amount.to_string(),
+ chain_key: chain.entry.key.clone(),
+ confirmed_summary: format!(
+ "Confirmed approval of {amount} {token_label} for {spender:#x}"
+ ),
+ dropped_summary: format!(
+ "Submitted approval of {amount} {token_label} for {spender:#x}, but the node no longer reports the transaction"
+ ),
+ pending_summary: format!(
+ "Submitted approval of {amount} {token_label} for {spender:#x} and stopped waiting for confirmation"
+ ),
+ target_key: "spender",
+ target_value: format!("{spender:#x}"),
+ token_address: format!("{:#x}", token.address),
+ token_label: token.label.clone(),
+ },
+ )
+}
+
+/// Inputs for rendering the outcome of a token transfer/approve transaction.
+struct TokenWriteOutputConfig {
+ amount: String,
+ chain_key: String,
+ // Human summaries for each terminal transaction state.
+ confirmed_summary: String,
+ dropped_summary: String,
+ pending_summary: String,
+ // JSON key/value naming the counterparty ("to" or "spender").
+ target_key: &'static str,
+ target_value: String,
+ token_address: String,
+ token_label: String,
+}
+
+/// Render a token write (transfer/approve) result in all output modes,
+/// collapsing the three execution states into one shared JSON shape.
+fn print_token_write_output(
+ output_mode: OutputMode,
+ execution: TransactionExecution,
+ config: TokenWriteOutputConfig,
+) -> Result<()> {
+ let (default, state, block_number, status, tx_hash) = match execution {
+ TransactionExecution::Confirmed(outcome) => (
+ confirmed_transaction_message(
+ config.confirmed_summary,
+ &outcome.tx_hash,
+ outcome.block_number,
+ ),
+ "confirmed",
+ outcome.block_number,
+ outcome.status,
+ outcome.tx_hash,
+ ),
+ TransactionExecution::Pending(pending) => (
+ pending_transaction_message(
+ config.pending_summary,
+ &pending.tx_hash,
+ pending.block_number,
+ ),
+ "pending",
+ pending.block_number,
+ None,
+ pending.tx_hash,
+ ),
+ TransactionExecution::Dropped(dropped) => (
+ dropped_transaction_message(
+ config.dropped_summary,
+ &dropped.tx_hash,
+ dropped.block_number,
+ ),
+ "dropped",
+ dropped.block_number,
+ None,
+ dropped.tx_hash,
+ ),
+ };
+
+ let mut value = json!({
+ "amount": config.amount,
+ "block_number": block_number,
+ "chain": config.chain_key,
+ "state": state,
+ "status": status,
+ "token": config.token_label,
+ "token_address": config.token_address,
+ "tx_hash": tx_hash.clone(),
+ });
+ // The counterparty key varies by operation ("to"/"spender"), so it is
+ // inserted after the fixed fields.
+ value.as_object_mut().expect("token write output").insert(
+ config.target_key.to_string(),
+ Value::String(config.target_value),
+ );
+
+ CommandOutput::new(default, value)
+ .compact(tx_hash)
+ .print(output_mode)
+}
diff --git a/pkg/beam-cli/src/commands/interactive.rs b/pkg/beam-cli/src/commands/interactive.rs
new file mode 100644
index 0000000..81ee6de
--- /dev/null
+++ b/pkg/beam-cli/src/commands/interactive.rs
@@ -0,0 +1,277 @@
+// lint-long-file-override allow-max-lines=300
+use std::path::Path;
+
+use contextful::{ErrorContextExt, ResultContextExt};
+use rustyline::{Config, Editor, error::ReadlineError, history::History};
+use serde_json::json;
+
+pub(crate) use super::interactive_history::should_persist_history;
+#[cfg(test)]
+pub(crate) use super::interactive_history::uses_matching_prefix_history_search;
+#[cfg(test)]
+pub(crate) use super::interactive_parse::repl_command_args;
+pub(crate) use super::interactive_parse::{
+ ParsedLine, is_exit_command, merge_overrides, normalized_repl_command, parse_line, repl_err,
+};
+use super::{
+ interactive_helper::{BeamHelper, help_text},
+ interactive_history::{ReplHistory, bind_matching_prefix_history_search, sanitize_history},
+ interactive_interrupt::run_with_interrupt_owner,
+ interactive_parse::{resolved_color_mode, resolved_output_mode},
+ interactive_state::{capture_repl_state, reconcile_repl_state, repl_state_mutation},
+};
+use crate::{
+ chains::{ensure_rpc_matches_chain_id, find_chain},
+ cli::BalanceArgs,
+ commands,
+ display::{error_message, render_colored_shell_prefix, render_shell_prefix, shrink},
+ error::{Error, Result},
+ output::{CommandOutput, with_loading},
+ runtime::{BeamApp, InvocationOverrides},
+};
+pub async fn run(app: &BeamApp) -> Result<()> {
+ let config = Config::default();
+ let mut editor = Editor::::with_history(config, ReplHistory::new())
+ .context("create beam repl editor")?;
+ editor.set_helper(Some(BeamHelper::new()));
+ bind_matching_prefix_history_search(&mut editor);
+ load_sanitized_history(editor.history_mut(), &app.paths.history)
+ .context("sanitize beam repl history")?;
+ let mut overrides = app.overrides.clone();
+ canonicalize_startup_wallet_override(app, &mut overrides).await?;
+
+ loop {
+ let session = session(app, &overrides);
+ let prompt = prompt(&session).await?;
+ if let Some(helper) = editor.helper_mut() {
+ helper.set_shell_prompt(prompt.plain.clone(), prompt.colored.clone());
+ }
+
+ match editor.readline(&prompt.plain) {
+ Ok(line) => {
+ let line = line.trim();
+ if line.is_empty() {
+ continue;
+ }
+ if should_persist_history(line) {
+ let _ = editor.add_history_entry(line);
+ let _ = editor.save_history(&app.paths.history);
+ }
+ if is_exit_command(line) {
+ break;
+ }
+ match handle_line(app, &mut overrides, line).await {
+ Ok(()) | Err(Error::Interrupted) => {}
+ Err(err) => {
+ eprintln!(
+ "{}",
+ error_message(&err.to_string(), app.color_mode.colors_stderr()),
+ );
+ }
+ }
+ }
+ Err(ReadlineError::Interrupted) => continue,
+ Err(ReadlineError::Eof) => break,
+ Err(err) => {
+ return Err(std::io::Error::other(err.to_string())
+ .context("read beam repl line")
+ .into());
+ }
+ }
+ }
+
+ let _ = editor.save_history(&app.paths.history);
+ Ok(())
+}
+
+/// Load persisted REPL history and scrub entries that should not be kept;
+/// the file is rewritten only when sanitization changed something.
+pub(crate) fn load_sanitized_history(
+ history: &mut ReplHistory,
+ path: &Path,
+) -> rustyline::Result<()> {
+ // A missing or unreadable history file is fine (e.g. first run).
+ let _ = history.load(path);
+ if sanitize_history(history)? {
+ let _ = history.save(path);
+ }
+ Ok(())
+}
+
+pub(crate) async fn canonicalize_startup_wallet_override(
+ app: &BeamApp,
+ overrides: &mut InvocationOverrides,
+) -> Result<()> {
+ if overrides.from.is_some() {
+ overrides.from = app
+ .canonical_wallet_selector(overrides.from.as_deref())
+ .await?;
+ }
+
+ Ok(())
+}
+
+/// Parse one REPL line and execute it, routing Ctrl-C to whichever side the
+/// parsed line designates as the interrupt owner.
+async fn handle_line(app: &BeamApp, overrides: &mut InvocationOverrides, line: &str) -> Result<()> {
+ let parsed = parse_line(line)?;
+ let interrupt_owner = parsed.interrupt_owner();
+
+ run_with_interrupt_owner(
+ interrupt_owner,
+ handle_parsed_line(app, overrides, parsed),
+ tokio::signal::ctrl_c(),
+ )
+ .await
+}
+
+/// Execute a parsed REPL line: either a REPL built-in, a full CLI command
+/// (run against a per-command app with merged overrides), or a clap error.
+pub(crate) async fn handle_parsed_line(
+ app: &BeamApp,
+ overrides: &mut InvocationOverrides,
+ parsed: ParsedLine,
+) -> Result<()> {
+ match parsed {
+ ParsedLine::ReplCommand(args) => handle_repl_command(app, overrides, &args).await,
+ ParsedLine::Cli { args, cli } => {
+ // Per-command app: session overrides merged with flags from this
+ // line, plus any line-local color/output mode.
+ let command_app = BeamApp {
+ overrides: merge_overrides(overrides, &cli.overrides()),
+ color_mode: resolved_color_mode(&args, &cli, app),
+ output_mode: resolved_output_mode(&args, &cli, app),
+ ..app.clone()
+ };
+
+ match cli.command {
+ Some(command) => {
+ // Commands that mutate REPL-relevant state are snapshotted
+ // before and reconciled after, so session overrides track
+ // persisted changes.
+ let mutation = repl_state_mutation(&command);
+ let snapshot = match mutation.as_ref() {
+ Some(mutation) => {
+ Some(capture_repl_state(app, &command_app, overrides, mutation).await?)
+ }
+ None => None,
+ };
+ commands::run(&command_app, command).await?;
+ if let (Some(mutation), Some(snapshot)) = (mutation.as_ref(), snapshot) {
+ reconcile_repl_state(app, overrides, mutation, snapshot).await?;
+ }
+ Ok(())
+ }
+ None => Ok(()),
+ }
+ }
+ ParsedLine::CliError(err) => {
+ // clap errors (usage/help) are printed, not propagated.
+ err.print().context("print beam repl clap error")?;
+ Ok(())
+ }
+ }
+}
+
+/// Execute a REPL built-in (wallets / chains / rpc / balance / tokens / help).
+/// The command name is validated by `normalized_repl_command` up front, so the
+/// final match arm is unreachable.
+pub(crate) async fn handle_repl_command(
+ app: &BeamApp,
+ overrides: &mut InvocationOverrides,
+ args: &[String],
+) -> Result<()> {
+ let command = normalized_repl_command(args.first().map(String::as_str))
+ .ok_or_else(|| repl_err(args.first().cloned().unwrap_or_default()))?;
+
+ match command {
+ "wallets" => {
+ overrides.from = app
+ .canonical_wallet_selector(args.get(1).map(String::as_str))
+ .await?
+ }
+ "chains" => {
+ set_repl_chain_override(app, overrides, args.get(1).map(String::as_str)).await?
+ }
+ "rpc" => set_repl_rpc_override(app, overrides, args.get(1).map(String::as_str)).await?,
+ // balance/tokens run against a session app carrying current overrides.
+ "balance" => {
+ commands::balance::run(&session(app, overrides), BalanceArgs { token: None }).await?
+ }
+ "tokens" => commands::tokens::list_tokens(&session(app, overrides)).await?,
+ "help" => {
+ let help = help_text();
+ CommandOutput::new(
+ help.clone(),
+ json!({ "cli_prefix_optional": true, "help": help }),
+ )
+ .print(app.output_mode)?
+ }
+ _ => unreachable!("validated repl command"),
+ }
+
+ Ok(())
+}
+
+pub(crate) async fn set_repl_chain_override(
+ app: &BeamApp,
+ overrides: &mut InvocationOverrides,
+ selection: Option<&str>,
+) -> Result<()> {
+ let next_chain = match selection {
+ Some(selection) => {
+ let available = app.chain_store.get().await;
+ let chain = find_chain(selection, &available)?;
+ Some(chain.key)
+ }
+ None => None,
+ };
+
+ // REPL chain switches reset any inherited or previously-selected RPC override so the
+ // session falls back to the new chain's configured default unless the user selects a new
+ // RPC explicitly.
+ overrides.chain = next_chain;
+ overrides.rpc = None;
+
+ Ok(())
+}
+
+/// Set (or clear, when `rpc_url` is `None`) the session RPC override. A new
+/// RPC is accepted only after verifying it serves the active chain's id.
+pub(crate) async fn set_repl_rpc_override(
+ app: &BeamApp,
+ overrides: &mut InvocationOverrides,
+ rpc_url: Option<&str>,
+) -> Result<()> {
+ match rpc_url {
+ Some(rpc_url) => {
+ // Validate against the chain currently active for this session.
+ let chain = session(app, overrides).active_chain().await?;
+ with_loading(
+ app.output_mode,
+ format!("Validating RPC {rpc_url}..."),
+ async {
+ ensure_rpc_matches_chain_id(&chain.entry.key, chain.entry.chain_id, rpc_url)
+ .await
+ },
+ )
+ .await?;
+ overrides.rpc = Some(rpc_url.to_string());
+ }
+ None => overrides.rpc = None,
+ }
+
+ Ok(())
+}
+
/// A rendered REPL prompt: the plain text rustyline displays, plus an
/// optional ANSI-colored variant used when stdout supports color.
pub(crate) struct ReplPrompt {
    plain: String,
    // The generic parameter of this Option was garbled in transit; `String`
    // matches `render_shell_prefix`'s plain counterpart — assumes
    // `render_colored_shell_prefix` also returns String, TODO confirm.
    colored: Option<String>,
}
+
+pub(crate) async fn prompt(app: &BeamApp) -> Result {
+ let selected_address = app.active_optional_address().await?;
+ let wallet = app.active_wallet().await.ok();
+ let chain = app.active_chain().await?;
+ let wallet_display = match (wallet.as_ref(), selected_address) {
+ (Some(wallet), _) => format!("{} {}", wallet.name, shrink(&wallet.address)),
+ (None, Some(address)) => shrink(&format!("{address:#x}")),
+ (None, None) => "no-wallet".to_string(),
+ };
+ let rpc_url = shrink(&chain.rpc_url);
+
+ Ok(ReplPrompt {
+ plain: render_shell_prefix(&wallet_display, &chain.entry.key, &rpc_url),
+ colored: app
+ .color_mode
+ .colors_stdout()
+ .then(|| render_colored_shell_prefix(&wallet_display, &chain.entry.key, &rpc_url)),
+ })
+}
+
+fn session(app: &BeamApp, ov: &InvocationOverrides) -> BeamApp {
+ BeamApp {
+ overrides: ov.clone(),
+ ..app.clone()
+ }
+}
diff --git a/pkg/beam-cli/src/commands/interactive_helper.rs b/pkg/beam-cli/src/commands/interactive_helper.rs
new file mode 100644
index 0000000..05c9401
--- /dev/null
+++ b/pkg/beam-cli/src/commands/interactive_helper.rs
@@ -0,0 +1,281 @@
+// lint-long-file-override allow-max-lines=300
+use std::{
+ borrow::Cow::{self, Borrowed, Owned},
+ collections::HashSet,
+};
+
+use clap::{Arg, Command, CommandFactory};
+use rustyline::{
+ CompletionType, Context, Helper,
+ completion::{Completer, Pair},
+ highlight::Highlighter,
+ hint::{Hinter, HistoryHinter},
+ validate::{ValidationContext, ValidationResult, Validator},
+};
+
+use super::interactive_suggestion::completion_hint;
+use crate::cli::Cli;
+
/// Top-level commands suggested when the REPL line is still empty.
const REPL_OPTIONS: &[&str] = &[
    "wallets", "chains", "rpc", "balance", "tokens", "help", "exit",
];
/// ANSI "dim" escape used to render inline hints and completion candidates.
const SUGGESTION_STYLE_PREFIX: &str = "\x1b[2m";
/// ANSI reset escape terminating the dim styling.
const SUGGESTION_STYLE_SUFFIX: &str = "\x1b[0m";
+
+pub(crate) fn help_text() -> String {
+ let mut cli = Cli::command().subcommand(Command::new("exit").about("Exit interactive mode"));
+ cli.render_long_help().to_string()
+}
+
+pub(crate) fn completion_candidates(line: &str, pos: usize) -> Vec {
+ let head = &line[..pos];
+ let start = head
+ .rfind(|ch: char| ch.is_whitespace())
+ .map_or(0, |index| index + 1);
+ let needle = &head[start..];
+
+ let tokens = completion_tokens(&head[..start]);
+ let (root, current, expects_value) = completion_command(&tokens);
+ let mut candidates = Vec::new();
+
+ if tokens.is_empty() {
+ candidates.extend(
+ REPL_OPTIONS
+ .iter()
+ .map(|candidate| (*candidate).to_string()),
+ );
+ }
+
+ if !expects_value {
+ candidates.extend(current_visible_subcommands(¤t));
+ candidates.extend(current_visible_args(¤t, false));
+
+ if current.get_name() != root.get_name() {
+ candidates.extend(current_visible_args(&root, true));
+ }
+ }
+
+ candidates.extend(["-h".to_string(), "--help".to_string()]);
+ filter_candidates(candidates, needle)
+}
+
+fn completion_tokens(head: &str) -> Vec {
+ let mut tokens = shlex::split(head).unwrap_or_else(|| {
+ head.split_whitespace()
+ .map(str::to_string)
+ .collect::>()
+ });
+
+ if matches!(tokens.first().map(String::as_str), Some("beam")) {
+ tokens.remove(0);
+ }
+
+ tokens
+}
+
+fn completion_command(tokens: &[String]) -> (Command, Command, bool) {
+ let root = Cli::command();
+ let mut current = root.clone();
+ let mut expects_value = false;
+
+ for token in tokens {
+ if expects_value {
+ expects_value = false;
+ continue;
+ }
+
+ if token.starts_with('-') {
+ expects_value = arg_for_token(¤t, &root, token)
+ .is_some_and(|arg| arg_takes_value(arg) && !token.contains('='));
+ continue;
+ }
+
+ if let Some(subcommand) = current.find_subcommand(token) {
+ current = subcommand.clone();
+ }
+ }
+
+ (root, current, expects_value)
+}
+
+fn current_visible_subcommands(command: &Command) -> Vec {
+ command
+ .get_subcommands()
+ .filter(|subcommand| !subcommand.is_hide_set())
+ .flat_map(|subcommand| {
+ std::iter::once(subcommand.get_name().to_string())
+ .chain(subcommand.get_all_aliases().map(str::to_string))
+ })
+ .collect()
+}
+
+fn current_visible_args(command: &Command, globals_only: bool) -> Vec {
+ command
+ .get_arguments()
+ .filter(|arg| !arg.is_hide_set())
+ .filter(|arg| !globals_only || arg.is_global_set())
+ .flat_map(arg_spellings)
+ .collect()
+}
+
+fn arg_spellings(arg: &Arg) -> Vec {
+ let mut values = Vec::new();
+
+ if let Some(short) = arg.get_short() {
+ values.push(format!("-{short}"));
+ }
+ if let Some(aliases) = arg.get_short_and_visible_aliases() {
+ values.extend(aliases.into_iter().map(|short| format!("-{short}")));
+ }
+ if let Some(long) = arg.get_long() {
+ values.push(format!("--{long}"));
+ }
+ if let Some(aliases) = arg.get_long_and_visible_aliases() {
+ values.extend(aliases.into_iter().map(|long| format!("--{long}")));
+ }
+
+ values
+}
+
+fn arg_for_token<'a>(current: &'a Command, root: &'a Command, token: &str) -> Option<&'a Arg> {
+ find_arg(current, token).or_else(|| find_arg(root, token).filter(|arg| arg.is_global_set()))
+}
+
/// Resolves a `-s` / `--long[=value]` token to the matching argument of
/// `command`, honoring visible aliases. Returns `None` for non-flag tokens.
fn find_arg<'a>(command: &'a Command, token: &str) -> Option<&'a Arg> {
    if let Some(long) = token.strip_prefix("--") {
        // Ignore any `=value` suffix when matching the flag name.
        let long = long.split('=').next().unwrap_or(long);
        return command.get_arguments().find(|arg| {
            arg.get_long() == Some(long)
                || arg
                    .get_long_and_visible_aliases()
                    .is_some_and(|aliases| aliases.into_iter().any(|alias| alias == long))
        });
    }

    if let Some(short) = token.strip_prefix('-') {
        // Only the first character after `-` is considered; bundled short
        // flags (e.g. `-abc`) are not expanded here.
        let short = short.chars().next()?;
        return command.get_arguments().find(|arg| {
            arg.get_short() == Some(short)
                || arg
                    .get_short_and_visible_aliases()
                    .is_some_and(|aliases| aliases.into_iter().any(|alias| alias == short))
        });
    }

    None
}
+
/// Whether the argument's clap action consumes a value (e.g. `Set`/`Append`)
/// rather than acting as a boolean or count flag.
fn arg_takes_value(arg: &Arg) -> bool {
    arg.get_action().takes_values()
}
+
/// Keeps the candidates that start with `needle`, de-duplicating while
/// preserving first-seen order.
// Fix: the signature was garbled — restore `impl IntoIterator<Item = String>`
// and the `Vec<String>` return type.
fn filter_candidates(candidates: impl IntoIterator<Item = String>, needle: &str) -> Vec<String> {
    let mut seen = HashSet::new();

    candidates
        .into_iter()
        .filter(|candidate| candidate.starts_with(needle))
        // `HashSet::insert` returns false for values already seen, so this
        // drops duplicates without disturbing order.
        .filter(|candidate| seen.insert(candidate.clone()))
        .collect()
}
+
/// Paired prompt renderings so the highlighter can substitute the colored
/// variant when rustyline hands back the exact plain prompt.
#[derive(Default)]
struct ShellPrompt {
    /// Uncolored prompt text as registered with the editor.
    plain: String,
    /// The same prompt with ANSI color codes applied.
    colored: String,
}
+
+#[derive(Default)]
+pub(crate) struct BeamHelper {
+ shell_prompt: Option,
+}
+
+impl BeamHelper {
+ pub(crate) fn new() -> Self {
+ Self::default()
+ }
+
+ pub(crate) fn set_shell_prompt(&mut self, plain: String, colored: Option) {
+ self.shell_prompt = colored.map(|colored| ShellPrompt { plain, colored });
+ }
+}
+
+impl Helper for BeamHelper {}
+
impl Highlighter for BeamHelper {
    /// Substitutes the colored prompt when rustyline passes back the exact
    /// plain prompt registered via `set_shell_prompt`; any other prompt is
    /// returned untouched.
    fn highlight_prompt<'b, 's: 'b, 'p: 'b>(
        &'s self,
        prompt: &'p str,
        default: bool,
    ) -> Cow<'b, str> {
        // The `default` flag (prompt vs. continuation) is irrelevant here.
        let _ = default;

        match &self.shell_prompt {
            Some(shell_prompt) if prompt == shell_prompt.plain => {
                Borrowed(shell_prompt.colored.as_str())
            }
            _ => Borrowed(prompt),
        }
    }

    /// Renders inline hints dimmed via ANSI escapes.
    fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> {
        Owned(format!(
            "{SUGGESTION_STYLE_PREFIX}{hint}{SUGGESTION_STYLE_SUFFIX}"
        ))
    }

    /// Renders completion candidates dimmed, regardless of completion type.
    fn highlight_candidate<'c>(
        &self,
        candidate: &'c str,
        completion: CompletionType,
    ) -> Cow<'c, str> {
        let _ = completion;

        Owned(format!(
            "{SUGGESTION_STYLE_PREFIX}{candidate}{SUGGESTION_STYLE_SUFFIX}"
        ))
    }
}
+
+impl Hinter for BeamHelper {
+ type Hint = String;
+
+ fn hint(&self, line: &str, pos: usize, ctx: &Context<'_>) -> Option {
+ HistoryHinter::default()
+ .hint(line, pos, ctx)
+ .or_else(|| completion_hint(line, pos))
+ }
+}
+
+impl Validator for BeamHelper {
+ fn validate(&self, _ctx: &mut ValidationContext<'_>) -> rustyline::Result {
+ Ok(ValidationResult::Valid(None))
+ }
+}
+
+impl Completer for BeamHelper {
+ type Candidate = Pair;
+
+ fn complete(
+ &self,
+ line: &str,
+ pos: usize,
+ _ctx: &Context<'_>,
+ ) -> rustyline::Result<(usize, Vec)> {
+ let head = &line[..pos];
+ let start = head
+ .rfind(|ch: char| ch.is_whitespace())
+ .map_or(0, |index| index + 1);
+
+ Ok((
+ start,
+ completion_candidates(line, pos)
+ .into_iter()
+ .map(|candidate| Pair {
+ display: candidate.clone(),
+ replacement: candidate,
+ })
+ .collect(),
+ ))
+ }
+}
diff --git a/pkg/beam-cli/src/commands/interactive_history.rs b/pkg/beam-cli/src/commands/interactive_history.rs
new file mode 100644
index 0000000..25658af
--- /dev/null
+++ b/pkg/beam-cli/src/commands/interactive_history.rs
@@ -0,0 +1,284 @@
+// lint-long-file-override allow-max-lines=300
+use std::path::Path;
+
+use clap::Parser;
+use rustyline::{
+ Cmd, ConditionalEventHandler, Config, Editor, Event, EventContext, EventHandler, Helper,
+ KeyCode, KeyEvent, Modifiers, RepeatCount,
+ history::{DefaultHistory, History, SearchDirection, SearchResult},
+};
+
+use crate::cli::Cli;
+
/// Thin wrapper around rustyline's `DefaultHistory` so the REPL can expose
/// iteration over entries and layer custom behavior on the stock store.
pub(crate) struct ReplHistory {
    /// Backing rustyline history implementation.
    inner: DefaultHistory,
}
+
+impl ReplHistory {
+ pub(crate) fn new() -> Self {
+ Self::with_config(&Config::default())
+ }
+
+ pub(crate) fn with_config(config: &Config) -> Self {
+ Self {
+ inner: DefaultHistory::with_config(config),
+ }
+ }
+
+ pub(crate) fn iter(&self) -> impl DoubleEndedIterator
- + '_ {
+ self.inner.iter()
+ }
+}
+
impl Default for ReplHistory {
    /// Equivalent to [`ReplHistory::new`]: a store with default configuration.
    fn default() -> Self {
        Self::new()
    }
}
+
+impl History for ReplHistory {
+ fn get(
+ &self,
+ index: usize,
+ dir: SearchDirection,
+ ) -> rustyline::Result