diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0a709048..0eead7e3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -234,19 +234,29 @@ jobs: aws-access-key-id: ${{ secrets.DEPLOYER_AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.DEPLOYER_AWS_SECRET_ACCESS_KEY }} - # Staging deployment (mason) - triggered on staging branch - build-and-deploy-staging: + # Staging Docker builds - parallel via matrix + build-staging-images: if: github.ref == 'refs/heads/staging' runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + include: + - service: admin + dockerfile: docker/admin/Dockerfile + repository: mark-admin + image_tag: mark-admin-${{ github.sha }} + - service: handler + dockerfile: docker/handler/Dockerfile + repository: mark-handler + image_tag: mark-handler-${{ github.sha }} + - service: poller + dockerfile: docker/poller/Dockerfile + repository: mark-poller + image_tag: mark-poller-${{ github.sha }} env: AWS_REGION: sa-east-1 REGISTRY: 679752396206.dkr.ecr.sa-east-1.amazonaws.com - POLLER_REPOSITORY: mark-poller - POLLER_IMAGE_TAG: mark-poller-${{ github.sha }} - HANDLER_REPOSITORY: mark-handler - HANDLER_IMAGE_TAG: mark-handler-${{ github.sha }} - ADMIN_REPOSITORY: mark-admin - ADMIN_IMAGE_TAG: mark-admin-${{ github.sha }} permissions: contents: read packages: write @@ -268,37 +278,61 @@ jobs: with: mask-password: 'true' - - name: Ensure ECR repositories exist + - name: Ensure ECR repository exists run: | - # Create repositories if they don't exist - aws ecr describe-repositories --repository-names $ADMIN_REPOSITORY --region $AWS_REGION || \ - aws ecr create-repository --repository-name $ADMIN_REPOSITORY --region $AWS_REGION --image-scanning-configuration scanOnPush=true --image-tag-mutability MUTABLE - aws ecr describe-repositories --repository-names $HANDLER_REPOSITORY --region $AWS_REGION || \ - aws ecr create-repository --repository-name $HANDLER_REPOSITORY --region $AWS_REGION 
--image-scanning-configuration scanOnPush=true --image-tag-mutability MUTABLE - aws ecr describe-repositories --repository-names $POLLER_REPOSITORY --region $AWS_REGION || \ - aws ecr create-repository --repository-name $POLLER_REPOSITORY --region $AWS_REGION --image-scanning-configuration scanOnPush=true --image-tag-mutability MUTABLE + aws ecr describe-repositories --repository-names ${{ matrix.repository }} --region $AWS_REGION || \ + aws ecr create-repository --repository-name ${{ matrix.repository }} --region $AWS_REGION --image-scanning-configuration scanOnPush=true --image-tag-mutability MUTABLE - - name: Build and push Admin Docker image - run: | - docker build --provenance=false --sbom=false -f docker/admin/Dockerfile -t $REGISTRY/$ADMIN_REPOSITORY:$ADMIN_IMAGE_TAG . - docker push $REGISTRY/$ADMIN_REPOSITORY:$ADMIN_IMAGE_TAG + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 - - name: Build and push Invoice Handler Docker image - run: | - docker build -f docker/handler/Dockerfile -t $REGISTRY/$HANDLER_REPOSITORY:$HANDLER_IMAGE_TAG . - docker push $REGISTRY/$HANDLER_REPOSITORY:$HANDLER_IMAGE_TAG + - name: Build and push ${{ matrix.service }} Docker image + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ${{ matrix.dockerfile }} + push: true + tags: ${{ env.REGISTRY }}/${{ matrix.repository }}:${{ matrix.image_tag }} + cache-from: type=gha,scope=staging-${{ matrix.service }} + cache-to: type=gha,scope=staging-${{ matrix.service }},mode=max + provenance: false + sbom: false + + # Staging deployment - runs after all images are built + deploy-staging: + if: github.ref == 'refs/heads/staging' + needs: build-staging-images + runs-on: ubuntu-latest + env: + AWS_REGION: sa-east-1 + REGISTRY: 679752396206.dkr.ecr.sa-east-1.amazonaws.com + POLLER_REPOSITORY: mark-poller + POLLER_IMAGE_TAG: mark-poller-${{ github.sha }} + HANDLER_REPOSITORY: mark-handler + HANDLER_IMAGE_TAG: mark-handler-${{ github.sha }} + ADMIN_REPOSITORY: mark-admin + ADMIN_IMAGE_TAG: mark-admin-${{ github.sha }} + permissions: + contents: read + packages: write - - name: Build and push Poller Docker image - run: | - docker build --provenance=false --sbom=false -f docker/poller/Dockerfile -t $REGISTRY/$POLLER_REPOSITORY:$POLLER_IMAGE_TAG . - docker push $REGISTRY/$POLLER_REPOSITORY:$POLLER_IMAGE_TAG + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-region: sa-east-1 + aws-access-key-id: ${{ secrets.DEPLOYER_AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.DEPLOYER_AWS_SECRET_ACCESS_KEY }} # ============================================================================ # POLLER REMOVAL - TEMPORARY # ============================================================================ # Remove only the main poller Lambda function (mark_poller) before deploying the invoice # handler to prevent duplicate intent creation. Other poller Lambdas remain active. 
- # + # # TODO: Remove this step once poller migration is complete # ============================================================================ - name: Remove Main Poller Lambda Function diff --git a/config/shard-manifest-mason.json b/config/shard-manifest-mason.json index 1290a0e0..65b0d41a 100644 --- a/config/shard-manifest-mason.json +++ b/config/shard-manifest-mason.json @@ -29,8 +29,8 @@ "secretId": "mason-fastfill-signer-pk-share2" }, "method": "shamir", - "required": true, - "_comment": "Fill Service signer private key for fast fills" + "required": false, + "_comment": "Fill Service signer private key for fast fills (optional - only needed when fill service is enabled)" }, { "path": "ton.mnemonic", @@ -40,8 +40,8 @@ "secretId": "mason-ton-mnemonic-share2" }, "method": "shamir", - "required": true, - "_comment": "TON wallet mnemonic for TAC bridge operations" + "required": false, + "_comment": "TON wallet mnemonic for TAC bridge operations (optional - only needed when TAC rebalance is enabled)" }, { "path": "solana.privateKey", @@ -52,7 +52,7 @@ }, "method": "shamir", "required": true, - "_comment": "Solana wallet private key (base58 encoded)" + "_comment": "Solana wallet private key (required for the dedicated Solana USDC poller)" }, { "path": "binance.apiSecret", diff --git a/docker/admin/Dockerfile b/docker/admin/Dockerfile index 3bea8d2c..12ff83fc 100644 --- a/docker/admin/Dockerfile +++ b/docker/admin/Dockerfile @@ -1,3 +1,4 @@ +# syntax=docker/dockerfile:1 FROM public.ecr.aws/lambda/nodejs:20 AS node # ---------------------------------------- @@ -53,9 +54,10 @@ COPY yarn.lock /tmp/build/ # Install dependencies including devDependencies # Note: --mode=skip-build skips preinstall/postinstall scripts during install # This avoids the "npx only-allow pnpm" check in @eth-optimism/core-utils -# Clear yarn cache before install to avoid corrupted package downloads -# Retry install on failure to handle transient npm registry issues -RUN yarn cache clean --all 
&& \ +# Cache mount speeds up yarn install when Docker layer cache misses +# Clear cache and retry on failure to handle corrupted downloads or transient registry issues +ENV YARN_CACHE_FOLDER=/tmp/yarn-cache +RUN --mount=type=cache,target=/tmp/yarn-cache \ yarn install --immutable --mode=skip-build || \ (yarn cache clean --all && sleep 2 && yarn install --immutable --mode=skip-build) && \ yarn workspaces foreach -A run rebuild @@ -76,18 +78,11 @@ COPY packages/adapters/database /tmp/build/packages/adapters/database COPY tsconfig.json /tmp/build/ # Build packages -# Build only the workspaces needed for admin (not all workspaces) -# Build dependencies first, then admin -RUN yarn workspace @mark/core build && \ - yarn workspace @mark/logger build && \ - yarn workspace @mark/cache build && \ - yarn workspace @mark/chainservice build && \ - yarn workspace @mark/database build && \ - yarn workspace @mark/everclear build && \ - yarn workspace @mark/prometheus build && \ - yarn workspace @mark/web3signer build && \ - yarn workspace @mark/rebalance build && \ - yarn workspace @mark/admin build +# Build packages in topological order (respects declared deps) +# Exclude root workspace (its build script would re-run foreach without our excludes) +# Exclude packages not needed by admin +RUN yarn workspaces foreach -Av --topological-dev \ + --exclude mark --exclude @mark/handler --exclude @mark/poller --exclude @mark/agent --exclude @mark/webhooks run build # ---------------------------------------- # Runtime stage diff --git a/docker/handler/Dockerfile b/docker/handler/Dockerfile index d892b5e4..5670cc98 100644 --- a/docker/handler/Dockerfile +++ b/docker/handler/Dockerfile @@ -1,3 +1,4 @@ +# syntax=docker/dockerfile:1 FROM node:20 AS base # ---------------------------------------- @@ -53,7 +54,9 @@ COPY yarn.lock /tmp/build/ # Note: --mode=skip-build skips preinstall/postinstall scripts during install # This avoids the "npx only-allow pnpm" check in @eth-optimism/core-utils # 
Then we run rebuild to build native modules with our build tools -RUN yarn install --immutable --mode=skip-build && \ +ENV YARN_CACHE_FOLDER=/tmp/yarn-cache +RUN --mount=type=cache,target=/tmp/yarn-cache \ + yarn install --immutable --mode=skip-build && \ yarn workspaces foreach -A run rebuild # Copy source files @@ -73,10 +76,11 @@ COPY packages/adapters/database /tmp/build/packages/adapters/database COPY packages/adapters/webhooks /tmp/build/packages/adapters/webhooks COPY tsconfig.json /tmp/build/ -# Build packages -# Build core first to ensure declaration files are available -RUN yarn workspace @mark/core build && \ - yarn build +# Build packages in topological order (respects declared deps) +# Exclude root workspace (its build script would re-run foreach without our excludes) +# Exclude admin — not needed by handler +RUN yarn workspaces foreach -Av --topological-dev \ + --exclude mark --exclude @mark/admin run build # ---------------------------------------- # Runtime stage diff --git a/docker/poller/Dockerfile b/docker/poller/Dockerfile index 13ae2267..2ac91c99 100644 --- a/docker/poller/Dockerfile +++ b/docker/poller/Dockerfile @@ -1,3 +1,4 @@ +# syntax=docker/dockerfile:1 FROM public.ecr.aws/lambda/nodejs:20 AS node # ---------------------------------------- @@ -53,9 +54,10 @@ COPY yarn.lock /tmp/build/ # Install dependencies including devDependencies # Note: --mode=skip-build skips preinstall/postinstall scripts during install # This avoids the "npx only-allow pnpm" check in @eth-optimism/core-utils -# Clear yarn cache before install to avoid corrupted package downloads -# Retry install on failure to handle transient npm registry issues -RUN yarn cache clean --all && \ +# Cache mount speeds up yarn install when Docker layer cache misses +# Clear cache and retry on failure to handle corrupted downloads or transient registry issues +ENV YARN_CACHE_FOLDER=/tmp/yarn-cache +RUN --mount=type=cache,target=/tmp/yarn-cache \ yarn install --immutable 
--mode=skip-build || \ (yarn cache clean --all && sleep 2 && yarn install --immutable --mode=skip-build) && \ yarn workspaces foreach -A run rebuild @@ -76,19 +78,11 @@ COPY packages/adapters/database /tmp/build/packages/adapters/database COPY tsconfig.json /tmp/build/ # Build packages -# Build only the workspaces needed for poller (not all workspaces) -# Build dependencies first, then poller -RUN yarn workspace @mark/core build && \ - yarn workspace @mark/logger build && \ - yarn workspace @mark/cache build && \ - yarn workspace @mark/chainservice build && \ - yarn workspace @mark/database build && \ - yarn workspace @mark/everclear build && \ - yarn workspace @mark/prometheus build && \ - yarn workspace @mark/web3signer build && \ - yarn workspace @mark/rebalance build && \ - yarn workspace @mark/agent build && \ - yarn workspace @mark/poller build +# Build packages in topological order (respects declared deps) +# Exclude root workspace (its build script would re-run foreach without our excludes) +# Exclude packages not needed by poller +RUN yarn workspaces foreach -Av --topological-dev \ + --exclude mark --exclude @mark/admin --exclude @mark/handler --exclude @mark/webhooks run build # ---------------------------------------- # Runtime stage @@ -97,7 +91,14 @@ RUN yarn workspace @mark/core build && \ FROM node AS runtime # Install dbmate for database migrations -RUN curl -fsSL -o /usr/local/bin/dbmate https://github.com/amacneil/dbmate/releases/latest/download/dbmate-linux-amd64 && \ +# Pin to specific version and verify SHA256 checksum for supply chain security +ARG DBMATE_VERSION=v2.29.3 +ARG DBMATE_SHA256=2bb1554a32d9c0bd544841d3523eae64fd60a58d7720c5d82900043dc5e87a6c +RUN set -eux; \ + curl -fsSL -o /tmp/dbmate \ + https://github.com/amacneil/dbmate/releases/download/${DBMATE_VERSION}/dbmate-linux-amd64; \ + echo "${DBMATE_SHA256} /tmp/dbmate" | sha256sum -c -; \ + mv /tmp/dbmate /usr/local/bin/dbmate; \ chmod +x /usr/local/bin/dbmate ENV 
NODE_ENV=production \ diff --git a/ops/mainnet/mark/config.tf b/ops/mainnet/mark/config.tf index 7cda4e77..d46b61b6 100644 --- a/ops/mainnet/mark/config.tf +++ b/ops/mainnet/mark/config.tf @@ -56,12 +56,14 @@ locals { } ] - # NOTE: TAC/METH rebalance config is loaded from SSM at runtime (not as env vars) + # NOTE: TAC/METH/aManUSDe/aMansyrupUSDT rebalance config is loaded from SSM at runtime (not as env vars) # to stay under AWS Lambda's 4KB env var limit. - # + # # SSM-loaded config (via MARK_CONFIG_SSM_PARAMETER): # - tacRebalance.* (all TAC_REBALANCE_* values) - # - methRebalance.* (all METH_REBALANCE_* values) + # - methRebalance.* (all METH_REBALANCE_* values) + # - aManUsdeRebalance.* (all AMANUSDE_REBALANCE_* values) + # - aMansyrupUsdtRebalance.* (all AMANSYRUPUSDT_REBALANCE_* values) # - ton.mnemonic, tonSignerAddress # # See packages/core/src/config.ts for the fallback logic. @@ -141,6 +143,28 @@ locals { } ) + # aManUSDe rebalancing poller configuration + # Flow: USDC (ETH) → Stargate Bridge → USDC (Mantle) → DEX Swap → USDe → Aave Supply → aManUSDe + amanusde_poller_env_vars = merge( + local.poller_env_vars, + { + RUN_MODE = "aManUsdeOnly" + AMANUSDE_AAVE_POOL_ADDRESS = "0x458F293454fE0d67EC0655f3672301301DD51422" + AMANUSDE_DEX_SWAP_SLIPPAGE_BPS = "100" + } + ) + + # aMansyrupUSDT rebalancing poller configuration + # Flow: USDC (ETH) → Stargate Bridge → USDC (Mantle) → DEX Swap → syrupUSDT → Aave Supply → aMansyrupUSDT + amansyrupusdt_poller_env_vars = merge( + local.poller_env_vars, + { + RUN_MODE = "aMansyrupUsdtOnly" + AMANSYRUPUSDT_AAVE_POOL_ADDRESS = "0x458F293454fE0d67EC0655f3672301301DD51422" + AMANSYRUPUSDT_DEX_SWAP_SLIPPAGE_BPS = "100" + } + ) + web3signer_env_vars = [ { name = "WEB3_SIGNER_PRIVATE_KEY" diff --git a/ops/mainnet/mark/main.tf b/ops/mainnet/mark/main.tf index 2375aa16..0e4cb98a 100644 --- a/ops/mainnet/mark/main.tf +++ b/ops/mainnet/mark/main.tf @@ -105,6 +105,54 @@ locals { maxRebalanceAmount = 
try(local.mark_config_json.methRebalance.bridge.maxRebalanceAmount, "") } } + # aManUSDe Rebalance configuration + aManUsdeRebalance = { + enabled = try(local.mark_config_json.aManUsdeRebalance.enabled, false) + marketMaker = { + address = try(local.mark_config_json.aManUsdeRebalance.marketMaker.address, "") + onDemandEnabled = try(local.mark_config_json.aManUsdeRebalance.marketMaker.onDemandEnabled, false) + thresholdEnabled = try(local.mark_config_json.aManUsdeRebalance.marketMaker.thresholdEnabled, false) + threshold = try(local.mark_config_json.aManUsdeRebalance.marketMaker.threshold, "") + targetBalance = try(local.mark_config_json.aManUsdeRebalance.marketMaker.targetBalance, "") + } + fillService = { + address = try(local.mark_config_json.aManUsdeRebalance.fillService.address, "") + senderAddress = try(local.mark_config_json.aManUsdeRebalance.fillService.senderAddress, "") + thresholdEnabled = try(local.mark_config_json.aManUsdeRebalance.fillService.thresholdEnabled, false) + threshold = try(local.mark_config_json.aManUsdeRebalance.fillService.threshold, "") + targetBalance = try(local.mark_config_json.aManUsdeRebalance.fillService.targetBalance, "") + allowCrossWalletRebalancing = try(local.mark_config_json.aManUsdeRebalance.fillService.allowCrossWalletRebalancing, false) + } + bridge = { + slippageDbps = try(local.mark_config_json.aManUsdeRebalance.bridge.slippageDbps, 50) # 0.5% default + minRebalanceAmount = try(local.mark_config_json.aManUsdeRebalance.bridge.minRebalanceAmount, "") + maxRebalanceAmount = try(local.mark_config_json.aManUsdeRebalance.bridge.maxRebalanceAmount, "") + } + } + # aMansyrupUSDT Rebalance configuration + aMansyrupUsdtRebalance = { + enabled = try(local.mark_config_json.aMansyrupUsdtRebalance.enabled, false) + marketMaker = { + address = try(local.mark_config_json.aMansyrupUsdtRebalance.marketMaker.address, "") + onDemandEnabled = try(local.mark_config_json.aMansyrupUsdtRebalance.marketMaker.onDemandEnabled, false) + 
thresholdEnabled = try(local.mark_config_json.aMansyrupUsdtRebalance.marketMaker.thresholdEnabled, false) + threshold = try(local.mark_config_json.aMansyrupUsdtRebalance.marketMaker.threshold, "") + targetBalance = try(local.mark_config_json.aMansyrupUsdtRebalance.marketMaker.targetBalance, "") + } + fillService = { + address = try(local.mark_config_json.aMansyrupUsdtRebalance.fillService.address, "") + senderAddress = try(local.mark_config_json.aMansyrupUsdtRebalance.fillService.senderAddress, "") + thresholdEnabled = try(local.mark_config_json.aMansyrupUsdtRebalance.fillService.thresholdEnabled, false) + threshold = try(local.mark_config_json.aMansyrupUsdtRebalance.fillService.threshold, "") + targetBalance = try(local.mark_config_json.aMansyrupUsdtRebalance.fillService.targetBalance, "") + allowCrossWalletRebalancing = try(local.mark_config_json.aMansyrupUsdtRebalance.fillService.allowCrossWalletRebalancing, false) + } + bridge = { + slippageDbps = try(local.mark_config_json.aMansyrupUsdtRebalance.bridge.slippageDbps, 500) # 5% default + minRebalanceAmount = try(local.mark_config_json.aMansyrupUsdtRebalance.bridge.minRebalanceAmount, "") + maxRebalanceAmount = try(local.mark_config_json.aMansyrupUsdtRebalance.bridge.maxRebalanceAmount, "") + } + } # Solana configuration for CCIP bridge operations solana = { privateKey = try(local.mark_config_json.solana.privateKey, "") @@ -435,6 +483,36 @@ module "mark_poller_meth_only" { }) } +# aManUSDe-only Lambda - runs aManUSDe rebalancing every 5 minutes +# Flow: USDC (ETH) → Stargate → USDC (Mantle) → DEX Swap → USDe → Aave Supply → aManUSDe +module "mark_poller_amanusde_only" { + source = "../../modules/lambda" + stage = var.stage + environment = var.environment + container_family = "${var.bot_name}-poller-amanusde" + execution_role_arn = module.iam.lambda_role_arn + image_uri = var.image_uri + subnet_ids = module.network.private_subnets + security_group_id = module.sgs.lambda_sg_id + schedule_expression = "rate(5 
minutes)" + container_env_vars = local.amanusde_poller_env_vars +} + +# aMansyrupUSDT-only Lambda - runs aMansyrupUSDT rebalancing every 5 minutes +# Flow: USDC (ETH) → Stargate → USDC (Mantle) → DEX Swap → syrupUSDT → Aave Supply → aMansyrupUSDT +module "mark_poller_amansyrupusdt_only" { + source = "../../modules/lambda" + stage = var.stage + environment = var.environment + container_family = "${var.bot_name}-poller-amansyrupusdt" + execution_role_arn = module.iam.lambda_role_arn + image_uri = var.image_uri + subnet_ids = module.network.private_subnets + security_group_id = module.sgs.lambda_sg_id + schedule_expression = "rate(5 minutes)" + container_env_vars = local.amansyrupusdt_poller_env_vars +} + module "iam" { source = "../../modules/iam" environment = var.environment diff --git a/ops/mainnet/mason/config.tf b/ops/mainnet/mason/config.tf index c0c0f36b..40d42453 100644 --- a/ops/mainnet/mason/config.tf +++ b/ops/mainnet/mason/config.tf @@ -31,14 +31,14 @@ locals { awsParamName = "/mason/config/web3_fastfill_signer_private_key_share1" gcpSecretRef = { project = "everclear-staging", secretId = "mason-fastfill-signer-pk-share2" } method = "shamir" - required = true + required = false }, { path = "ton.mnemonic" awsParamName = "/mason/config/ton_mnemonic_share1" gcpSecretRef = { project = "everclear-staging", secretId = "mason-ton-mnemonic-share2" } method = "shamir" - required = true + required = false }, { path = "solana.privateKey" @@ -107,12 +107,14 @@ locals { } ] - # NOTE: TAC/METH rebalance config is loaded from SSM at runtime (not as env vars) + # NOTE: TAC/METH/aManUSDe/aMansyrupUSDT rebalance config is loaded from SSM at runtime (not as env vars) # to stay under AWS Lambda's 4KB env var limit. 
- # + # # SSM-loaded config (via MARK_CONFIG_SSM_PARAMETER): # - tacRebalance.* (all TAC_REBALANCE_* values) - # - methRebalance.* (all METH_REBALANCE_* values) + # - methRebalance.* (all METH_REBALANCE_* values) + # - aManUsdeRebalance.* (all AMANUSDE_REBALANCE_* values) + # - aMansyrupUsdtRebalance.* (all AMANSYRUPUSDT_REBALANCE_* values) # - ton.mnemonic, tonSignerAddress # # See packages/core/src/config.ts for the fallback logic. @@ -200,6 +202,28 @@ locals { } ) + # aManUSDe rebalancing poller configuration + # Flow: USDC (ETH) → Stargate Bridge → USDC (Mantle) → DEX Swap → USDe → Aave Supply → aManUSDe + amanusde_poller_env_vars = merge( + local.poller_env_vars, + { + RUN_MODE = "aManUsdeOnly" + AMANUSDE_AAVE_POOL_ADDRESS = "0x458F293454fE0d67EC0655f3672301301DD51422" + AMANUSDE_DEX_SWAP_SLIPPAGE_BPS = "100" + } + ) + + # aMansyrupUSDT rebalancing poller configuration + # Flow: USDC (ETH) → Stargate Bridge → USDC (Mantle) → DEX Swap → syrupUSDT → Aave Supply → aMansyrupUSDT + amansyrupusdt_poller_env_vars = merge( + local.poller_env_vars, + { + RUN_MODE = "aMansyrupUsdtOnly" + AMANSYRUPUSDT_AAVE_POOL_ADDRESS = "0x458F293454fE0d67EC0655f3672301301DD51422" + AMANSYRUPUSDT_DEX_SWAP_SLIPPAGE_BPS = "100" + } + ) + web3signer_env_vars = [ { name = "WEB3_SIGNER_PRIVATE_KEY" diff --git a/ops/mainnet/mason/main.tf b/ops/mainnet/mason/main.tf index 7cc37213..662e2344 100644 --- a/ops/mainnet/mason/main.tf +++ b/ops/mainnet/mason/main.tf @@ -119,6 +119,54 @@ locals { maxRebalanceAmount = try(local.mark_config_json.methRebalance.bridge.maxRebalanceAmount, "") } } + # aManUSDe Rebalance configuration + aManUsdeRebalance = { + enabled = try(local.mark_config_json.aManUsdeRebalance.enabled, false) + marketMaker = { + address = try(local.mark_config_json.aManUsdeRebalance.marketMaker.address, "") + onDemandEnabled = try(local.mark_config_json.aManUsdeRebalance.marketMaker.onDemandEnabled, false) + thresholdEnabled = 
try(local.mark_config_json.aManUsdeRebalance.marketMaker.thresholdEnabled, false) + threshold = try(local.mark_config_json.aManUsdeRebalance.marketMaker.threshold, "") + targetBalance = try(local.mark_config_json.aManUsdeRebalance.marketMaker.targetBalance, "") + } + fillService = { + address = try(local.mark_config_json.aManUsdeRebalance.fillService.address, "") + senderAddress = try(local.mark_config_json.aManUsdeRebalance.fillService.senderAddress, "") + thresholdEnabled = try(local.mark_config_json.aManUsdeRebalance.fillService.thresholdEnabled, false) + threshold = try(local.mark_config_json.aManUsdeRebalance.fillService.threshold, "") + targetBalance = try(local.mark_config_json.aManUsdeRebalance.fillService.targetBalance, "") + allowCrossWalletRebalancing = try(local.mark_config_json.aManUsdeRebalance.fillService.allowCrossWalletRebalancing, false) + } + bridge = { + slippageDbps = try(local.mark_config_json.aManUsdeRebalance.bridge.slippageDbps, 50) # 0.5% default + minRebalanceAmount = try(local.mark_config_json.aManUsdeRebalance.bridge.minRebalanceAmount, "") + maxRebalanceAmount = try(local.mark_config_json.aManUsdeRebalance.bridge.maxRebalanceAmount, "") + } + } + # aMansyrupUSDT Rebalance configuration + aMansyrupUsdtRebalance = { + enabled = try(local.mark_config_json.aMansyrupUsdtRebalance.enabled, false) + marketMaker = { + address = try(local.mark_config_json.aMansyrupUsdtRebalance.marketMaker.address, "") + onDemandEnabled = try(local.mark_config_json.aMansyrupUsdtRebalance.marketMaker.onDemandEnabled, false) + thresholdEnabled = try(local.mark_config_json.aMansyrupUsdtRebalance.marketMaker.thresholdEnabled, false) + threshold = try(local.mark_config_json.aMansyrupUsdtRebalance.marketMaker.threshold, "") + targetBalance = try(local.mark_config_json.aMansyrupUsdtRebalance.marketMaker.targetBalance, "") + } + fillService = { + address = try(local.mark_config_json.aMansyrupUsdtRebalance.fillService.address, "") + senderAddress = 
try(local.mark_config_json.aMansyrupUsdtRebalance.fillService.senderAddress, "") + thresholdEnabled = try(local.mark_config_json.aMansyrupUsdtRebalance.fillService.thresholdEnabled, false) + threshold = try(local.mark_config_json.aMansyrupUsdtRebalance.fillService.threshold, "") + targetBalance = try(local.mark_config_json.aMansyrupUsdtRebalance.fillService.targetBalance, "") + allowCrossWalletRebalancing = try(local.mark_config_json.aMansyrupUsdtRebalance.fillService.allowCrossWalletRebalancing, false) + } + bridge = { + slippageDbps = try(local.mark_config_json.aMansyrupUsdtRebalance.bridge.slippageDbps, 500) # 5% default + minRebalanceAmount = try(local.mark_config_json.aMansyrupUsdtRebalance.bridge.minRebalanceAmount, "") + maxRebalanceAmount = try(local.mark_config_json.aMansyrupUsdtRebalance.bridge.maxRebalanceAmount, "") + } + } } } @@ -418,6 +466,36 @@ module "mark_poller_meth_only" { }) } +# aManUSDe-only Lambda - runs aManUSDe rebalancing every 5 minutes +# Flow: USDC (ETH) → Stargate → USDC (Mantle) → DEX Swap → USDe → Aave Supply → aManUSDe +module "mark_poller_amanusde_only" { + source = "../../modules/lambda" + stage = var.stage + environment = var.environment + container_family = "${var.bot_name}-poller-amanusde" + execution_role_arn = module.iam.lambda_role_arn + image_uri = var.image_uri + subnet_ids = module.network.private_subnets + security_group_id = module.sgs.lambda_sg_id + schedule_expression = "rate(5 minutes)" + container_env_vars = local.amanusde_poller_env_vars +} + +# aMansyrupUSDT-only Lambda - runs aMansyrupUSDT rebalancing every 5 minutes +# Flow: USDC (ETH) → Stargate → USDC (Mantle) → DEX Swap → syrupUSDT → Aave Supply → aMansyrupUSDT +module "mark_poller_amansyrupusdt_only" { + source = "../../modules/lambda" + stage = var.stage + environment = var.environment + container_family = "${var.bot_name}-poller-amansyrupusdt" + execution_role_arn = module.iam.lambda_role_arn + image_uri = var.image_uri + subnet_ids = 
module.network.private_subnets + security_group_id = module.sgs.lambda_sg_id + schedule_expression = "rate(5 minutes)" + container_env_vars = local.amansyrupusdt_poller_env_vars +} + # Invoice Handler ECS Service - replaces poller Lambda functions # Exposed via public ALB for Goldsky webhook access module "mark_invoice_handler" { diff --git a/package.json b/package.json index cc87a180..6c6e81dc 100644 --- a/package.json +++ b/package.json @@ -26,6 +26,7 @@ "aws:setup": "npx ts-node scripts/aws-shard-setup.ts --manifest", "build": "yarn workspaces foreach -Av --parallel --topological-dev run build", "clean": "yarn workspaces foreach -A --parallel run clean", + "e2e:rebalance": "yarn workspace @mark/poller e2e", "gcp:setup": "npx ts-node scripts/gcp-shard-setup.ts --manifest", "lint": "yarn lint:package && yarn workspaces foreach -A --parallel run lint", "lint:fix": "yarn workspaces foreach -A --parallel run lint:fix", @@ -42,14 +43,6 @@ "dependencies": { "@types/node": "20.17.12" }, - "dependenciesMeta": { - "@eth-optimism/contracts-bedrock": { - "built": false - }, - "@eth-optimism/core-utils": { - "built": false - } - }, "devDependencies": { "@commitlint/cli": "19.6.1", "@commitlint/config-conventional": "19.6.0", @@ -74,5 +67,13 @@ "tsc-alias": "1.8.10", "typescript": "5.7.2" }, + "dependenciesMeta": { + "@eth-optimism/contracts-bedrock": { + "built": false + }, + "@eth-optimism/core-utils": { + "built": false + } + }, "packageManager": "yarn@3.3.1" } diff --git a/packages/adapters/rebalance/src/actions/dex-swap.ts b/packages/adapters/rebalance/src/actions/dex-swap.ts index 94bcd07a..a3ecdf48 100644 --- a/packages/adapters/rebalance/src/actions/dex-swap.ts +++ b/packages/adapters/rebalance/src/actions/dex-swap.ts @@ -149,12 +149,15 @@ export class DexSwapActionHandler implements PostBridgeActionHandler { args: [sender as `0x${string}`, spender as `0x${string}`], }); - if (allowance < swapAmount) { + const approvalAmount = swapAmount + (swapAmount * 
BigInt(slippageBps)) / BigInt(10000); + if (allowance < approvalAmount) { this.logger.info('DexSwap: building approval transaction', { sellToken, spender, currentAllowance: allowance.toString(), - requiredAmount: swapAmount.toString(), + approvalAmount: approvalAmount.toString(), + swapAmount: swapAmount.toString(), + slippageBps, destinationChainId, }); @@ -165,7 +168,7 @@ export class DexSwapActionHandler implements PostBridgeActionHandler { data: encodeFunctionData({ abi: erc20Abi, functionName: 'approve', - args: [spender as `0x${string}`, swapAmount], + args: [spender as `0x${string}`, approvalAmount], }), value: BigInt(0), }, @@ -175,7 +178,7 @@ export class DexSwapActionHandler implements PostBridgeActionHandler { sellToken, spender, currentAllowance: allowance.toString(), - requiredAmount: swapAmount.toString(), + requiredAmount: approvalAmount.toString(), destinationChainId, }); } diff --git a/packages/adapters/rebalance/src/adapters/linea/linea.ts b/packages/adapters/rebalance/src/adapters/linea/linea.ts index 5ec77d19..620520fc 100644 --- a/packages/adapters/rebalance/src/adapters/linea/linea.ts +++ b/packages/adapters/rebalance/src/adapters/linea/linea.ts @@ -432,7 +432,10 @@ export class LineaNativeBridgeAdapter implements BridgeAdapter { // The Linea SDK issues wide-range eth_getLogs on both L1 and L2. // Commercial free-tier providers (Alchemy, DRPC) reject block ranges >10k. // Try configured providers first, then fall back to public RPCs for both chains. - const l1Candidates = [...(this.chains[ETHEREUM_CHAIN_ID.toString()]?.providers ?? []), ...LINEA_SDK_FALLBACK_L1_RPCS]; + const l1Candidates = [ + ...(this.chains[ETHEREUM_CHAIN_ID.toString()]?.providers ?? []), + ...LINEA_SDK_FALLBACK_L1_RPCS, + ]; const l2Candidates = [...(this.chains[LINEA_CHAIN_ID.toString()]?.providers ?? 
[]), ...LINEA_SDK_FALLBACK_L2_RPCS]; if (l2Candidates.length === 0) { diff --git a/packages/adapters/rebalance/src/adapters/pendle/types.ts b/packages/adapters/rebalance/src/adapters/pendle/types.ts index d218db6b..5057643c 100644 --- a/packages/adapters/rebalance/src/adapters/pendle/types.ts +++ b/packages/adapters/rebalance/src/adapters/pendle/types.ts @@ -24,6 +24,6 @@ export const PENDLE_SUPPORTED_CHAINS = { export const USDC_PTUSDE_PAIRS: Record = { 1: { usdc: '0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48', - ptUSDe: '0xE8483517077afa11A9B07f849cee2552f040d7b2', + ptUSDe: '0x3de0ff76e8b528c092d47b9dac775931cef80f49', }, }; diff --git a/packages/adapters/rebalance/src/adapters/stargate/stargate.ts b/packages/adapters/rebalance/src/adapters/stargate/stargate.ts index ec3ffadb..9d33931b 100644 --- a/packages/adapters/rebalance/src/adapters/stargate/stargate.ts +++ b/packages/adapters/rebalance/src/adapters/stargate/stargate.ts @@ -63,9 +63,7 @@ export class StargateBridgeAdapter implements BridgeAdapter { */ private resolveDstToken(route: RebalanceRoute): string | null { if (route.destination === 30826) return USDT_TON_STARGATE; - return ( - getDestinationAssetAddress(route.asset, route.origin, route.destination, this.chains, this.logger) ?? null - ); + return getDestinationAssetAddress(route.asset, route.origin, route.destination, this.chains, this.logger) ?? 
null; } /** diff --git a/packages/adapters/rebalance/src/adapters/zircuit/zircuit.ts b/packages/adapters/rebalance/src/adapters/zircuit/zircuit.ts index f3508321..ce43eae5 100644 --- a/packages/adapters/rebalance/src/adapters/zircuit/zircuit.ts +++ b/packages/adapters/rebalance/src/adapters/zircuit/zircuit.ts @@ -396,12 +396,7 @@ export class ZircuitNativeBridgeAdapter implements BridgeAdapter { data: encodeFunctionData({ abi: zircuitOptimismPortalAbi, functionName: 'proveWithdrawalTransaction', - args: [ - withdrawalTx, - proofResult.l2OutputIndex, - proofResult.outputRootProof, - proofResult.withdrawalProof, - ], + args: [withdrawalTx, proofResult.l2OutputIndex, proofResult.outputRootProof, proofResult.withdrawalProof], }), value: BigInt(0), }, @@ -413,40 +408,44 @@ export class ZircuitNativeBridgeAdapter implements BridgeAdapter { } async isCallbackComplete(route: RebalanceRoute, originTransaction: TransactionReceipt): Promise { - const isL1ToL2 = route.origin === ETHEREUM_CHAIN_ID && route.destination === ZIRCUIT_CHAIN_ID; - const isL2ToL1 = route.origin === ZIRCUIT_CHAIN_ID && route.destination === ETHEREUM_CHAIN_ID; - if (!isL1ToL2 && !isL2ToL1) { - throw new Error(`Unsupported Zircuit route: ${route.origin}->${route.destination}`); - } - if (isL1ToL2) { - return true; - } + try { + const isL1ToL2 = route.origin === ETHEREUM_CHAIN_ID && route.destination === ZIRCUIT_CHAIN_ID; + const isL2ToL1 = route.origin === ZIRCUIT_CHAIN_ID && route.destination === ETHEREUM_CHAIN_ID; + if (!isL1ToL2 && !isL2ToL1) { + throw new Error(`Unsupported Zircuit route: ${route.origin}->${route.destination}`); + } + if (isL1ToL2) { + return true; + } - // L2→L1: complete only when finalized - const l1Client = await this.getClient(ETHEREUM_CHAIN_ID); - const l2Client = await this.getClient(ZIRCUIT_CHAIN_ID); + // L2→L1: complete only when finalized + const l1Client = await this.getClient(ETHEREUM_CHAIN_ID); + const l2Client = await this.getClient(ZIRCUIT_CHAIN_ID); - const 
withdrawalTx = await this.extractWithdrawalTransaction(l2Client, originTransaction); - if (!withdrawalTx) { - // Cannot determine state — treat as complete to avoid stuck entries - return true; - } + const withdrawalTx = await this.extractWithdrawalTransaction(l2Client, originTransaction); + if (!withdrawalTx) { + // Cannot determine state — treat as complete to avoid stuck entries + return true; + } - const withdrawalHash = this.hashWithdrawal(withdrawalTx); - const isFinalized = await l1Client.readContract({ - address: ZIRCUIT_OPTIMISM_PORTAL as `0x${string}`, - abi: zircuitOptimismPortalAbi, - functionName: 'finalizedWithdrawals', - args: [withdrawalHash], - }); + const withdrawalHash = this.hashWithdrawal(withdrawalTx); + const isFinalized = await l1Client.readContract({ + address: ZIRCUIT_OPTIMISM_PORTAL as `0x${string}`, + abi: zircuitOptimismPortalAbi, + functionName: 'finalizedWithdrawals', + args: [withdrawalHash], + }); - this.logger.info('Zircuit isCallbackComplete check', { - txHash: originTransaction.transactionHash, - withdrawalHash, - isFinalized, - }); + this.logger.info('Zircuit isCallbackComplete check', { + txHash: originTransaction.transactionHash, + withdrawalHash, + isFinalized, + }); - return isFinalized as boolean; + return isFinalized as boolean; + } catch (error) { + this.handleError(error, 'check if callback is complete', { route, originTransaction }); + } } private async getClient(chainId: number): Promise { diff --git a/packages/adapters/rebalance/test/actions/dex-swap.spec.ts b/packages/adapters/rebalance/test/actions/dex-swap.spec.ts index d5711429..aa7f7c11 100644 --- a/packages/adapters/rebalance/test/actions/dex-swap.spec.ts +++ b/packages/adapters/rebalance/test/actions/dex-swap.spec.ts @@ -236,7 +236,7 @@ describe('DexSwapActionHandler', () => { const customEstOutput = '9999999999999999999'; mockReadContract .mockResolvedValueOnce(BigInt(1000000) as never) // balanceOf - .mockResolvedValueOnce(BigInt(1000000) as never); // 
allowance (sufficient) + .mockResolvedValueOnce(BigInt(1010000) as never); // allowance (sufficient including slippage padding) axiosPostMock.mockResolvedValueOnce({ data: { diff --git a/packages/adapters/rebalance/test/adapters/pendle/pendle.spec.ts b/packages/adapters/rebalance/test/adapters/pendle/pendle.spec.ts index 6191cbe3..665b5a6e 100644 --- a/packages/adapters/rebalance/test/adapters/pendle/pendle.spec.ts +++ b/packages/adapters/rebalance/test/adapters/pendle/pendle.spec.ts @@ -284,7 +284,7 @@ describe('PendleBridgeAdapter', () => { it('has USDC/ptUSDe pair for mainnet', () => { expect(USDC_PTUSDE_PAIRS[1]).toBeDefined(); expect(USDC_PTUSDE_PAIRS[1].usdc).toBe('0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48'); - expect(USDC_PTUSDE_PAIRS[1].ptUSDe).toBe('0xE8483517077afa11A9B07f849cee2552f040d7b2'); + expect(USDC_PTUSDE_PAIRS[1].ptUSDe).toBe('0x3de0ff76e8b528c092d47b9dac775931cef80f49'); }); it('has mainnet in supported chains', () => { diff --git a/packages/agent/src/adapters.ts b/packages/agent/src/adapters.ts index 370a0121..d525f709 100644 --- a/packages/agent/src/adapters.ts +++ b/packages/agent/src/adapters.ts @@ -65,8 +65,17 @@ export function initializeBaseAdapters( ); // Initialize fill service chain service if configured + // Check all rebalance configs that may use a separate fill service sender let fillServiceChainService: ChainService | undefined; - const fsSenderAddress = config.tacRebalance?.fillService?.senderAddress ?? config.tacRebalance?.fillService?.address; + const fsSenderAddress = + config.tacRebalance?.fillService?.senderAddress ?? + config.tacRebalance?.fillService?.address ?? + config.methRebalance?.fillService?.senderAddress ?? + config.methRebalance?.fillService?.address ?? + config.aManUsdeRebalance?.fillService?.senderAddress ?? + config.aManUsdeRebalance?.fillService?.address ?? + config.aMansyrupUsdtRebalance?.fillService?.senderAddress ?? 
+ config.aMansyrupUsdtRebalance?.fillService?.address; if (config.fillServiceSignerUrl && fsSenderAddress) { logger.info('Initializing Fill Service chain service', { signerUrl: config.fillServiceSignerUrl, diff --git a/packages/agent/src/validation.ts b/packages/agent/src/validation.ts index 0d3ff1bd..e74a8a7e 100644 --- a/packages/agent/src/validation.ts +++ b/packages/agent/src/validation.ts @@ -1,5 +1,11 @@ import { Logger } from '@mark/logger'; -import { MarkConfiguration, TokenRebalanceConfig } from '@mark/core'; +import { + MarkConfiguration, + TokenRebalanceConfig, + TOKEN_REBALANCER_KEYS, + TokenRebalancerKey, + SolanaRebalanceConfig, +} from '@mark/core'; /** * Validates a single token rebalance configuration. @@ -7,7 +13,7 @@ import { MarkConfiguration, TokenRebalanceConfig } from '@mark/core'; */ function validateSingleTokenRebalanceConfig( tokenConfig: TokenRebalanceConfig | undefined, - configName: 'tacRebalance' | 'methRebalance', + configName: TokenRebalancerKey, config: MarkConfiguration, logger: Logger, ): void { @@ -99,11 +105,56 @@ function validateSingleTokenRebalanceConfig( }); } +/** + * Validates Solana ptUSDe rebalance configuration. + * Solana uses a different config shape (SolanaRebalanceConfig) than the EVM token rebalancers. 
+ */ +function validateSolanaRebalanceConfig( + solanaConfig: SolanaRebalanceConfig | undefined, + config: MarkConfiguration, + logger: Logger, +): void { + if (!solanaConfig?.enabled) { + logger.debug('solanaPtusdeRebalance disabled, skipping config validation'); + return; + } + + const errors: string[] = []; + + if (!solanaConfig.ptUsdeThreshold) { + errors.push('solanaPtusdeRebalance.ptUsdeThreshold is required when enabled'); + } + if (!solanaConfig.ptUsdeTarget) { + errors.push('solanaPtusdeRebalance.ptUsdeTarget is required when enabled'); + } + if (!solanaConfig.bridge?.minRebalanceAmount) { + errors.push('solanaPtusdeRebalance.bridge.minRebalanceAmount is required'); + } + if (!config.solana?.privateKey) { + errors.push('solana.privateKey (SOLANA_PRIVATE_KEY) is required for Solana rebalancing'); + } + + if (errors.length > 0) { + const errorMessage = `solanaPtusdeRebalance config validation failed:\n - ${errors.join('\n - ')}`; + logger.error('solanaPtusdeRebalance config validation failed', { errors }); + throw new Error(errorMessage); + } + + logger.info('solanaPtusdeRebalance config validated successfully', { + ptUsdeThreshold: solanaConfig.ptUsdeThreshold, + ptUsdeTarget: solanaConfig.ptUsdeTarget, + minRebalanceAmount: solanaConfig.bridge.minRebalanceAmount, + maxRebalanceAmount: solanaConfig.bridge.maxRebalanceAmount, + }); +} + /** * Validates token rebalance configuration for production readiness. * Throws if required fields are missing when token rebalancing is enabled. 
*/ export function validateTokenRebalanceConfig(config: MarkConfiguration, logger: Logger): void { - validateSingleTokenRebalanceConfig(config.tacRebalance, 'tacRebalance', config, logger); - validateSingleTokenRebalanceConfig(config.methRebalance, 'methRebalance', config, logger); + for (const key of TOKEN_REBALANCER_KEYS) { + validateSingleTokenRebalanceConfig(config[key], key, config, logger); + } + validateSolanaRebalanceConfig(config.solanaPtusdeRebalance, config, logger); } diff --git a/packages/core/src/config.ts b/packages/core/src/config.ts index f585805c..94b6e14d 100644 --- a/packages/core/src/config.ts +++ b/packages/core/src/config.ts @@ -13,13 +13,14 @@ import { RouteRebalancingConfig, PostBridgeActionConfig, LogLevel, + TokenRebalanceConfig, } from './types'; import yaml from 'js-yaml'; import fs, { existsSync, readFileSync } from 'fs'; -import { getSsmParameter } from './ssm'; +import { getSsmParameter, SsmParameterReadError } from './ssm'; import { hexToBase58 } from './solana'; import { isTvmChain } from './tron'; -import { getRebalanceConfigFromS3 } from './s3'; +import { getRebalanceConfigFromS3, getThresholdRebalanceConfigFromS3 } from './s3'; import { stitchConfig, loadManifest, setValueByPath } from './shard'; config(); @@ -194,6 +195,128 @@ export const loadRebalanceRoutes = async (): Promise => { }; }; +interface TokenRebalanceDefaults { + mmThreshold?: string; + mmTarget?: string; + fsThreshold?: string; + fsTarget?: string; + slippageDbps?: number; + minAmount?: string; + maxAmount?: string; +} + +/** + * Convert empty strings to undefined so they don't short-circuit ?? fallback chains. + * Fee-admin S3 export should already convert empty strings to null, but this + * provides a defensive layer in mark to prevent empty string overrides. + */ +const nonEmpty = (value: string | undefined | null): string | undefined => + value === '' || value === null ? 
undefined : value; + +async function loadTokenRebalanceConfig( + s3Config: TokenRebalanceConfig | undefined, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + configJson: Record, + configKey: string, + envPrefix: string, + defaults?: TokenRebalanceDefaults, +): Promise { + // Priority: S3 (fee-admin) > configJson (SSM) > env vars > defaults + const s3 = s3Config; + const cfg = configJson[configKey]; + return { + enabled: + s3?.enabled ?? + parseBooleanValue(cfg?.enabled) ?? + parseBooleanValue(await fromEnv(`${envPrefix}_ENABLED`, true)) ?? + false, + marketMaker: { + address: + nonEmpty(s3?.marketMaker?.address) ?? + cfg?.marketMaker?.address ?? + (await fromEnv(`${envPrefix}_MARKET_MAKER_ADDRESS`, true)) ?? + undefined, + onDemandEnabled: + s3?.marketMaker?.onDemandEnabled ?? + parseBooleanValue(cfg?.marketMaker?.onDemandEnabled) ?? + parseBooleanValue(await fromEnv(`${envPrefix}_MARKET_MAKER_ON_DEMAND_ENABLED`, true)) ?? + false, + thresholdEnabled: + s3?.marketMaker?.thresholdEnabled ?? + parseBooleanValue(cfg?.marketMaker?.thresholdEnabled) ?? + parseBooleanValue(await fromEnv(`${envPrefix}_MARKET_MAKER_THRESHOLD_ENABLED`, true)) ?? + false, + threshold: + nonEmpty(s3?.marketMaker?.threshold) ?? + cfg?.marketMaker?.threshold ?? + (await fromEnv(`${envPrefix}_MARKET_MAKER_THRESHOLD`, true)) ?? + defaults?.mmThreshold ?? + undefined, + targetBalance: + nonEmpty(s3?.marketMaker?.targetBalance) ?? + cfg?.marketMaker?.targetBalance ?? + (await fromEnv(`${envPrefix}_MARKET_MAKER_TARGET_BALANCE`, true)) ?? + defaults?.mmTarget ?? + undefined, + }, + fillService: { + address: + nonEmpty(s3?.fillService?.address) ?? + cfg?.fillService?.address ?? + (await fromEnv(`${envPrefix}_FILL_SERVICE_ADDRESS`, true)) ?? + undefined, + senderAddress: + nonEmpty(s3?.fillService?.senderAddress) ?? + cfg?.fillService?.senderAddress ?? + (await fromEnv(`${envPrefix}_FILL_SERVICE_SENDER_ADDRESS`, true)) ?? 
+ undefined, + thresholdEnabled: + s3?.fillService?.thresholdEnabled ?? + parseBooleanValue(cfg?.fillService?.thresholdEnabled) ?? + parseBooleanValue(await fromEnv(`${envPrefix}_FILL_SERVICE_THRESHOLD_ENABLED`, true)) ?? + false, + threshold: + nonEmpty(s3?.fillService?.threshold) ?? + cfg?.fillService?.threshold ?? + (await fromEnv(`${envPrefix}_FILL_SERVICE_THRESHOLD`, true)) ?? + defaults?.fsThreshold ?? + undefined, + targetBalance: + nonEmpty(s3?.fillService?.targetBalance) ?? + cfg?.fillService?.targetBalance ?? + (await fromEnv(`${envPrefix}_FILL_SERVICE_TARGET_BALANCE`, true)) ?? + defaults?.fsTarget ?? + undefined, + allowCrossWalletRebalancing: + s3?.fillService?.allowCrossWalletRebalancing ?? + parseBooleanValue(cfg?.fillService?.allowCrossWalletRebalancing) ?? + parseBooleanValue(await fromEnv(`${envPrefix}_FILL_SERVICE_ALLOW_CROSS_WALLET`, true)) ?? + false, + }, + bridge: { + slippageDbps: + s3?.bridge?.slippageDbps ?? + cfg?.bridge?.slippageDbps ?? + parseInt( + (await fromEnv(`${envPrefix}_BRIDGE_SLIPPAGE_DBPS`, true)) ?? String(defaults?.slippageDbps ?? 500), + 10, + ), + minRebalanceAmount: + nonEmpty(s3?.bridge?.minRebalanceAmount) ?? + cfg?.bridge?.minRebalanceAmount ?? + (await fromEnv(`${envPrefix}_BRIDGE_MIN_REBALANCE_AMOUNT`, true)) ?? + defaults?.minAmount ?? + '100000000', // Safe default: 100 units (6-decimal tokens like USDC/USDT) + maxRebalanceAmount: + nonEmpty(s3?.bridge?.maxRebalanceAmount) ?? + cfg?.bridge?.maxRebalanceAmount ?? + (await fromEnv(`${envPrefix}_BRIDGE_MAX_REBALANCE_AMOUNT`, true)) ?? + defaults?.maxAmount ?? + '100000000', // Safe default: 100 units (6-decimal tokens) — prevents unlimited bridging + }, + }; +} + export async function loadConfiguration(): Promise { try { const environment = ((await fromEnv('ENVIRONMENT')) ?? 'local') as Environment; @@ -386,167 +509,71 @@ export async function loadConfiguration(): Promise { privateKey: configJson.solana?.privateKey ?? (await fromEnv('SOLANA_PRIVATE_KEY', true)) ?? 
undefined, rpcUrl: configJson.solana?.rpcUrl ?? (await fromEnv('SOLANA_RPC_URL', true)) ?? undefined, }, - tacRebalance: { - enabled: - parseBooleanValue(configJson.tacRebalance?.enabled) ?? - parseBooleanValue(await fromEnv('TAC_REBALANCE_ENABLED', true)) ?? - false, - marketMaker: { - address: - configJson.tacRebalance?.marketMaker?.address ?? - (await fromEnv('TAC_REBALANCE_MARKET_MAKER_ADDRESS', true)) ?? - undefined, - onDemandEnabled: - parseBooleanValue(configJson.tacRebalance?.marketMaker?.onDemandEnabled) ?? - parseBooleanValue(await fromEnv('TAC_REBALANCE_MARKET_MAKER_ON_DEMAND_ENABLED', true)) ?? - false, - thresholdEnabled: - parseBooleanValue(configJson.tacRebalance?.marketMaker?.thresholdEnabled) ?? - parseBooleanValue(await fromEnv('TAC_REBALANCE_MARKET_MAKER_THRESHOLD_ENABLED', true)) ?? - false, - threshold: - configJson.tacRebalance?.marketMaker?.threshold ?? - (await fromEnv('TAC_REBALANCE_MARKET_MAKER_THRESHOLD', true)) ?? - undefined, - targetBalance: - configJson.tacRebalance?.marketMaker?.targetBalance ?? - (await fromEnv('TAC_REBALANCE_MARKET_MAKER_TARGET_BALANCE', true)) ?? - undefined, - }, - fillService: { - address: - configJson.tacRebalance?.fillService?.address ?? - (await fromEnv('TAC_REBALANCE_FILL_SERVICE_ADDRESS', true)) ?? - undefined, - senderAddress: - configJson.tacRebalance?.fillService?.senderAddress ?? - (await fromEnv('TAC_REBALANCE_FILL_SERVICE_SENDER_ADDRESS', true)) ?? - undefined, // Filler's ETH address for sending from mainnet - thresholdEnabled: - parseBooleanValue(configJson.tacRebalance?.fillService?.thresholdEnabled) ?? - parseBooleanValue(await fromEnv('TAC_REBALANCE_FILL_SERVICE_THRESHOLD_ENABLED', true)) ?? - false, - threshold: - configJson.tacRebalance?.fillService?.threshold ?? - (await fromEnv('TAC_REBALANCE_FILL_SERVICE_THRESHOLD', true)) ?? - undefined, - targetBalance: - configJson.tacRebalance?.fillService?.targetBalance ?? - (await fromEnv('TAC_REBALANCE_FILL_SERVICE_TARGET_BALANCE', true)) ?? 
- undefined, - allowCrossWalletRebalancing: - parseBooleanValue(configJson.tacRebalance?.fillService?.allowCrossWalletRebalancing) ?? - parseBooleanValue(await fromEnv('TAC_REBALANCE_FILL_SERVICE_ALLOW_CROSS_WALLET', true)) ?? - false, - }, - bridge: { - slippageDbps: - configJson.tacRebalance?.bridge?.slippageDbps ?? - parseInt((await fromEnv('TAC_REBALANCE_BRIDGE_SLIPPAGE_DBPS', true)) ?? '500', 10), - minRebalanceAmount: - configJson.tacRebalance?.bridge?.minRebalanceAmount ?? - (await fromEnv('TAC_REBALANCE_BRIDGE_MIN_REBALANCE_AMOUNT', true)) ?? - undefined, - maxRebalanceAmount: - configJson.tacRebalance?.bridge?.maxRebalanceAmount ?? - (await fromEnv('TAC_REBALANCE_BRIDGE_MAX_REBALANCE_AMOUNT', true)) ?? - undefined, // Max amount per operation (optional cap) - }, - }, - methRebalance: { - enabled: - parseBooleanValue(configJson.methRebalance?.enabled) ?? - parseBooleanValue(await fromEnv('METH_REBALANCE_ENABLED', true)) ?? - false, - marketMaker: { - address: - configJson.methRebalance?.marketMaker?.address ?? - (await fromEnv('METH_REBALANCE_MARKET_MAKER_ADDRESS', true)) ?? - undefined, - onDemandEnabled: - parseBooleanValue(configJson.methRebalance?.marketMaker?.onDemandEnabled) ?? - parseBooleanValue(await fromEnv('METH_REBALANCE_MARKET_MAKER_ON_DEMAND_ENABLED', true)) ?? - false, - thresholdEnabled: - parseBooleanValue(configJson.methRebalance?.marketMaker?.thresholdEnabled) ?? - parseBooleanValue(await fromEnv('METH_REBALANCE_MARKET_MAKER_THRESHOLD_ENABLED', true)) ?? - false, - threshold: - configJson.methRebalance?.marketMaker?.threshold ?? - (await fromEnv('METH_REBALANCE_MARKET_MAKER_THRESHOLD', true)) ?? - undefined, - targetBalance: - configJson.methRebalance?.marketMaker?.targetBalance ?? - (await fromEnv('METH_REBALANCE_MARKET_MAKER_TARGET_BALANCE', true)) ?? - undefined, - }, - fillService: { - address: - configJson.methRebalance?.fillService?.address ?? - (await fromEnv('METH_REBALANCE_FILL_SERVICE_ADDRESS', true)) ?? 
- undefined, - senderAddress: - configJson.methRebalance?.fillService?.senderAddress ?? - (await fromEnv('METH_REBALANCE_FILL_SERVICE_SENDER_ADDRESS', true)) ?? - undefined, // Filler's ETH address for sending from mainnet - thresholdEnabled: - parseBooleanValue(configJson.methRebalance?.fillService?.thresholdEnabled) ?? - parseBooleanValue(await fromEnv('METH_REBALANCE_FILL_SERVICE_THRESHOLD_ENABLED', true)) ?? - false, - threshold: - configJson.methRebalance?.fillService?.threshold ?? - (await fromEnv('METH_REBALANCE_FILL_SERVICE_THRESHOLD', true)) ?? - undefined, - targetBalance: - configJson.methRebalance?.fillService?.targetBalance ?? - (await fromEnv('METH_REBALANCE_FILL_SERVICE_TARGET_BALANCE', true)) ?? - undefined, - allowCrossWalletRebalancing: - parseBooleanValue(configJson.methRebalance?.fillService?.allowCrossWalletRebalancing) ?? - parseBooleanValue(await fromEnv('METH_REBALANCE_FILL_SERVICE_ALLOW_CROSS_WALLET', true)) ?? - false, - }, - bridge: { - slippageDbps: - configJson.methRebalance?.bridge?.slippageDbps ?? - parseInt((await fromEnv('METH_REBALANCE_BRIDGE_SLIPPAGE_DBPS', true)) ?? '500', 10), - minRebalanceAmount: - configJson.methRebalance?.bridge?.minRebalanceAmount ?? - (await fromEnv('METH_REBALANCE_BRIDGE_MIN_REBALANCE_AMOUNT', true)) ?? - undefined, - maxRebalanceAmount: - configJson.methRebalance?.bridge?.maxRebalanceAmount ?? - (await fromEnv('METH_REBALANCE_BRIDGE_MAX_REBALANCE_AMOUNT', true)) ?? - undefined, // Max amount per operation (optional cap) - }, - }, - solanaPtusdeRebalance: { - enabled: - parseBooleanValue(configJson.solanaPtusdeRebalance?.enabled) ?? - parseBooleanValue(await fromEnv('SOLANA_PTUSDE_REBALANCE_ENABLED', true)) ?? - true, - ptUsdeThreshold: - configJson.solanaPtusdeRebalance?.ptUsdeThreshold ?? - (await fromEnv('SOLANA_PTUSDE_REBALANCE_THRESHOLD', true)) ?? - '100000000000', // 100 ptUSDe (9 decimals on Solana) - ptUsdeTarget: - configJson.solanaPtusdeRebalance?.ptUsdeTarget ?? 
- (await fromEnv('SOLANA_PTUSDE_REBALANCE_TARGET', true)) ?? - '500000000000', // 500 ptUSDe (9 decimals on Solana) - bridge: { - slippageDbps: - configJson.solanaPtusdeRebalance?.bridge?.slippageDbps ?? - parseInt((await fromEnv('SOLANA_PTUSDE_REBALANCE_BRIDGE_SLIPPAGE_DBPS', true)) ?? '50', 10), // 0.5% default - minRebalanceAmount: - configJson.solanaPtusdeRebalance?.bridge?.minRebalanceAmount ?? - (await fromEnv('SOLANA_PTUSDE_REBALANCE_BRIDGE_MIN_REBALANCE_AMOUNT', true)) ?? - '1000000', // 1 USDC minimum (6 decimals) - maxRebalanceAmount: - configJson.solanaPtusdeRebalance?.bridge?.maxRebalanceAmount ?? - (await fromEnv('SOLANA_PTUSDE_REBALANCE_BRIDGE_MAX_REBALANCE_AMOUNT', true)) ?? - '100000000', // 100 USDC max (6 decimals) - }, - }, + // Fetch threshold configs from S3 (fee-admin) - highest priority source + // Falls back gracefully to SSM/env if S3 is unavailable or empty + ...(await (async () => { + const thresholdS3 = await getThresholdRebalanceConfigFromS3(); + const solanaS3 = thresholdS3?.solanaPtusdeRebalance; + return { + tacRebalance: await loadTokenRebalanceConfig( + thresholdS3?.tacRebalance, + configJson, + 'tacRebalance', + 'TAC_REBALANCE', + ), + methRebalance: await loadTokenRebalanceConfig( + thresholdS3?.methRebalance, + configJson, + 'methRebalance', + 'METH_REBALANCE', + ), + aManUsdeRebalance: await loadTokenRebalanceConfig( + thresholdS3?.aManUsdeRebalance, + configJson, + 'aManUsdeRebalance', + 'AMANUSDE_REBALANCE', + ), + aMansyrupUsdtRebalance: await loadTokenRebalanceConfig( + thresholdS3?.aMansyrupUsdtRebalance, + configJson, + 'aMansyrupUsdtRebalance', + 'AMANSYRUPUSDT_REBALANCE', + ), + solanaPtusdeRebalance: { + enabled: + solanaS3?.enabled ?? + parseBooleanValue(configJson.solanaPtusdeRebalance?.enabled) ?? + parseBooleanValue(await fromEnv('SOLANA_PTUSDE_REBALANCE_ENABLED', true)) ?? + false, + ptUsdeThreshold: + nonEmpty(solanaS3?.ptUsdeThreshold) ?? + configJson.solanaPtusdeRebalance?.ptUsdeThreshold ?? 
+ (await fromEnv('SOLANA_PTUSDE_REBALANCE_THRESHOLD', true)) ?? + '100000000000', // 100 ptUSDe (9 decimals on Solana) + ptUsdeTarget: + nonEmpty(solanaS3?.ptUsdeTarget) ?? + configJson.solanaPtusdeRebalance?.ptUsdeTarget ?? + (await fromEnv('SOLANA_PTUSDE_REBALANCE_TARGET', true)) ?? + '500000000000', // 500 ptUSDe (9 decimals on Solana) + bridge: { + slippageDbps: + solanaS3?.bridge?.slippageDbps ?? + configJson.solanaPtusdeRebalance?.bridge?.slippageDbps ?? + parseInt((await fromEnv('SOLANA_PTUSDE_REBALANCE_BRIDGE_SLIPPAGE_DBPS', true)) ?? '50', 10), // 0.5% default + minRebalanceAmount: + nonEmpty(solanaS3?.bridge?.minRebalanceAmount) ?? + configJson.solanaPtusdeRebalance?.bridge?.minRebalanceAmount ?? + (await fromEnv('SOLANA_PTUSDE_REBALANCE_BRIDGE_MIN_REBALANCE_AMOUNT', true)) ?? + '1000000', // 1 USDC minimum (6 decimals) + maxRebalanceAmount: + nonEmpty(solanaS3?.bridge?.maxRebalanceAmount) ?? + configJson.solanaPtusdeRebalance?.bridge?.maxRebalanceAmount ?? + (await fromEnv('SOLANA_PTUSDE_REBALANCE_BRIDGE_MAX_REBALANCE_AMOUNT', true)) ?? + '100000000', // 100 USDC max (6 decimals) + }, + }, + }; + })()), redis: configJson.redis ?? { host: await requireEnv('REDIS_HOST'), port: parseInt(await requireEnv('REDIS_PORT')), @@ -581,6 +608,7 @@ export async function loadConfiguration(): Promise { }; validateConfiguration(config); + logThresholdRebalancerConfigs(config); return config; } catch (_error: unknown) { const error = _error as Error; @@ -588,6 +616,57 @@ export async function loadConfiguration(): Promise { } } +/** + * Log loaded threshold rebalancer configs at startup. + * Only logs non-secret operational parameters: addresses, thresholds, + * targets, slippage, and amounts. No private keys, mnemonics, or API keys. 
+ */ +function logThresholdRebalancerConfigs(config: MarkConfiguration): void { + const logTokenRebalancer = (name: string, cfg: TokenRebalanceConfig) => { + console.log(`[ThresholdConfig] ${name}:`, { + enabled: cfg.enabled, + marketMaker: { + address: cfg.marketMaker.address, + onDemandEnabled: cfg.marketMaker.onDemandEnabled, + thresholdEnabled: cfg.marketMaker.thresholdEnabled, + threshold: cfg.marketMaker.threshold, + targetBalance: cfg.marketMaker.targetBalance, + }, + fillService: { + address: cfg.fillService.address, + senderAddress: cfg.fillService.senderAddress, + thresholdEnabled: cfg.fillService.thresholdEnabled, + threshold: cfg.fillService.threshold, + targetBalance: cfg.fillService.targetBalance, + allowCrossWalletRebalancing: cfg.fillService.allowCrossWalletRebalancing, + }, + bridge: { + slippageDbps: cfg.bridge.slippageDbps, + minRebalanceAmount: cfg.bridge.minRebalanceAmount, + maxRebalanceAmount: cfg.bridge.maxRebalanceAmount, + }, + }); + }; + + if (config.tacRebalance) logTokenRebalancer('tacRebalance', config.tacRebalance); + if (config.methRebalance) logTokenRebalancer('methRebalance', config.methRebalance); + if (config.aManUsdeRebalance) logTokenRebalancer('aManUsdeRebalance', config.aManUsdeRebalance); + if (config.aMansyrupUsdtRebalance) logTokenRebalancer('aMansyrupUsdtRebalance', config.aMansyrupUsdtRebalance); + + if (config.solanaPtusdeRebalance) { + console.log('[ThresholdConfig] solanaPtusdeRebalance:', { + enabled: config.solanaPtusdeRebalance.enabled, + ptUsdeThreshold: config.solanaPtusdeRebalance.ptUsdeThreshold, + ptUsdeTarget: config.solanaPtusdeRebalance.ptUsdeTarget, + bridge: { + slippageDbps: config.solanaPtusdeRebalance.bridge.slippageDbps, + minRebalanceAmount: config.solanaPtusdeRebalance.bridge.minRebalanceAmount, + maxRebalanceAmount: config.solanaPtusdeRebalance.bridge.maxRebalanceAmount, + }, + }); + } +} + function validateConfiguration(config: MarkConfiguration): void { if (!config.web3SignerUrl) { throw new 
ConfigurationError('Signer address is required'); @@ -633,7 +712,14 @@ export const requireEnv = async (name: string, checkSsm = false): Promise => { let value = undefined; if (checkSsm) { - value = await getSsmParameter(name); + try { + value = await getSsmParameter(name); + } catch (error) { + if (error instanceof SsmParameterReadError && process.env[name] !== undefined) { + return process.env[name]; + } + throw error; + } } return value ?? process.env[name]; }; diff --git a/packages/core/src/s3.ts b/packages/core/src/s3.ts index 8672a3a3..1eeac0bf 100644 --- a/packages/core/src/s3.ts +++ b/packages/core/src/s3.ts @@ -1,5 +1,5 @@ import { S3Client, GetObjectCommand } from '@aws-sdk/client-s3'; -import { RebalanceConfig } from './types/config'; +import { RebalanceConfig, ThresholdRebalanceS3Config } from './types/config'; // Singleton client to prevent race conditions let s3Client: S3Client | null = null; @@ -80,3 +80,54 @@ export const getRebalanceConfigFromS3 = async (): Promise SSM/configJson > env vars + */ +export const getThresholdRebalanceConfigFromS3 = async (): Promise => { + try { + const bucket = process.env.REBALANCE_CONFIG_S3_BUCKET; + const key = process.env.THRESHOLD_REBALANCE_CONFIG_S3_KEY || 'threshold-rebalance-config.json'; + const region = process.env.REBALANCE_CONFIG_S3_REGION; + + if (!bucket) { + return null; + } + + const client = getS3Client(region); + if (!client) { + return null; + } + + const command = new GetObjectCommand({ + Bucket: bucket, + Key: key, + }); + + const response = await client.send(command); + + if (!response.Body) { + return null; + } + + const bodyString = await response.Body.transformToString(); + const config = JSON.parse(bodyString) as ThresholdRebalanceS3Config; + + const configKeys = Object.keys(config); + console.log('Successfully loaded threshold rebalance config from S3', { + bucket, + key, + configKeys, + configCount: configKeys.length, + }); + + return config; + } catch (error) { + console.warn('Failed to 
fetch threshold rebalance config from S3:', error instanceof Error ? error.message : error); + return null; + } +}; diff --git a/packages/core/src/shard/stitcher.ts b/packages/core/src/shard/stitcher.ts index 6ba03518..8c6cfde4 100644 --- a/packages/core/src/shard/stitcher.ts +++ b/packages/core/src/shard/stitcher.ts @@ -16,7 +16,7 @@ import { ShardManifest, ShardedFieldConfig, StitcherOptions, ShardError, ShardEr import { getGcpSecret, configureGcpClient } from './gcp-secret-manager'; import { shamirReconstructPair, isValidShare } from './shamir'; import { xorReconstruct } from './xor'; -import { setValueByPath, deleteValueByPath } from './path-utils'; +import { setValueByPath, deleteValueByPath, getValueByPath } from './path-utils'; import { getSsmParameter } from '../ssm'; /** @@ -169,25 +169,35 @@ async function processShardedField( ); } - // Fetch Share 1 from AWS SSM Parameter Store - logger?.debug?.(`Fetching Share 1 for '${path}' from AWS SSM: ${awsParamName}`); + // Use Share 1 from configJson if already pre-loaded (avoids redundant SSM read) + const preloadedValue = getValueByPath(configJson, path); + let share1Value: string; - const share1Value = await getSsmParameter(awsParamName); + if (typeof preloadedValue === 'string' && preloadedValue.length > 0) { + share1Value = preloadedValue; + logger?.debug?.(`Using pre-loaded Share 1 for '${path}' from config`); + } else { + logger?.debug?.(`Fetching Share 1 for '${path}' from AWS SSM: ${awsParamName}`); - if (share1Value === undefined || share1Value === null) { - throw new ShardError( - `Share 1 not found in AWS SSM at '${awsParamName}' for field '${path}'`, - ShardErrorCode.FIELD_NOT_FOUND, - { path, awsParamName }, - ); - } + const ssmValue = await getSsmParameter(awsParamName); - if (typeof share1Value !== 'string') { - throw new ShardError( - `Share 1 from AWS SSM '${awsParamName}' must be a string, got ${typeof share1Value}`, - ShardErrorCode.INVALID_SHARE_FORMAT, - { path, awsParamName, type: typeof 
share1Value }, - ); + if (ssmValue === undefined || ssmValue === null) { + throw new ShardError( + `Share 1 not found in AWS SSM at '${awsParamName}' for field '${path}'`, + ShardErrorCode.FIELD_NOT_FOUND, + { path, awsParamName }, + ); + } + + if (typeof ssmValue !== 'string') { + throw new ShardError( + `Share 1 from AWS SSM '${awsParamName}' must be a string, got ${typeof ssmValue}`, + ShardErrorCode.INVALID_SHARE_FORMAT, + { path, awsParamName, type: typeof ssmValue }, + ); + } + + share1Value = ssmValue; } // Fetch Share 2 from GCP Secret Manager diff --git a/packages/core/src/ssm.ts b/packages/core/src/ssm.ts index bb755eef..0a6556be 100644 --- a/packages/core/src/ssm.ts +++ b/packages/core/src/ssm.ts @@ -1,16 +1,25 @@ -import { SSMClient, DescribeParametersCommand, GetParameterCommand } from '@aws-sdk/client-ssm'; +import { SSMClient, GetParameterCommand } from '@aws-sdk/client-ssm'; -// Singleton client to prevent race conditions let ssmClient: SSMClient | null = null; let clientInitializationFailed = false; +export class SsmParameterReadError extends Error { + constructor( + public readonly parameterName: string, + message: string, + public readonly originalError?: unknown, + ) { + super(message); + this.name = 'SsmParameterReadError'; + } +} + const getSSMClient = (): SSMClient | null => { if (clientInitializationFailed) { return null; } if (!ssmClient) { - // Check if AWS region is available before attempting to initialize if (!process.env.AWS_REGION && !process.env.AWS_DEFAULT_REGION) { console.warn('AWS region not configured, using environment variable fallbacks'); clientInitializationFailed = true; @@ -32,61 +41,81 @@ const getSSMClient = (): SSMClient | null => { return ssmClient; }; +const MAX_RETRIES = 3; +const BASE_DELAY_MS = 200; + +async function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +function isRetryableError(error: unknown): boolean { + if (!(error instanceof Error)) return false; + 
const name = (error as { name?: string }).name ?? ''; + const message = error.message ?? ''; + return ( + name === 'ThrottlingException' || + name === 'TooManyRequestsException' || + name === 'ProvisionedThroughputExceededException' || + name === 'InternalServerError' || + name === 'ServiceUnavailableException' || + message.includes('Rate exceeded') || + message.includes('Throttling') || + message.includes('ECONNRESET') || + message.includes('socket hang up') + ); +} + /** - * Gets a parameter from AWS Systems Manager Parameter Store + * Gets a parameter from AWS Systems Manager Parameter Store. + * Uses GetParameter directly (no DescribeParameters pre-check) to halve API calls. + * Retries on transient/throttling errors with exponential backoff. + * * @param name - The name of the parameter - * @returns - The parameter string value, or undefined if the parameter not found or SSM is unavailable. + * @returns The parameter string value, or undefined if the parameter is not found. + * @throws SsmParameterReadError when the SSM client is unavailable or the read fails after retries. */ export const getSsmParameter = async (name: string): Promise<string | undefined> => { - try { - const client = getSSMClient(); - if (!client) { - return undefined; - } + const client = getSSMClient(); + if (!client) { + throw new SsmParameterReadError(name, `SSM client unavailable while fetching parameter '${name}'`); + } - // Check if the parameter exists. 
- const describeParametersCommand = new DescribeParametersCommand({ - ParameterFilters: [ - { - Key: 'Name', - Option: 'Equals', - Values: [name], - }, - ], - }); - - let describeParametersResponse; + const command = new GetParameterCommand({ + Name: name, + WithDecryption: true, + }); + + for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) { try { - describeParametersResponse = await client.send(describeParametersCommand); + const response = await client.send(command); + return response.Parameter?.Value; } catch (error) { - // Handle region-related and other AWS configuration errors - console.warn(`⚠️ Failed to fetch SSM parameter '${name}':`, error instanceof Error ? error.message : error); - return undefined; - } + const errorName = (error as { name?: string }).name ?? ''; - if (!describeParametersResponse.Parameters?.length) { - return undefined; - } + if (errorName === 'ParameterNotFound') { + return undefined; + } - // Get the parameter value. - const getParameterCommand = new GetParameterCommand({ - Name: name, - WithDecryption: true, - }); + if (isRetryableError(error) && attempt < MAX_RETRIES) { + const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 100; + console.warn( + `⚠️ SSM parameter '${name}' read failed (attempt ${attempt + 1}/${MAX_RETRIES + 1}, retrying in ${Math.round(delay)}ms):`, + error instanceof Error ? error.message : error, + ); + await sleep(delay); + continue; + } - let getParameterResponse; - try { - getParameterResponse = await client.send(getParameterCommand); - } catch (error) { - // Handle region-related and other AWS configuration errors - console.warn(`⚠️ Failed to fetch SSM parameter '${name}':`, error instanceof Error ? error.message : error); - return undefined; + console.warn( + `⚠️ Failed to fetch SSM parameter '${name}' after ${attempt + 1} attempt(s):`, + error instanceof Error ? 
error.message : error, + ); + throw new SsmParameterReadError( + name, + `Failed to fetch SSM parameter '${name}' after ${attempt + 1} attempt(s): ${error instanceof Error ? error.message : String(error)}`, + error, + ); } - - return getParameterResponse.Parameter?.Value; - } catch (error) { - // Fallback catch for any unexpected errors - console.warn(`⚠️ Failed to fetch SSM parameter '${name}':`, error instanceof Error ? error.message : error); - return undefined; } + + throw new SsmParameterReadError(name, `Unexpected failure while fetching SSM parameter '${name}'`); }; diff --git a/packages/core/src/types/config.ts b/packages/core/src/types/config.ts index 3f7c57e1..f171db9f 100644 --- a/packages/core/src/types/config.ts +++ b/packages/core/src/types/config.ts @@ -147,26 +147,29 @@ export interface TokenRebalanceConfig { enabled: boolean; // Market Maker receiver configuration marketMaker: { - address?: string; // EVM address on TAC for MM + address?: string; // EVM address for MM on destination chain onDemandEnabled: boolean; // Enable invoice-triggered rebalancing thresholdEnabled: boolean; // Enable balance-threshold rebalancing - threshold?: string; // Min USDT balance (6 decimals) - targetBalance?: string; // Target after threshold-triggered rebalance + threshold?: string; // Balance threshold that triggers rebalancing. 
Units depend on rebalancer: + // TAC USDT: native 6-decimal (code converts to 18-decimal internally) + // mETH: 18-decimal wei (WETH/mETH are natively 18-decimal) + // aManUSDe/aMansyrupUSDT: 18-decimal (getEvmBalance normalizes to 18-decimal) + targetBalance?: string; // Target balance after rebalancing (same units as threshold) }; // Fill Service receiver configuration fillService: { - address?: string; // EVM address on TAC for FS (destination) - also used as sender on ETH if senderAddress not set + address?: string; // EVM address for FS on destination chain - also used as sender on ETH if senderAddress not set senderAddress?: string; // Optional: ETH sender address if different from 'address' (rare - same key = same address) thresholdEnabled: boolean; // Enable balance-threshold rebalancing - threshold?: string; // Min USDT balance (6 decimals) - targetBalance?: string; // Target after threshold-triggered rebalance + threshold?: string; // Balance threshold (same unit semantics as marketMaker.threshold) + targetBalance?: string; // Target balance (same unit semantics as marketMaker.targetBalance) allowCrossWalletRebalancing?: boolean; // Allow MM to fund FS rebalancing when FS has insufficient ETH USDT }; // Shared bridge configuration bridge: { - slippageDbps: number; // Slippage for Stargate (default: 500 = 5%) - minRebalanceAmount: string; // Min amount per operation (6 decimals) - maxRebalanceAmount?: string; // Max amount per operation (optional cap) + slippageDbps: number; // Slippage tolerance in decibasis points (500 = 5%) + minRebalanceAmount: string; // Min amount per bridge operation (in source token native decimals, e.g., 6-dec for USDC) + maxRebalanceAmount?: string; // Max amount per bridge operation (same units as minRebalanceAmount) }; } @@ -186,6 +189,27 @@ export interface SolanaRebalanceConfig { maxRebalanceAmount?: string; // Max USDC amount per operation (optional cap) }; } +export type TokenRebalancerKey = 'tacRebalance' | 'methRebalance' 
| 'aManUsdeRebalance' | 'aMansyrupUsdtRebalance'; +export const TOKEN_REBALANCER_KEYS: TokenRebalancerKey[] = [ + 'tacRebalance', + 'methRebalance', + 'aManUsdeRebalance', + 'aMansyrupUsdtRebalance', +]; + +/** + * S3 threshold rebalance config shape exported by fee-admin. + * Each key maps to its corresponding TokenRebalanceConfig or SolanaRebalanceConfig. + * Loaded from threshold-rebalance-config.json in the solver's S3 bucket. + */ +export interface ThresholdRebalanceS3Config { + tacRebalance?: TokenRebalanceConfig; + methRebalance?: TokenRebalanceConfig; + aManUsdeRebalance?: TokenRebalanceConfig; + aMansyrupUsdtRebalance?: TokenRebalanceConfig; + solanaPtusdeRebalance?: SolanaRebalanceConfig; +} + export interface RedisConfig { host: string; port: number; @@ -240,6 +264,8 @@ export interface MarkConfiguration extends RebalanceConfig { solanaPtusdeRebalance?: SolanaRebalanceConfig; tacRebalance?: TokenRebalanceConfig; methRebalance?: TokenRebalanceConfig; + aManUsdeRebalance?: TokenRebalanceConfig; + aMansyrupUsdtRebalance?: TokenRebalanceConfig; // Mantle bridge configuration mantle?: { l2Gas?: number; // L2 gas limit for bridge transactions (default: 200000) diff --git a/packages/core/test/shard/stitcher.spec.ts b/packages/core/test/shard/stitcher.spec.ts index 9b7c6c6c..316bdda7 100644 --- a/packages/core/test/shard/stitcher.spec.ts +++ b/packages/core/test/shard/stitcher.spec.ts @@ -102,6 +102,35 @@ describe('stitcher', () => { expect(result.solana.rpcUrl).toBe('https://api.mainnet-beta.solana.com'); // Unchanged }); + it('should use pre-loaded Share 1 from config without re-reading SSM', async () => { + const originalSecret = 'preloaded-secret'; + const { share1, share2 } = shamirSplitPair(originalSecret); + + const config = { + web3_signer_private_key: share1, + }; + + mockedGetGcpSecret.mockResolvedValue(share2); + + const manifest: ShardManifest = { + version: '1.0', + shardedFields: [ + { + path: 'web3_signer_private_key', + awsParamName: 
'/test/web3_signer_private_key_share1', + gcpSecretRef: { project: 'test-project', secretId: 'test-secret' }, + method: 'shamir', + }, + ], + }; + + const result = await stitchConfig(config, manifest) as { web3_signer_private_key: string }; + + expect(result.web3_signer_private_key).toBe(originalSecret); + expect(mockedGetSsmParameter).not.toHaveBeenCalled(); + expect(mockedGetGcpSecret).toHaveBeenCalledWith('test-project', 'test-secret', undefined); + }); + it('should handle numeric object keys (chain IDs)', async () => { const originalSecret = 'chain-1-private-key'; const { share1, share2 } = shamirSplitPair(originalSecret); diff --git a/packages/core/test/ssm.spec.ts b/packages/core/test/ssm.spec.ts new file mode 100644 index 00000000..1a4a2a76 --- /dev/null +++ b/packages/core/test/ssm.spec.ts @@ -0,0 +1,71 @@ +/** + * Tests for SSM parameter loading. + */ + +describe('ssm', () => { + beforeEach(() => { + jest.resetModules(); + jest.clearAllMocks(); + process.env.AWS_REGION = 'sa-east-1'; + }); + + afterEach(() => { + delete process.env.AWS_REGION; + delete process.env.AWS_DEFAULT_REGION; + jest.useRealTimers(); + }); + + async function loadSsmModule(sendMock: jest.Mock) { + jest.doMock('@aws-sdk/client-ssm', () => ({ + SSMClient: jest.fn().mockImplementation(() => ({ + send: sendMock, + })), + GetParameterCommand: class GetParameterCommand { + constructor(public readonly input: unknown) {} + }, + })); + + return import('../src/ssm'); + } + + it('returns undefined for ParameterNotFound without retrying', async () => { + const sendMock = jest.fn().mockRejectedValue(Object.assign(new Error('missing'), { name: 'ParameterNotFound' })); + const { getSsmParameter } = await loadSsmModule(sendMock); + + await expect(getSsmParameter('/test/missing')).resolves.toBeUndefined(); + expect(sendMock).toHaveBeenCalledTimes(1); + }); + + it('retries throttling errors and eventually succeeds', async () => { + jest.useFakeTimers(); + + const sendMock = jest + .fn() + 
.mockRejectedValueOnce(Object.assign(new Error('Rate exceeded'), { name: 'ThrottlingException' })) + .mockRejectedValueOnce(Object.assign(new Error('Rate exceeded'), { name: 'ThrottlingException' })) + .mockResolvedValue({ Parameter: { Value: 'secret-value' } }); + + const { getSsmParameter } = await loadSsmModule(sendMock); + + const promise = getSsmParameter('/test/throttled'); + await Promise.resolve(); + await jest.advanceTimersByTimeAsync(1000); + + await expect(promise).resolves.toBe('secret-value'); + expect(sendMock).toHaveBeenCalledTimes(3); + }); + + it('throws a typed error for non-not-found SSM failures', async () => { + const sendMock = jest + .fn() + .mockRejectedValue(Object.assign(new Error('Access denied to parameter'), { name: 'AccessDeniedException' })); + + const { getSsmParameter, SsmParameterReadError } = await loadSsmModule(sendMock); + + const promise = getSsmParameter('/test/denied'); + + await expect(promise).rejects.toBeInstanceOf(SsmParameterReadError); + await expect(promise).rejects.toThrow('Access denied to parameter'); + expect(sendMock).toHaveBeenCalledTimes(1); + }); +}); diff --git a/packages/poller/.env.e2e.example b/packages/poller/.env.e2e.example new file mode 100644 index 00000000..ffeb9916 --- /dev/null +++ b/packages/poller/.env.e2e.example @@ -0,0 +1,190 @@ +# ─── E2E Rebalancer Test Environment ───────────────────────────────────────── +# Copy to .env.e2e then fill in wallet keys and addresses: +# cp .env.e2e.example .env.e2e +# +# Run ALL rebalancers in dry-run mode: +# yarn e2e --run-modes all --dry-run +# +# Run a single rebalancer: +# yarn e2e --run-modes methOnly --dry-run +# yarn e2e --run-modes tacOnly --dry-run +# yarn e2e --run-modes aManUsdeOnly --dry-run +# yarn e2e --run-modes aMansyrupUsdtOnly --dry-run +# yarn e2e --run-modes solanaUsdcOnly --dry-run +# +# Run with JSON config overrides: +# yarn e2e --config ./e2e-config.example.json --dry-run +# +# Prerequisites: +# 1. 
PostgreSQL: yarn workspace @mark/database db:setup +# 2. Redis: yarn redis:up + +# ─── Environment ───────────────────────────────────────────────────────────── +# Use 'mainnet' so the config loader fetches the mainnet Everclear hosted config +# (chain definitions, asset addresses, etc.) from S3. +# MARK_CONFIG_MAINNET={} prevents SSM lookup for the mark config JSON — env vars below are used instead. +ENVIRONMENT=mainnet +STAGE=development +LOG_LEVEL=info +MARK_CONFIG_MAINNET={} +SHARD_MANIFEST= + +# ─── SSM-backed optional params (set empty to prevent SSM lookups) ──────── +BINANCE_API_KEY= +BINANCE_API_SECRET= +COINBASE_API_KEY= +COINBASE_API_SECRET= +KRAKEN_API_KEY= +KRAKEN_API_SECRET= +NEAR_JWT_TOKEN= +STARGATE_API_URL= +TAC_TON_RPC_URL= +TAC_NETWORK=mainnet +TON_MNEMONIC= +TON_RPC_URL= +TON_API_KEY= +TON_SIGNER_ADDRESS= + +# ─── Wallet ────────────────────────────────────────────────────────────────── +# For dry-run, these just need to be valid keys/addresses to initialize adapters. +# No real transactions are submitted. +WEB3_SIGNER_PRIVATE_KEY= # hex private key (EVM) +SIGNER_URL= # web3signer URL (optional, can be empty for dry-run) +SIGNER_ADDRESS= # EVM address corresponding to WEB3_SIGNER_PRIVATE_KEY +SOL_SIGNER_ADDRESS= # Solana address corresponding to SOLANA_PRIVATE_KEY + +# Fill Service signer (if different from main signer; leave empty to use main signer) +FILL_SERVICE_WEB3_SIGNER_PRIVATE_KEY= # hex private key for FS wallet (optional) +FILL_SERVICE_SIGNER_URL= # web3signer URL for FS wallet (optional; empty also prevents SSM lookup) + +# ─── RPC Providers ─────────────────────────────────────────────────────────── +# Provide RPC URLs for all chains used by the rebalancers. +# Multiple URLs can be separated by commas for fallback. 
+ +# Ethereum mainnet (required by: mETH, TAC, aManUSDe, aMansyrupUSDT, Solana) +CHAIN_1_PROVIDERS=https://1rpc.io/eth + +# Mantle (required by: mETH, aManUSDe, aMansyrupUSDT) +CHAIN_5000_PROVIDERS=https://mantle.drpc.org + +# TAC / Tron (required by: TAC USDT) +CHAIN_239_PROVIDERS= # TAC RPC URL + +# Solana (required by: Solana ptUSDe) +SOLANA_PRIVATE_KEY= # base58 Solana private key +SOLANA_RPC_URL=https://api.mainnet-beta.solana.com +CHAIN_1399811149_PROVIDERS=https://api.mainnet-beta.solana.com + +# ─── Chain Configuration ──────────────────────────────────────────────────── +# Include all chain IDs used by the rebalancers you want to test +CHAIN_IDS=1,5000,239,1399811149 +SUPPORTED_SETTLEMENT_DOMAINS=1,5000 +SUPPORTED_ASSET_SYMBOLS=USDC,USDT,WETH,mETH,USDe,syrupUSDT,aManUSDe,aMansyrupUSDT + +# Chain assets (format: SYMBOL,address,decimals,TICKER,isNative) +# ETH mainnet — USDC, USDT, WETH +CHAIN_1_ASSETS=USDC,0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48,6,USDC,false;WETH,0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2,18,WETH,false;USDT,0xdAC17F958D2ee523a2206206994597C13D831ec7,6,USDT,false + +# Mantle — USDC, USDT, WETH, mETH, USDe, syrupUSDT, aManUSDe, aMansyrupUSDT +# NOTE: aManUSDe and aMansyrupUSDT addresses are Aave lending pool receipt tokens on Mantle. +# These are loaded from chaindata at runtime. The values below are for local fallback only. +# Replace with real addresses from your deployment if chaindata is not available. 
+CHAIN_5000_ASSETS=USDC,0x09Bc4E0D864854c6aFB6eB9A9cdF58aC190D0dF9,6,USDC,false;WETH,0xdEAddEaDdeadDEadDEADDEAddEADDEAddead1111,18,WETH,false;USDT,0x201EBa5CC46D216Ce6DC03F6a759e8E766e956aE,6,USDT,false;mETH,0xcDA86A272531e8640cD7F1a92c01839911B90bb0,18,mETH,false + +# TAC / Tron — USDT +CHAIN_239_ASSETS=USDT,0xdAC17F958D2ee523a2206206994597C13D831ec7,6,USDT,false + +# ─── Database ──────────────────────────────────────────────────────────────── +DATABASE_URL=postgresql://postgres:postgres@localhost:5433/mark_dev?sslmode=disable +DATABASE_MIGRATION_PATH=db/migrations + +# ─── Redis ─────────────────────────────────────────────────────────────────── +REDIS_HOST=127.0.0.1 +REDIS_PORT=6379 + +# ─── Metrics (disabled for testing) ───────────────────────────────────────── +PUSH_GATEWAY_URL=http://localhost:9091 +PROMETHEUS_ENABLED=false + +# ─── Everclear API ────────────────────────────────────────────────────────── +EVERCLEAR_API_URL=https://api.everclear.org + +# ─── Routes ───────────────────────────────────────────────────────────────── +# Leave unset — routes are not needed for rebalancer-only testing. +# ROUTES_LOCAL_YAML= + +# ═══════════════════════════════════════════════════════════════════════════════ +# REBALANCER CONFIGURATIONS +# All rebalancers enabled with high thresholds so they always trigger in dry-run. +# Fill in addresses with your test wallet address. 
+# ═══════════════════════════════════════════════════════════════════════════════ + +# ─── mETH Rebalancer (chains: 1 ↔ 5000) ──────────────────────────────────── +METH_REBALANCE_ENABLED=true +METH_REBALANCE_MARKET_MAKER_ADDRESS= # your EVM wallet address +METH_REBALANCE_MARKET_MAKER_ON_DEMAND_ENABLED=false +METH_REBALANCE_MARKET_MAKER_THRESHOLD_ENABLED=false +METH_REBALANCE_FILL_SERVICE_ADDRESS= # your EVM wallet address (or FS address) +METH_REBALANCE_FILL_SERVICE_SENDER_ADDRESS= # FS sender on mainnet (optional, defaults to FS address) +METH_REBALANCE_FILL_SERVICE_THRESHOLD_ENABLED=true +METH_REBALANCE_FILL_SERVICE_THRESHOLD=999000000000000000000 +METH_REBALANCE_FILL_SERVICE_TARGET_BALANCE=999000000000000000000 +METH_REBALANCE_BRIDGE_MIN_REBALANCE_AMOUNT=100000000000000 +METH_REBALANCE_BRIDGE_MAX_REBALANCE_AMOUNT=100000000000000 +METH_REBALANCE_BRIDGE_SLIPPAGE_DBPS=500 + +# ─── TAC USDT Rebalancer (chains: 1 ↔ 239) ───────────────────────────────── +TAC_REBALANCE_ENABLED=true +TAC_REBALANCE_MARKET_MAKER_ADDRESS= # your EVM wallet address +TAC_REBALANCE_MARKET_MAKER_ON_DEMAND_ENABLED=false +TAC_REBALANCE_MARKET_MAKER_THRESHOLD_ENABLED=true +TAC_REBALANCE_MARKET_MAKER_THRESHOLD=999000000000000000000 +TAC_REBALANCE_MARKET_MAKER_TARGET_BALANCE=999000000000000000000 +TAC_REBALANCE_FILL_SERVICE_ADDRESS= # your EVM wallet address +TAC_REBALANCE_FILL_SERVICE_SENDER_ADDRESS= +TAC_REBALANCE_FILL_SERVICE_THRESHOLD_ENABLED=true +TAC_REBALANCE_FILL_SERVICE_THRESHOLD=999000000000000000000 +TAC_REBALANCE_FILL_SERVICE_TARGET_BALANCE=999000000000000000000 +TAC_REBALANCE_BRIDGE_MIN_REBALANCE_AMOUNT=100000 +TAC_REBALANCE_BRIDGE_MAX_REBALANCE_AMOUNT=100000 +TAC_REBALANCE_BRIDGE_SLIPPAGE_DBPS=500 + +# ─── aManUSDe Rebalancer (chains: 1 → 5000, Stargate + DexSwap + AaveSupply) ─ +AMANUSDE_REBALANCE_ENABLED=true +AMANUSDE_REBALANCE_FILL_SERVICE_ADDRESS= # your EVM wallet address +AMANUSDE_REBALANCE_FILL_SERVICE_SENDER_ADDRESS= # FS sender on mainnet (optional) 
+AMANUSDE_REBALANCE_FILL_SERVICE_THRESHOLD_ENABLED=true +AMANUSDE_REBALANCE_FILL_SERVICE_THRESHOLD=999000000000000000000 +AMANUSDE_REBALANCE_FILL_SERVICE_TARGET_BALANCE=999000000000000000000 +AMANUSDE_REBALANCE_BRIDGE_MIN_REBALANCE_AMOUNT=100000 +AMANUSDE_REBALANCE_BRIDGE_MAX_REBALANCE_AMOUNT=100000 +AMANUSDE_REBALANCE_BRIDGE_SLIPPAGE_DBPS=50 +AMANUSDE_AAVE_POOL_ADDRESS=0x90df02551bB792286e8D4f13E0e357b4Bf1D6a26 +AMANUSDE_DEX_SWAP_SLIPPAGE_BPS=100 + +# ─── aMansyrupUSDT Rebalancer (chains: 1 → 5000, Stargate + DexSwap + AaveSupply) ─ +AMANSYRUPUSDT_REBALANCE_ENABLED=true +AMANSYRUPUSDT_REBALANCE_FILL_SERVICE_ADDRESS= # your EVM wallet address +AMANSYRUPUSDT_REBALANCE_FILL_SERVICE_SENDER_ADDRESS= +AMANSYRUPUSDT_REBALANCE_FILL_SERVICE_THRESHOLD_ENABLED=true +# NOTE: 18-decimal units — getEvmBalance normalizes aMansyrupUSDT balances to 18 decimals (see TokenRebalanceConfig threshold docs) +AMANSYRUPUSDT_REBALANCE_FILL_SERVICE_THRESHOLD=999000000000000000000 +AMANSYRUPUSDT_REBALANCE_FILL_SERVICE_TARGET_BALANCE=999000000000000000000 +AMANSYRUPUSDT_REBALANCE_BRIDGE_MIN_REBALANCE_AMOUNT=100000 +AMANSYRUPUSDT_REBALANCE_BRIDGE_MAX_REBALANCE_AMOUNT=100000 +AMANSYRUPUSDT_REBALANCE_BRIDGE_SLIPPAGE_DBPS=50 +AMANSYRUPUSDT_AAVE_POOL_ADDRESS=0x90df02551bB792286e8D4f13E0e357b4Bf1D6a26 +AMANSYRUPUSDT_DEX_SWAP_SLIPPAGE_BPS=100 + +# ─── Solana ptUSDe Rebalancer (chains: 1399811149 → 1 → 1399811149) ───────── +SOLANA_PTUSDE_REBALANCE_ENABLED=true +SOLANA_PTUSDE_REBALANCE_THRESHOLD=1000000000 +SOLANA_PTUSDE_REBALANCE_TARGET=1000000000 +SOLANA_PTUSDE_REBALANCE_BRIDGE_MIN_REBALANCE_AMOUNT=100000 +SOLANA_PTUSDE_REBALANCE_BRIDGE_MAX_REBALANCE_AMOUNT=100000 +SOLANA_PTUSDE_REBALANCE_BRIDGE_SLIPPAGE_DBPS=500 + +# ─── E2E Runner Defaults ──────────────────────────────────────────────────── +# These can also be set via CLI flags (--dry-run, --config, --run-modes, --sequential) +DRY_RUN=true +# E2E_CONFIG_PATH=./e2e-config.example.json diff --git a/packages/poller/e2e-config.example.json b/packages/poller/e2e-config.example.json new file mode 100644 index 00000000..7baf2306 --- /dev/null +++ b/packages/poller/e2e-config.example.json @@ -0,0 +1,74 @@ 
+{ + "dryRun": true, + "runModes": ["all"], + "sequential": true, + "overrides": { + "methRebalance": { + "enabled": true, + "fillService": { + "thresholdEnabled": true, + "threshold": "999000000000000000000", + "targetBalance": "999000000000000000000" + }, + "bridge": { + "minRebalanceAmount": "100000000000000", + "maxRebalanceAmount": "100000000000000", + "slippageDbps": 500 + } + }, + "tacRebalance": { + "enabled": true, + "marketMaker": { + "thresholdEnabled": true, + "threshold": "999000000000000000000", + "targetBalance": "999000000000000000000" + }, + "fillService": { + "thresholdEnabled": true, + "threshold": "999000000000000000000", + "targetBalance": "999000000000000000000" + }, + "bridge": { + "minRebalanceAmount": "100000", + "maxRebalanceAmount": "100000", + "slippageDbps": 500 + } + }, + "aManUsdeRebalance": { + "enabled": true, + "fillService": { + "thresholdEnabled": true, + "threshold": "999000000000000000000", + "targetBalance": "999000000000000000000" + }, + "bridge": { + "minRebalanceAmount": "100000", + "maxRebalanceAmount": "100000", + "slippageDbps": 50 + } + }, + "aMansyrupUsdtRebalance": { + "enabled": true, + "fillService": { + "thresholdEnabled": true, + "threshold": "999000000", + "targetBalance": "999000000" + }, + "bridge": { + "minRebalanceAmount": "100000", + "maxRebalanceAmount": "100000", + "slippageDbps": 50 + } + }, + "solanaPtusdeRebalance": { + "enabled": true, + "ptUsdeThreshold": "1000000000", + "ptUsdeTarget": "1000000000", + "bridge": { + "slippageDbps": 500, + "minRebalanceAmount": "100000", + "maxRebalanceAmount": "100000" + } + } + } +} diff --git a/packages/poller/package.json b/packages/poller/package.json index dcf9f88b..43b5d902 100644 --- a/packages/poller/package.json +++ b/packages/poller/package.json @@ -19,7 +19,9 @@ "lint:fix": "yarn lint --fix", "test": "jest", "test:watch": "jest --watch", - "test:coverage": "jest --coverage" + "test:coverage": "jest --coverage", + "e2e": "npx tsx src/e2e/bootstrap.ts", + 
"e2e:dry": "DRY_RUN=true npx tsx src/e2e/bootstrap.ts" }, "dependencies": { "@mark/agent": "workspace:*", diff --git a/packages/poller/src/e2e/bootstrap.ts b/packages/poller/src/e2e/bootstrap.ts new file mode 100644 index 00000000..29ece5a3 --- /dev/null +++ b/packages/poller/src/e2e/bootstrap.ts @@ -0,0 +1,20 @@ +// Bootstrap: loads .env.e2e BEFORE any other module is imported. +// This file must be the entry point — it uses dynamic import() so that +// @mark/core's top-level dotenv.config() sees our env vars already set. + +import { config as dotenvConfig } from 'dotenv'; +import { existsSync } from 'fs'; +import { resolve } from 'path'; + +const e2eEnvPath = resolve(__dirname, '../../.env.e2e'); +if (existsSync(e2eEnvPath)) { + dotenvConfig({ path: e2eEnvPath, override: true }); +} else { + dotenvConfig(); +} + +// Now safe to load the runner — all imports inside runner.ts will see our env vars. +import('./runner').catch((err) => { + console.error('E2E runner failed to load:', err); + process.exit(1); +}); diff --git a/packages/poller/src/e2e/dry-chain-service.ts b/packages/poller/src/e2e/dry-chain-service.ts new file mode 100644 index 00000000..4ff7077d --- /dev/null +++ b/packages/poller/src/e2e/dry-chain-service.ts @@ -0,0 +1,124 @@ +import { ChainService, SolanaSigner, SolanaTransactionResult } from '@mark/chainservice'; +import type { TransactionReceipt } from '@mark/database'; +import { SupportedBridge } from '@mark/core'; +import { Logger } from '@mark/logger'; + +export interface DryRunCounter { + count: number; +} + +export function createDryRunChainService(real: ChainService, logger: Logger, counter: DryRunCounter): ChainService { + return new Proxy(real, { + get(target, prop, receiver) { + if (prop === 'submitAndMonitor') { + return async ( + chainId: string, + transaction: { to?: string; value?: bigint; funcSig?: string; data?: string }, + ) => { + counter.count++; + logger.info('[DRY RUN] Would submit EVM transaction', { + chainId, + to: 
transaction.to, + value: transaction.value?.toString(), + funcSig: transaction.funcSig, + data: transaction.data?.substring(0, 10), + }); + // Return synthetic receipt matching TransactionReceipt interface + const receipt: TransactionReceipt = { + transactionHash: `0xdryrun_${Date.now()}_${chainId}`, + from: '', + to: transaction.to || '', + blockNumber: 0, + cumulativeGasUsed: '0', + effectiveGasPrice: '0', + logs: [], + status: 1, + confirmations: 0, + }; + return receipt; + }; + } + return Reflect.get(target, prop, receiver); + }, + }); +} + +export function createDryRunSolanaSigner(real: SolanaSigner, logger: Logger, counter: DryRunCounter): SolanaSigner { + return new Proxy(real, { + get(target, prop, receiver) { + if (prop === 'signAndSendTransaction') { + return async () => { + counter.count++; + logger.info('[DRY RUN] Would submit Solana transaction'); + const result: SolanaTransactionResult = { + signature: `dryrun_sol_${Date.now()}`, + slot: 0, + blockTime: Math.floor(Date.now() / 1000), + success: true, + fee: 0, + logs: ['[DRY RUN] Transaction intercepted'], + }; + return result; + }; + } + if (prop === 'sendSignedTransaction') { + return async () => { + counter.count++; + logger.info('[DRY RUN] Would send pre-signed Solana transaction'); + const result: SolanaTransactionResult = { + signature: `dryrun_sol_signed_${Date.now()}`, + slot: 0, + blockTime: Math.floor(Date.now() / 1000), + success: true, + fee: 0, + logs: ['[DRY RUN] Signed transaction intercepted'], + }; + return result; + }; + } + return Reflect.get(target, prop, receiver); + }, + }); +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export function createDryRunRebalanceAdapter(real: any, logger: Logger, counter: DryRunCounter): any { + return new Proxy(real, { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + get(target: any, prop: string | symbol, _receiver: any) { + if (prop === 'getAdapter') { + return (type: SupportedBridge) => { + const adapter = 
target.getAdapter(type); + // Wrap CCIP adapter to intercept sendSolanaToMainnet + if (type === SupportedBridge.CCIP) { + return new Proxy(adapter, { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + get(adapterTarget: any, adapterProp: string | symbol, _adapterReceiver: any) { + if (adapterProp === 'sendSolanaToMainnet') { + return async (sender: string, recipient: string, amount: string) => { + counter.count++; + logger.info('[DRY RUN] Would send Solana→Mainnet CCIP bridge', { + sender, + recipient, + amount, + }); + return { + hash: `dryrun_ccip_sol_${Date.now()}`, + logs: ['[DRY RUN] CCIP bridge intercepted'], + blockNumber: 0, + timestamp: Math.floor(Date.now() / 1000), + from: sender, + }; + }; + } + return Reflect.get(adapterTarget, adapterProp, _adapterReceiver); + }, + }); + } + return adapter; + }; + } + return Reflect.get(target, prop, _receiver); + }, + }); +} diff --git a/packages/poller/src/e2e/runner.ts b/packages/poller/src/e2e/runner.ts new file mode 100644 index 00000000..6854aab4 --- /dev/null +++ b/packages/poller/src/e2e/runner.ts @@ -0,0 +1,351 @@ +// Polyfill crypto for Solana library compatibility +import { webcrypto } from 'crypto'; +if (typeof globalThis.crypto === 'undefined') { + globalThis.crypto = webcrypto as Crypto; +} +if (typeof (global as typeof globalThis & { crypto?: Crypto }).crypto === 'undefined') { + (global as typeof globalThis & { crypto: Crypto }).crypto = webcrypto as Crypto; +} + +import '../polyfills'; +import '../rebalance/registrations'; + +import * as fs from 'fs'; +import * as path from 'path'; +import { randomBytes } from 'crypto'; +import { bytesToHex } from 'viem'; +import { Logger } from '@mark/logger'; +import { MarkConfiguration, loadConfiguration, cleanupHttpConnections } from '@mark/core'; +import { runMigration, validateTokenRebalanceConfig } from '@mark/agent'; +import { cleanupExpiredEarmarks, cleanupExpiredRegularRebalanceOps } from '../rebalance'; +import { initializeAdapters, 
ProcessingContext, MarkAdapters } from '../init'; +import { getRegisteredRebalancers, RebalancerRegistration } from '../rebalance/registry'; +import { cleanupViemClients } from '../helpers/contracts'; +import * as database from '@mark/database'; +import { E2EConfig, E2EResult } from './types'; +import { + createDryRunChainService, + createDryRunSolanaSigner, + createDryRunRebalanceAdapter, + DryRunCounter, +} from './dry-chain-service'; + +// --- CLI Argument Parsing --- + +function parseArgs(): { runModes: string[]; dryRun: boolean; configPath?: string; sequential: boolean } { + const args = process.argv.slice(2); + let runModes: string[] = []; + let dryRun = process.env.DRY_RUN === 'true'; + let configPath = process.env.E2E_CONFIG_PATH; + let sequential = false; + + for (let i = 0; i < args.length; i++) { + switch (args[i]) { + case '--run-modes': + if (args[i + 1]) { + runModes = args[++i].split(','); + } + break; + case '--dry-run': + dryRun = true; + break; + case '--config': + configPath = args[++i]; + break; + case '--sequential': + sequential = true; + break; + } + } + + return { runModes, dryRun, configPath, sequential }; +} + +function loadE2EConfig(configPath?: string): E2EConfig | undefined { + if (!configPath) return undefined; + + const resolved = path.resolve(configPath); + if (!fs.existsSync(resolved)) { + console.error(`E2E config file not found: ${resolved}`); + process.exit(1); + } + + return JSON.parse(fs.readFileSync(resolved, 'utf-8')) as E2EConfig; +} + +// --- Config Override --- + +function deepMerge(target: T, source: Partial): T { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const result = { ...target } as Record; + for (const key of Object.keys(source as Record)) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const sourceVal = (source as Record)[key]; + if ( + sourceVal !== undefined && + typeof sourceVal === 'object' && + sourceVal !== null && + !Array.isArray(sourceVal) && + typeof 
result[key] === 'object' && + result[key] !== null + ) { + result[key] = deepMerge(result[key], sourceVal); + } else if (sourceVal !== undefined) { + result[key] = sourceVal; + } + } + return result as T; +} + +function applyOverrides(config: MarkConfiguration, e2eConfig: E2EConfig): void { + if (!e2eConfig.overrides) return; + + if (e2eConfig.overrides.methRebalance && config.methRebalance) { + config.methRebalance = deepMerge(config.methRebalance, e2eConfig.overrides.methRebalance); + } + if (e2eConfig.overrides.tacRebalance && config.tacRebalance) { + config.tacRebalance = deepMerge(config.tacRebalance, e2eConfig.overrides.tacRebalance); + } + if (e2eConfig.overrides.aManUsdeRebalance && config.aManUsdeRebalance) { + config.aManUsdeRebalance = deepMerge(config.aManUsdeRebalance, e2eConfig.overrides.aManUsdeRebalance); + } + if (e2eConfig.overrides.aMansyrupUsdtRebalance && config.aMansyrupUsdtRebalance) { + config.aMansyrupUsdtRebalance = deepMerge( + config.aMansyrupUsdtRebalance, + e2eConfig.overrides.aMansyrupUsdtRebalance, + ); + } + if (e2eConfig.overrides.solanaPtusdeRebalance && config.solanaPtusdeRebalance) { + config.solanaPtusdeRebalance = deepMerge(config.solanaPtusdeRebalance, e2eConfig.overrides.solanaPtusdeRebalance); + } +} + +// --- Cleanup (mirrors init.ts:cleanupAdapters) --- + +async function cleanupAdapters(adapters: MarkAdapters): Promise { + try { + await Promise.all([adapters.purchaseCache.disconnect(), database.closeDatabase()]); + cleanupHttpConnections(); + cleanupViemClients(); + } catch (error) { + adapters.logger.warn('Error during adapter cleanup', { error }); + } +} + +// --- Runner --- + +async function runRebalancer( + rebalancer: RebalancerRegistration, + adapters: MarkAdapters, + config: MarkConfiguration, + counter: DryRunCounter, +): Promise { + const startMs = Date.now(); + const counterBefore = counter.count; + + try { + const context: ProcessingContext = { + ...adapters, + config, + requestId: bytesToHex(randomBytes(32)), 
+ startTime: Math.floor(Date.now() / 1000), + }; + + await cleanupExpiredEarmarks(context); + await cleanupExpiredRegularRebalanceOps(context); + + adapters.logger.info(`Running ${rebalancer.displayName} (${rebalancer.runMode})...`); + + const actions = await rebalancer.handler(context); + + return { + runMode: rebalancer.runMode, + displayName: rebalancer.displayName, + status: 'completed', + actions: actions.length, + dryRunIntercepted: counter.count - counterBefore, + durationMs: Date.now() - startMs, + }; + } catch (error) { + return { + runMode: rebalancer.runMode, + displayName: rebalancer.displayName, + status: 'failed', + actions: 0, + dryRunIntercepted: counter.count - counterBefore, + error: (error as Error).message, + durationMs: Date.now() - startMs, + }; + } +} + +// --- Summary --- + +function printSummary(results: E2EResult[], isDryRun: boolean): void { + console.log('\n=== E2E Rebalancer Test Results ==='); + console.log(`Mode: ${isDryRun ? 'dry-run' : 'live'}\n`); + + const header = ' Run Mode Status Actions Dry-Run TX Duration'; + const separator = ' ' + '-'.repeat(header.length - 2); + + console.log(header); + console.log(separator); + + for (const r of results) { + const mode = r.runMode.padEnd(25); + const status = r.status.toUpperCase().padEnd(11); + const actions = r.status === 'failed' || r.status === 'skipped' ? '-'.padEnd(9) : String(r.actions).padEnd(9); + const dryTx = + r.status === 'failed' || r.status === 'skipped' ? '-'.padEnd(12) : String(r.dryRunIntercepted).padEnd(12); + const duration = r.status === 'skipped' ? 
'-' : `${(r.durationMs / 1000).toFixed(1)}s`; + + console.log(` ${mode} ${status} ${actions} ${dryTx} ${duration}`); + + if (r.error) { + console.log(` Error: ${r.error}`); + } + } + + const passed = results.filter((r) => r.status === 'completed').length; + const failed = results.filter((r) => r.status === 'failed').length; + const skipped = results.filter((r) => r.status === 'skipped').length; + + console.log(`\nOverall: ${passed} passed, ${failed} failed, ${skipped} skipped\n`); +} + +// --- Main --- + +async function main(): Promise { + const cliArgs = parseArgs(); + const e2eConfig = loadE2EConfig(cliArgs.configPath); + + // Merge CLI args with config file (CLI takes precedence) + const isDryRun = cliArgs.dryRun || e2eConfig?.dryRun || false; + const runModes = cliArgs.runModes.length > 0 ? cliArgs.runModes : e2eConfig?.runModes || ['all']; + const sequential = cliArgs.sequential || e2eConfig?.sequential || false; + + console.log(`E2E Rebalancer Test`); + console.log(` Dry run: ${isDryRun}`); + console.log(` Run modes: ${runModes.join(', ')}`); + console.log(` Sequential: ${sequential}`); + console.log(''); + + // 1. Load production config + const config = await loadConfiguration(); + + const logger = new Logger({ + service: 'mark-e2e', + level: config.logLevel, + }); + + // 2. Apply overrides from e2e config + if (e2eConfig) { + applyOverrides(config, e2eConfig); + logger.info('Applied E2E config overrides'); + } + + // 3. Run database migration + await runMigration(logger); + + // 4. Validate config + validateTokenRebalanceConfig(config, logger); + + // 5. Initialize adapters (same as production) + let adapters: MarkAdapters | undefined; + + try { + adapters = initializeAdapters(config, logger); + + // 6. 
If dry-run, wrap chain services with proxies + const counter: DryRunCounter = { count: 0 }; + + if (isDryRun) { + adapters.chainService = createDryRunChainService(adapters.chainService, logger, counter); + + if (adapters.fillServiceChainService) { + adapters.fillServiceChainService = createDryRunChainService(adapters.fillServiceChainService, logger, counter); + } + + if (adapters.solanaSigner) { + adapters.solanaSigner = createDryRunSolanaSigner(adapters.solanaSigner, logger, counter); + } + + // Wrap rebalance adapter to intercept CCIP sendSolanaToMainnet (bypasses SolanaSigner) + adapters.rebalance = createDryRunRebalanceAdapter(adapters.rebalance, logger, counter); + + logger.info('Dry-run mode: transaction submission will be intercepted'); + } + + // 7. Resolve target rebalancers + const registered = getRegisteredRebalancers(); + const targets: RebalancerRegistration[] = []; + const results: E2EResult[] = []; + + for (const reg of registered) { + if (runModes.includes('all') || runModes.includes(reg.runMode)) { + // Check if Solana signer is needed but missing + if (reg.runMode === 'solanaUsdcOnly' && !adapters.solanaSigner) { + results.push({ + runMode: reg.runMode, + displayName: reg.displayName, + status: 'skipped', + actions: 0, + dryRunIntercepted: 0, + error: 'SolanaSigner not configured', + durationMs: 0, + }); + continue; + } + targets.push(reg); + } + } + + if (targets.length === 0) { + logger.warn('No rebalancers matched the specified run modes', { runModes }); + printSummary(results, isDryRun); + process.exit(1); + } + + // 8. 
Execute + if (sequential) { + for (const rebalancer of targets) { + const result = await runRebalancer(rebalancer, adapters, config, counter); + results.push(result); + } + } else { + const settled = await Promise.allSettled(targets.map((r) => runRebalancer(r, adapters!, config, counter))); + for (const s of settled) { + if (s.status === 'fulfilled') { + results.push(s.value); + } else { + // This shouldn't happen since runRebalancer catches errors, but handle it + results.push({ + runMode: 'unknown', + displayName: 'unknown', + status: 'failed', + actions: 0, + dryRunIntercepted: 0, + error: s.reason?.message || 'Unknown error', + durationMs: 0, + }); + } + } + } + + // 9. Print results + printSummary(results, isDryRun); + + // 10. Exit with code + const hasFailed = results.some((r) => r.status === 'failed'); + process.exit(hasFailed ? 1 : 0); + } finally { + if (adapters) { + await cleanupAdapters(adapters); + } + } +} + +main().catch((err) => { + console.error('E2E runner failed:', err); + process.exit(1); +}); diff --git a/packages/poller/src/e2e/types.ts b/packages/poller/src/e2e/types.ts new file mode 100644 index 00000000..ef8285b4 --- /dev/null +++ b/packages/poller/src/e2e/types.ts @@ -0,0 +1,24 @@ +import { TokenRebalanceConfig, SolanaRebalanceConfig } from '@mark/core'; + +export interface E2EConfig { + dryRun: boolean; + runModes: string[]; // e.g., ["methOnly", "tacOnly"] or ["all"] + sequential: boolean; // run one at a time vs parallel + overrides?: { + methRebalance?: Partial; + tacRebalance?: Partial; + aManUsdeRebalance?: Partial; + aMansyrupUsdtRebalance?: Partial; + solanaPtusdeRebalance?: Partial; + }; +} + +export interface E2EResult { + runMode: string; + displayName: string; + status: 'completed' | 'failed' | 'skipped'; + actions: number; + dryRunIntercepted: number; + error?: string; + durationMs: number; +} diff --git a/packages/poller/src/init.ts b/packages/poller/src/init.ts index 6da9dc2d..f9be6e05 100644 --- 
a/packages/poller/src/init.ts +++ b/packages/poller/src/init.ts @@ -18,9 +18,8 @@ import { RebalanceAdapter } from '@mark/rebalance'; import { cleanupViemClients } from './helpers/contracts'; import * as database from '@mark/database'; import { bytesToHex, WalletClient } from 'viem'; -import { rebalanceMantleEth } from './rebalance/mantleEth'; -import { rebalanceTacUsdt } from './rebalance/tacUsdt'; -import { rebalanceSolanaUsdc } from './rebalance/solanaUsdc'; +import './rebalance/registrations'; +import { getRegisteredRebalancers } from './rebalance/registry'; import { randomBytes } from 'crypto'; export interface MarkAdapters { @@ -113,83 +112,36 @@ export const initPoller = async (): Promise<{ statusCode: number; body: string } logger.debug('Logging run mode of the instance', { runMode: process.env.RUN_MODE }); - if (process.env.RUN_MODE === 'methOnly') { - logger.info('Starting meth rebalancing', { - stage: config.stage, - environment: config.environment, - addresses, - fillServiceAddresses, - }); - - const rebalanceOperations = await rebalanceMantleEth(context); - if (rebalanceOperations.length === 0) { - logger.info('Meth Rebalancing completed: no operations needed', { - requestId: context.requestId, - }); - } else { - logger.info('Successfully completed meth rebalancing operations', { - requestId: context.requestId, - numOperations: rebalanceOperations.length, - operations: rebalanceOperations, - }); - } - - logFileDescriptorUsage(logger); - - return { - statusCode: 200, - body: JSON.stringify({ - rebalanceOperations: rebalanceOperations ?? [], - }), - }; - } + const runMode = process.env.RUN_MODE; + const rebalancer = runMode ? 
getRegisteredRebalancers().find((r) => r.runMode === runMode) : undefined; - if (process.env.RUN_MODE === 'tacOnly') { - logger.info('Starting TAC USDT rebalancing', { - stage: config.stage, - environment: config.environment, - addresses, - fillServiceAddresses, + if (runMode && !rebalancer && runMode !== 'rebalanceOnly') { + const validModes = getRegisteredRebalancers().map((r) => r.runMode); + logger.error(`Unknown RUN_MODE "${runMode}". Valid modes: ${validModes.join(', ')}, rebalanceOnly`, { + runMode, + validModes, }); - - const rebalanceOperations = await rebalanceTacUsdt(context); - if (rebalanceOperations.length === 0) { - logger.info('TAC USDT Rebalancing completed: no operations needed', { - requestId: context.requestId, - }); - } else { - logger.info('Successfully completed TAC USDT rebalancing operations', { - requestId: context.requestId, - numOperations: rebalanceOperations.length, - operations: rebalanceOperations, - }); - } - - logFileDescriptorUsage(logger); - return { - statusCode: 200, - body: JSON.stringify({ - rebalanceOperations: rebalanceOperations ?? 
[], - }), + statusCode: 400, + body: JSON.stringify({ error: `Unknown RUN_MODE: ${runMode}` }), }; } - if (process.env.RUN_MODE === 'solanaUsdcOnly') { - logger.info('Starting Solana USDC → ptUSDe rebalancing', { + if (rebalancer) { + logger.info(`Starting ${rebalancer.displayName} rebalancing`, { stage: config.stage, environment: config.environment, addresses, fillServiceAddresses, }); - const rebalanceOperations = await rebalanceSolanaUsdc(context); + const rebalanceOperations = await rebalancer.handler(context); if (rebalanceOperations.length === 0) { - logger.info('Solana USDC Rebalancing completed: no operations needed', { + logger.info(`${rebalancer.displayName} Rebalancing completed: no operations needed`, { requestId: context.requestId, }); } else { - logger.info('Successfully completed Solana USDC rebalancing operations', { + logger.info(`Successfully completed ${rebalancer.displayName} rebalancing operations`, { requestId: context.requestId, numOperations: rebalanceOperations.length, operations: rebalanceOperations, diff --git a/packages/poller/src/rebalance/aManUsde.ts b/packages/poller/src/rebalance/aManUsde.ts new file mode 100644 index 00000000..defb9149 --- /dev/null +++ b/packages/poller/src/rebalance/aManUsde.ts @@ -0,0 +1,37 @@ +import { PostBridgeActionType } from '@mark/core'; +import { ProcessingContext } from '../init'; +import { rebalanceAaveToken, executeAaveTokenCallbacks, AaveTokenFlowDescriptor } from './aaveTokenRebalancer'; + +// Ticker hashes from chaindata/everclear.json +const AMANUSDE_TICKER_HASH = '0x66ccba55361fa110a5bbf2242ca4587de7dbe4596f981363a6a87711889904ac'; +const USDE_TICKER_HASH = '0x01c5070cf4f26b1dca38a8754c64483958f5dd08799ad2d72067b3ff2985b82c'; +const USDC_TICKER_HASH = '0xd6aca1be9729c13d677335161321649cccae6a591554772516700f986f942eaa'; + +const aManUsdeDescriptor: AaveTokenFlowDescriptor = { + name: 'aManUSDe', + aTokenTickerHash: AMANUSDE_TICKER_HASH, + intermediateTokenTickerHash: USDE_TICKER_HASH, + 
sourceTokenTickerHash: USDC_TICKER_HASH, + bridgeTag: 'stargate-amanusde', + getConfig: (config) => config.aManUsdeRebalance, + buildPostBridgeActions: ({ sourceTokenOnMantle, intermediateTokenOnMantle, aavePoolAddress, dexSwapSlippageBps }) => [ + { + type: PostBridgeActionType.DexSwap as const, + sellToken: sourceTokenOnMantle, + buyToken: intermediateTokenOnMantle, + slippageBps: dexSwapSlippageBps, + }, + { + type: PostBridgeActionType.AaveSupply as const, + poolAddress: aavePoolAddress, + supplyAsset: intermediateTokenOnMantle, + }, + ], + getAavePoolAddress: () => process.env.AMANUSDE_AAVE_POOL_ADDRESS, + getDexSwapSlippageBps: () => parseInt(process.env.AMANUSDE_DEX_SWAP_SLIPPAGE_BPS ?? '', 10) || 100, +}; + +export const rebalanceAManUsde = (context: ProcessingContext) => rebalanceAaveToken(context, aManUsdeDescriptor); + +export const executeAManUsdeCallbacks = (context: ProcessingContext) => + executeAaveTokenCallbacks(context, aManUsdeDescriptor); diff --git a/packages/poller/src/rebalance/aMansyrupUsdt.ts b/packages/poller/src/rebalance/aMansyrupUsdt.ts new file mode 100644 index 00000000..c0d00d97 --- /dev/null +++ b/packages/poller/src/rebalance/aMansyrupUsdt.ts @@ -0,0 +1,38 @@ +import { PostBridgeActionType } from '@mark/core'; +import { ProcessingContext } from '../init'; +import { rebalanceAaveToken, executeAaveTokenCallbacks, AaveTokenFlowDescriptor } from './aaveTokenRebalancer'; + +// Ticker hashes from chaindata/everclear.json +const AMANSYRUPUSDT_TICKER_HASH = '0x50754231141ed10c02426fd810290fe327a8ea327cf763ea23aa37d0c1baa32e'; +const SYRUPUSDT_TICKER_HASH = '0x7bb29d70724bbe7b0958c9fa41e525d57faa30c509988884b3f212b9108edd0e'; +const USDC_TICKER_HASH = '0xd6aca1be9729c13d677335161321649cccae6a591554772516700f986f942eaa'; + +const aMansyrupUsdtDescriptor: AaveTokenFlowDescriptor = { + name: 'aMansyrupUSDT', + aTokenTickerHash: AMANSYRUPUSDT_TICKER_HASH, + intermediateTokenTickerHash: SYRUPUSDT_TICKER_HASH, + sourceTokenTickerHash: 
USDC_TICKER_HASH, + bridgeTag: 'stargate-amansyrupusdt', + getConfig: (config) => config.aMansyrupUsdtRebalance, + buildPostBridgeActions: ({ sourceTokenOnMantle, intermediateTokenOnMantle, aavePoolAddress, dexSwapSlippageBps }) => [ + { + type: PostBridgeActionType.DexSwap as const, + sellToken: sourceTokenOnMantle, + buyToken: intermediateTokenOnMantle, + slippageBps: dexSwapSlippageBps, + }, + { + type: PostBridgeActionType.AaveSupply as const, + poolAddress: aavePoolAddress, + supplyAsset: intermediateTokenOnMantle, + }, + ], + getAavePoolAddress: () => process.env.AMANSYRUPUSDT_AAVE_POOL_ADDRESS, + getDexSwapSlippageBps: () => parseInt(process.env.AMANSYRUPUSDT_DEX_SWAP_SLIPPAGE_BPS ?? '', 10) || 100, +}; + +export const rebalanceAMansyrupUsdt = (context: ProcessingContext) => + rebalanceAaveToken(context, aMansyrupUsdtDescriptor); + +export const executeAMansyrupUsdtCallbacks = (context: ProcessingContext) => + executeAaveTokenCallbacks(context, aMansyrupUsdtDescriptor); diff --git a/packages/poller/src/rebalance/aaveTokenRebalancer.ts b/packages/poller/src/rebalance/aaveTokenRebalancer.ts new file mode 100644 index 00000000..78d56ef4 --- /dev/null +++ b/packages/poller/src/rebalance/aaveTokenRebalancer.ts @@ -0,0 +1,630 @@ +import { TransactionReceipt as ViemTransactionReceipt } from 'viem'; +import { getEvmBalance, safeParseBigInt, convertToNativeUnits } from '../helpers'; +import { jsonifyError } from '@mark/logger'; +import { + getDecimalsFromConfig, + getTokenAddressFromConfig, + MarkConfiguration, + RebalanceOperationStatus, + DBPS_MULTIPLIER, + RebalanceAction, + SupportedBridge, + MAINNET_CHAIN_ID, + MANTLE_CHAIN_ID, + PostBridgeActionConfig, + TokenRebalanceConfig, +} from '@mark/core'; +import { ProcessingContext } from '../init'; +import { getValidatedZodiacConfig } from '../helpers/zodiac'; +import { submitTransactionWithLogging } from '../helpers/transactions'; +import { buildTransactionsForAction } from '@mark/rebalance'; +import { 
TransactionReceipt } from '@mark/database'; +import { getBridgeTypeFromTag } from './helpers'; +import { RebalanceRunState } from './types'; +import { runThresholdRebalance, ThresholdRebalanceDescriptor } from './thresholdEngine'; +import { runCallbackLoop, RebalanceOperation } from './callbackEngine'; +import { executeEvmBridge } from './bridgeExecution'; + +/** + * Descriptor that parameterizes the generic Aave token rebalancer for a specific flow. + */ +export interface AaveTokenFlowDescriptor { + /** Human-readable name, e.g., 'aManUSDe', 'aMansyrupUSDT' */ + name: string; + /** Destination aToken ticker hash */ + aTokenTickerHash: string; + /** Intermediate token ticker hash (e.g., USDe or syrupUSDT) */ + intermediateTokenTickerHash: string; + /** Source token ticker hash bridged from mainnet (e.g., USDC) */ + sourceTokenTickerHash: string; + /** DB tag for identifying operations, e.g., 'stargate-amanusde' */ + bridgeTag: string; + /** Extract the relevant TokenRebalanceConfig from the overall config */ + getConfig: (config: MarkConfiguration) => TokenRebalanceConfig | undefined; + /** Build the post-bridge action pipeline (DexSwap + AaveSupply) */ + buildPostBridgeActions: (params: { + sourceTokenOnMantle: string; + intermediateTokenOnMantle: string; + aavePoolAddress: string; + dexSwapSlippageBps: number; + }) => PostBridgeActionConfig[]; + /** Get the Aave Pool address from env */ + getAavePoolAddress: () => string | undefined; + /** Get the DEX swap slippage from env */ + getDexSwapSlippageBps: () => number; +} + +/** + * Main entry point for Aave token threshold-based rebalancing. 
+ * + * Flow: Source Token (ETH) -> Stargate -> Source Token (Mantle) -> DEX Swap -> Intermediate Token -> Aave Supply -> aToken + */ +export async function rebalanceAaveToken( + context: ProcessingContext, + descriptor: AaveTokenFlowDescriptor, +): Promise { + const { logger, requestId, config, rebalance } = context; + const actions: RebalanceAction[] = []; + + // Always process callbacks first to complete in-flight operations + await executeAaveTokenCallbacks(context, descriptor); + + const tokenConfig = descriptor.getConfig(config); + if (!tokenConfig?.enabled) { + logger.debug(`${descriptor.name} rebalancing disabled`, { requestId }); + return actions; + } + + const isPaused = await rebalance.isPaused(); + if (isPaused) { + logger.warn('Rebalance loop is paused', { requestId }); + return actions; + } + + // Validate required config + const validationErrors: string[] = []; + if (!tokenConfig.fillService?.address) { + validationErrors.push('fillService.address is required'); + } + if (!tokenConfig.bridge?.minRebalanceAmount) { + validationErrors.push('bridge.minRebalanceAmount is required'); + } + if (!descriptor.getAavePoolAddress()) { + validationErrors.push(`Aave pool address env var is not set (post-bridge supply will fail)`); + } + if (validationErrors.length > 0) { + logger.error(`${descriptor.name} rebalance configuration validation failed`, { + requestId, + errors: validationErrors, + }); + return actions; + } + + logger.info(`Starting ${descriptor.name} rebalancing`, { + requestId, + ownAddress: config.ownAddress, + wallets: { + fillService: { + address: tokenConfig.fillService.address, + senderAddress: tokenConfig.fillService.senderAddress, + thresholdEnabled: tokenConfig.fillService.thresholdEnabled, + threshold: tokenConfig.fillService.threshold, + targetBalance: tokenConfig.fillService.targetBalance, + }, + }, + }); + + const runState: RebalanceRunState = { committedAmount: 0n }; + + const fsActions = await evaluateThresholdRebalance(context, 
descriptor, runState); + actions.push(...fsActions); + + logger.info(`Completed ${descriptor.name} rebalancing cycle`, { + requestId, + totalActions: actions.length, + totalCommitted: runState.committedAmount.toString(), + }); + + return actions; +} + +/** + * Evaluate Fill Service threshold rebalancing for an Aave token. + * + * Builds a ThresholdRebalanceDescriptor from the AaveTokenFlowDescriptor + * and delegates to the shared threshold engine. + */ +export const evaluateThresholdRebalance = async ( + context: ProcessingContext, + descriptor: AaveTokenFlowDescriptor, + runState: RebalanceRunState, +): Promise => { + const { config, prometheus, database: db } = context; + const tokenConfig = descriptor.getConfig(config)!; + const fsConfig = tokenConfig.fillService; + const bridgeConfig = tokenConfig.bridge; + const fsSenderAddress = fsConfig.senderAddress ?? fsConfig.address; + + const sourceTokenDecimals = getDecimalsFromConfig(descriptor.sourceTokenTickerHash, MAINNET_CHAIN_ID, config); + const sourceTokenAddress = getTokenAddressFromConfig(descriptor.sourceTokenTickerHash, MAINNET_CHAIN_ID, config); + const aTokenAddress = getTokenAddressFromConfig(descriptor.aTokenTickerHash, MANTLE_CHAIN_ID, config); + const aTokenDecimals = getDecimalsFromConfig(descriptor.aTokenTickerHash, MANTLE_CHAIN_ID, config); + + const thresholdDescriptor: ThresholdRebalanceDescriptor = { + name: descriptor.name, + + isEnabled: () => fsConfig.thresholdEnabled, + + hasInFlightOperations: async () => { + const { operations } = await db.getRebalanceOperations(undefined, undefined, { + status: [ + RebalanceOperationStatus.PENDING, + RebalanceOperationStatus.AWAITING_CALLBACK, + RebalanceOperationStatus.AWAITING_POST_BRIDGE, + ], + bridge: descriptor.bridgeTag, + earmarkId: null, + }); + if (operations.length > 0) { + context.logger.info(`Found ${operations.length} in-flight ${descriptor.name} rebalance operations, skipping`, { + requestId: context.requestId, + inFlightCount: 
operations.length, + }); + } + return operations.length > 0; + }, + + getRecipientBalance: async (_ctx) => { + if (!aTokenAddress || !aTokenDecimals) { + throw new Error( + `${descriptor.name} token not found in chain config for Mantle (tickerHash: ${descriptor.aTokenTickerHash})`, + ); + } + const balance = await getEvmBalance( + config, + MANTLE_CHAIN_ID, + fsConfig.address!, + aTokenAddress, + aTokenDecimals, + prometheus, + ); + context.logger.info(`Checking FS receiver ${descriptor.name} balance`, { + requestId: context.requestId, + fillServiceAddress: fsConfig.address, + senderAddress: fsSenderAddress, + fsReceiverBalance: balance.toString(), + }); + return balance; + }, + + getThresholds: () => ({ + threshold: safeParseBigInt(fsConfig.threshold), + target: safeParseBigInt(fsConfig.targetBalance), + }), + + convertShortfallToBridgeAmount: async (shortfall) => { + if (!sourceTokenDecimals) { + throw new Error('Source token decimals not found in chain config for mainnet'); + } + return convertToNativeUnits(shortfall, sourceTokenDecimals); + }, + + getSenderBalance: async () => { + if (!fsSenderAddress || !sourceTokenAddress || !sourceTokenDecimals) { + context.logger.error('Source token config missing for mainnet', { + requestId: context.requestId, + fsSenderAddress, + sourceTokenAddress, + sourceTokenDecimals, + }); + return 0n; + } + const balance = await getEvmBalance( + config, + MAINNET_CHAIN_ID, + fsSenderAddress, + sourceTokenAddress, + sourceTokenDecimals, + prometheus, + ); + return convertToNativeUnits(balance, sourceTokenDecimals); + }, + + getAmountCaps: () => ({ + min: safeParseBigInt(bridgeConfig.minRebalanceAmount), + max: bridgeConfig.maxRebalanceAmount ? 
safeParseBigInt(bridgeConfig.maxRebalanceAmount) : undefined, + }), + + executeBridge: async (ctx, amount) => { + return executeStargateBridgeForAaveToken(ctx, descriptor, fsSenderAddress!, fsConfig.address!, amount); + }, + }; + + return runThresholdRebalance(context, thresholdDescriptor, runState); +}; + +/** + * Execute Stargate bridge: source token from ETH to Mantle. + */ +export const executeStargateBridgeForAaveToken = async ( + context: ProcessingContext, + descriptor: AaveTokenFlowDescriptor, + senderAddress: string, + recipientAddress: string, + amount: bigint, +): Promise => { + const { config, chainService, fillServiceChainService, logger, requestId, rebalance } = context; + const tokenConfig = descriptor.getConfig(config)!; + const bridgeConfig = tokenConfig.bridge; + + const bridgeType = SupportedBridge.Stargate; + const adapter = rebalance.getAdapter(bridgeType); + if (!adapter) { + logger.error('Stargate adapter not found', { requestId }); + return []; + } + + // Select the correct chain service based on whether the sender is the fill service address + const fsConfig = tokenConfig.fillService; + const fillerSenderAddress = fsConfig.senderAddress ?? fsConfig.address; + const isFillerSender = senderAddress.toLowerCase() === fillerSenderAddress?.toLowerCase(); + const selectedChainService = isFillerSender && fillServiceChainService ? fillServiceChainService : chainService; + + if (isFillerSender && !fillServiceChainService) { + logger.error(`Fill service chain service not available but sender is fill service address for ${descriptor.name}`, { + requestId, + senderAddress, + fillerSenderAddress, + }); + return []; + } + + const sourceTokenAddress = getTokenAddressFromConfig(descriptor.sourceTokenTickerHash, MAINNET_CHAIN_ID, config)!; + const slippageDbps = bridgeConfig.slippageDbps ?? 
500; + + const route = { + asset: sourceTokenAddress, + origin: Number(MAINNET_CHAIN_ID), + destination: Number(MANTLE_CHAIN_ID), + maximum: amount.toString(), + slippagesDbps: [slippageDbps], + preferences: [bridgeType], + reserve: '0', + }; + + logger.info(`Attempting Stargate bridge for ${descriptor.name}`, { + requestId, + bridgeType, + amount: amount.toString(), + senderAddress, + recipientAddress, + usingFillServiceSigner: isFillerSender, + route, + }); + + try { + const result = await executeEvmBridge({ + context, + adapter, + route, + amount, + sender: senderAddress, + recipient: recipientAddress, + slippageTolerance: BigInt(slippageDbps), + slippageMultiplier: DBPS_MULTIPLIER, + chainService: selectedChainService, + senderConfig: { + address: senderAddress, + label: isFillerSender ? 'fill-service' : 'market-maker', + }, + dbRecord: { + earmarkId: null, + tickerHash: descriptor.sourceTokenTickerHash, + bridgeTag: descriptor.bridgeTag, + status: RebalanceOperationStatus.PENDING, + }, + label: `Stargate ${descriptor.name}`, + }); + return result.actions; + } catch (error) { + logger.error(`Failed to execute Stargate bridge for ${descriptor.name}`, { + requestId, + route, + error: jsonifyError(error), + }); + return []; + } +}; + +/** + * Callback handler for in-flight Aave token rebalance operations. 
+ * + * Uses the shared callback engine with a processOperation that handles the state machine: + * PENDING -> readyOnDestination check -> AWAITING_CALLBACK + * AWAITING_CALLBACK -> destinationCallback -> AWAITING_POST_BRIDGE + * AWAITING_POST_BRIDGE -> DexSwap + AaveSupply -> COMPLETED + */ +export const executeAaveTokenCallbacks = async ( + context: ProcessingContext, + descriptor: AaveTokenFlowDescriptor, +): Promise => { + return runCallbackLoop(context, { + name: descriptor.name, + bridge: descriptor.bridgeTag, + statuses: [ + RebalanceOperationStatus.PENDING, + RebalanceOperationStatus.AWAITING_CALLBACK, + RebalanceOperationStatus.AWAITING_POST_BRIDGE, + ], + processOperation: (operation, ctx) => processAaveTokenOperation(operation, ctx, descriptor), + }); +}; + +/** + * Process a single in-flight Aave token operation through its state machine. + */ +async function processAaveTokenOperation( + operation: RebalanceOperation, + context: ProcessingContext, + descriptor: AaveTokenFlowDescriptor, +): Promise { + const { logger, requestId, config, rebalance, chainService, fillServiceChainService, database: db } = context; + const logContext = { + requestId, + operationId: operation.id, + originChain: operation.originChainId, + destinationChain: operation.destinationChainId, + status: operation.status, + }; + + // Determine if this is for Fill Service or Market Maker based on recipient + const tokenConfig = descriptor.getConfig(config); + const fsAddress = tokenConfig?.fillService?.address; + const isForFillService = operation.recipient?.toLowerCase() === fsAddress?.toLowerCase(); + const fillerSenderAddress = tokenConfig?.fillService?.senderAddress ?? fsAddress; + const selectedSender = isForFillService && fillerSenderAddress ? fillerSenderAddress : config.ownAddress; + const selectedChainService = isForFillService && fillServiceChainService ? 
fillServiceChainService : chainService; + + if (isForFillService && !fillServiceChainService) { + logger.error(`Fill service chain service not available for ${descriptor.name} callback, skipping`, { + ...logContext, + recipient: operation.recipient, + fsAddress, + }); + return; + } + + const bridgeType = operation.bridge ? getBridgeTypeFromTag(operation.bridge) : undefined; + const adapter = bridgeType ? rebalance.getAdapter(bridgeType) : undefined; + + if (!adapter) { + logger.warn('Adapter not found for bridge type', { ...logContext, bridgeType }); + return; + } + + // Get origin transaction receipt + const originTx = operation.transactions?.[operation.originChainId] as + | { transactionHash: string; metadata?: { receipt?: TransactionReceipt } } + | undefined; + const receipt = originTx?.metadata?.receipt; + + // --- Handle PENDING: check if bridge completed on destination --- + if (operation.status === RebalanceOperationStatus.PENDING) { + if (!receipt) { + logger.info('Origin transaction receipt not found for operation', logContext); + return; + } + + try { + const route = { + origin: operation.originChainId, + destination: operation.destinationChainId, + asset: getTokenAddressFromConfig(operation.tickerHash, operation.originChainId.toString(), config) || '', + }; + + const ready = await adapter.readyOnDestination( + operation.amount, + route, + receipt as unknown as ViemTransactionReceipt, + ); + + if (ready) { + await db.updateRebalanceOperation(operation.id, { + status: RebalanceOperationStatus.AWAITING_CALLBACK, + }); + logger.info('Stargate bridge ready on destination, updated to AWAITING_CALLBACK', logContext); + operation.status = RebalanceOperationStatus.AWAITING_CALLBACK; + } else { + logger.info('Stargate bridge not yet ready on destination', logContext); + return; + } + } catch (e) { + logger.error('Failed to check readyOnDestination', { ...logContext, error: jsonifyError(e) }); + return; + } + } + + // --- Handle AWAITING_CALLBACK: execute 
destination callback, transition to post-bridge --- + if (operation.status === RebalanceOperationStatus.AWAITING_CALLBACK) { + try { + const route = { + origin: operation.originChainId, + destination: operation.destinationChainId, + asset: getTokenAddressFromConfig(operation.tickerHash, operation.originChainId.toString(), config) || '', + }; + + let callback = null; + if (receipt) { + try { + callback = await adapter.destinationCallback(route, receipt as unknown as ViemTransactionReceipt); + } catch (e) { + logger.error('Failed to retrieve destination callback', { ...logContext, error: jsonifyError(e) }); + return; + } + } + + if (callback) { + const callbackSender = operation.recipient ?? selectedSender; + const destinationChainConfig = config.chains[operation.destinationChainId]; + const zodiacConfig = getValidatedZodiacConfig(destinationChainConfig, logger, logContext); + + const tx = await submitTransactionWithLogging({ + chainService: selectedChainService, + logger, + chainId: operation.destinationChainId.toString(), + txRequest: { + chainId: operation.destinationChainId, + to: callback.transaction.to!, + data: callback.transaction.data!, + value: (callback.transaction.value ?? 
BigInt(0)).toString(), + from: callbackSender, + funcSig: callback.transaction.funcSig || '', + }, + zodiacConfig, + context: { ...logContext, callbackType: `destination: ${callback.memo}` }, + }); + + logger.info('Successfully submitted destination callback', { + ...logContext, + transactionHash: tx.hash, + }); + } else { + logger.info('No destination callback required for Stargate', logContext); + } + + // Transition to AWAITING_POST_BRIDGE for DexSwap + AaveSupply + await db.updateRebalanceOperation(operation.id, { + status: RebalanceOperationStatus.AWAITING_POST_BRIDGE, + }); + logger.info('Transitioned to AWAITING_POST_BRIDGE for post-bridge actions', logContext); + operation.status = RebalanceOperationStatus.AWAITING_POST_BRIDGE; + } catch (e) { + logger.error('Failed to process AWAITING_CALLBACK', { ...logContext, error: jsonifyError(e) }); + return; + } + } + + // --- Handle AWAITING_POST_BRIDGE: execute DexSwap + AaveSupply --- + if (operation.status === RebalanceOperationStatus.AWAITING_POST_BRIDGE) { + const aavePoolAddress = descriptor.getAavePoolAddress(); + const dexSwapSlippageBps = descriptor.getDexSwapSlippageBps(); + + if (!aavePoolAddress) { + logger.error(`Aave pool address not set for ${descriptor.name}, cannot execute post-bridge actions`, logContext); + return; + } + + const sourceTokenOnMantle = getTokenAddressFromConfig(descriptor.sourceTokenTickerHash, MANTLE_CHAIN_ID, config); + const intermediateTokenOnMantle = getTokenAddressFromConfig( + descriptor.intermediateTokenTickerHash, + MANTLE_CHAIN_ID, + config, + ); + + if (!sourceTokenOnMantle || !intermediateTokenOnMantle) { + const availableAssets = (config.chains[MANTLE_CHAIN_ID]?.assets ?? 
[]).map((a) => a.symbol); + logger.error('Source or intermediate token address not found in chain config for Mantle', { + ...logContext, + sourceTokenOnMantle, + intermediateTokenOnMantle, + sourceTokenTickerHash: descriptor.sourceTokenTickerHash, + intermediateTokenTickerHash: descriptor.intermediateTokenTickerHash, + availableAssetsOnMantle: availableAssets, + }); + return; + } + + const postBridgeActions = descriptor.buildPostBridgeActions({ + sourceTokenOnMantle, + intermediateTokenOnMantle, + aavePoolAddress, + dexSwapSlippageBps, + }); + + // Use operation.recipient — that's where the bridge deposits tokens on the + // destination chain, so balance/allowance checks must target that address. + const actualSender = operation.recipient ?? selectedSender; + + try { + logger.info(`Executing post-bridge actions for ${descriptor.name}`, { + ...logContext, + actionCount: postBridgeActions.length, + sourceTokenOnMantle, + intermediateTokenOnMantle, + aavePoolAddress, + dexSwapSlippageBps, + }); + + let currentAmount = operation.amount; + + for (let i = 0; i < postBridgeActions.length; i++) { + const action = postBridgeActions[i]; + + logger.info('Building transactions for post-bridge action', { + ...logContext, + actionIndex: i, + actionType: action.type, + currentAmount, + }); + + const actionTxs = await buildTransactionsForAction( + actualSender, + currentAmount, + operation.destinationChainId, + action, + config.chains, + logger, + config.quoteServiceUrl, + ); + + if (actionTxs.length === 0) { + // Use maxUint256 so subsequent actions determine amount from on-chain balance + currentAmount = (2n ** 256n - 1n).toString(); + logger.info('Post-bridge action returned no transactions, advancing to next action', { + ...logContext, + actionIndex: i, + actionType: action.type, + }); + continue; + } + + const destChainConfig = config.chains[operation.destinationChainId]; + const postBridgeZodiacConfig = getValidatedZodiacConfig(destChainConfig, logger, logContext); + + for 
(const actionTx of actionTxs) { + await submitTransactionWithLogging({ + chainService: selectedChainService, + logger, + chainId: operation.destinationChainId.toString(), + txRequest: { + chainId: operation.destinationChainId, + to: actionTx.transaction.to!, + data: actionTx.transaction.data!, + value: (actionTx.transaction.value ?? BigInt(0)).toString(), + from: actualSender, + funcSig: actionTx.transaction.funcSig || '', + }, + zodiacConfig: postBridgeZodiacConfig, + context: { ...logContext, callbackType: `post-bridge: ${actionTx.memo}` }, + }); + + if (actionTx.effectiveAmount) { + currentAmount = actionTx.effectiveAmount; + } + } + } + + await db.updateRebalanceOperation(operation.id, { + status: RebalanceOperationStatus.COMPLETED, + }); + + logger.info(`${descriptor.name} post-bridge actions completed successfully`, logContext); + } catch (e) { + // Leave as AWAITING_POST_BRIDGE for retry on next poll cycle + logger.error('Failed to execute post-bridge actions, will retry', { + ...logContext, + error: jsonifyError(e), + }); + } + } +} diff --git a/packages/poller/src/rebalance/bridgeExecution.ts b/packages/poller/src/rebalance/bridgeExecution.ts new file mode 100644 index 00000000..ee143f59 --- /dev/null +++ b/packages/poller/src/rebalance/bridgeExecution.ts @@ -0,0 +1,304 @@ +/** + * Shared EVM bridge execution helpers. 
+ * + * Two-level API: + * Level 1 — submitBridgeTransactions: tx submission loop only + * Level 2 — executeEvmBridge: full quote → slippage → send → submit → DB record + */ + +import { jsonifyError } from '@mark/logger'; +import { RebalanceOperationStatus, RebalanceAction, SupportedBridge, WalletType, WalletConfig } from '@mark/core'; +import { ProcessingContext } from '../init'; +import { submitTransactionWithLogging } from '../helpers/transactions'; +import { MemoizedTransactionRequest, RebalanceTransactionMemo, BridgeAdapter } from '@mark/rebalance'; +import { createRebalanceOperation, TransactionReceipt } from '@mark/database'; +import { ChainService } from '@mark/chainservice'; +import { SenderConfig } from './types'; + +// --------------------------------------------------------------------------- +// Level 1: submitBridgeTransactions — tx submission loop +// --------------------------------------------------------------------------- + +export interface SubmitBridgeTxsParams { + context: Pick; + chainService: ChainService; + route: { origin: number; destination: number; asset: string }; + bridgeType: SupportedBridge; + bridgeTxRequests: MemoizedTransactionRequest[]; + amountToBridge: bigint; + senderOverride?: SenderConfig; + zodiacConfig?: WalletConfig; +} + +export interface SubmitBridgeTxsResult { + receipt?: TransactionReceipt; + effectiveBridgedAmount: string; +} + +/** + * Loops through bridge transaction requests, submits each via `submitTransactionWithLogging`, + * captures the receipt from the `Rebalance` memo tx, and tracks the effective amount. + */ +export const submitBridgeTransactions = async ({ + context, + chainService, + route, + bridgeType, + bridgeTxRequests, + amountToBridge, + senderOverride, + zodiacConfig = { walletType: WalletType.EOA }, +}: SubmitBridgeTxsParams): Promise => { + const { logger, config, requestId } = context; + + const senderAddress = senderOverride?.address ?? 
config.ownAddress; + const senderLabel = senderOverride?.label ?? 'market-maker'; + + let idx = -1; + let effectiveBridgedAmount = amountToBridge.toString(); + let receipt: TransactionReceipt | undefined; + + for (const { transaction, memo, effectiveAmount } of bridgeTxRequests) { + idx++; + logger.info('Submitting bridge transaction', { + requestId, + route, + bridgeType, + transactionIndex: idx, + totalTransactions: bridgeTxRequests.length, + transaction, + memo, + amountToBridge, + sender: senderAddress, + senderType: senderLabel, + }); + + const result = await submitTransactionWithLogging({ + chainService, + logger, + chainId: route.origin.toString(), + txRequest: { + to: transaction.to!, + data: transaction.data!, + value: (transaction.value || 0).toString(), + chainId: route.origin, + from: senderAddress, + funcSig: transaction.funcSig || '', + }, + zodiacConfig, + context: { requestId, route, bridgeType, transactionType: memo, sender: senderLabel }, + }); + + logger.info('Successfully submitted bridge transaction', { + requestId, + route, + bridgeType, + transactionIndex: idx, + totalTransactions: bridgeTxRequests.length, + transactionHash: result.hash, + memo, + amountToBridge, + }); + + if (memo !== RebalanceTransactionMemo.Rebalance) { + continue; + } + + receipt = result.receipt! 
as unknown as TransactionReceipt; + if (effectiveAmount) { + effectiveBridgedAmount = effectiveAmount; + logger.info('Using effective bridged amount from adapter', { + requestId, + originalAmount: amountToBridge.toString(), + effectiveAmount: effectiveBridgedAmount, + bridgeType, + }); + } + } + + return { receipt, effectiveBridgedAmount }; +}; + +// --------------------------------------------------------------------------- +// Level 2: executeEvmBridge — full 5-step pattern +// --------------------------------------------------------------------------- + +export interface ExecuteEvmBridgeParams { + context: ProcessingContext; + adapter: BridgeAdapter; + route: { + origin: number; + destination: number; + asset: string; + maximum?: string; + slippagesDbps?: number[]; + preferences?: SupportedBridge[]; + reserve?: string; + }; + amount: bigint; // in adapter-expected units (caller converts) + dbAmount?: bigint; // if set, overrides `amount` for DB record + action tracking (e.g. 18-decimal when amount is native-unit) + sender: string; + recipient: string; // used for adapter.send() and DB record + dbRecipient?: string; // if set, overrides `recipient` in DB record only + slippageTolerance: bigint; + slippageMultiplier: bigint; + chainService: ChainService; + senderConfig?: SenderConfig; + zodiacConfig?: WalletConfig; + dbRecord: { + earmarkId: string | null; + tickerHash: string; + bridgeTag: string; + status: RebalanceOperationStatus; + }; + label: string; +} + +export interface ExecuteEvmBridgeResult { + actions: RebalanceAction[]; + receipt?: TransactionReceipt; + effectiveBridgedAmount: string; +} + +/** + * Full 5-step EVM bridge execution: + * 1. Get quote from adapter + * 2. Check slippage tolerance + * 3. Get bridge transaction requests via adapter.send() + * 4. Submit transactions via submitBridgeTransactions() + * 5. Create DB rebalance operation record + * + * Returns empty `actions` on quote failure or slippage violation (no throw). 
+ * Throws on tx submission failure (caller catches). + */ +export const executeEvmBridge = async ({ + context, + adapter, + route, + amount, + dbAmount, + sender, + recipient, + dbRecipient, + slippageTolerance, + slippageMultiplier, + chainService, + senderConfig, + zodiacConfig, + dbRecord, + label, +}: ExecuteEvmBridgeParams): Promise => { + const { logger, requestId } = context; + const bridgeType = adapter.type(); + const effectiveDbRecipient = dbRecipient ?? recipient; + // When adapter amount units differ from DB/tracking units (e.g. 6-decimal native vs 18-decimal normalized), + // callers pass dbAmount to preserve the original tracking unit. + const trackingAmount = dbAmount ?? amount; + const empty: ExecuteEvmBridgeResult = { actions: [], effectiveBridgedAmount: '0' }; + + // Step 1: Get quote + let receivedAmountStr: string; + try { + receivedAmountStr = await adapter.getReceivedAmount(amount.toString(), route); + logger.info(`Received ${label} quote`, { + requestId, + bridgeType, + amountToBridge: amount.toString(), + receivedAmount: receivedAmountStr, + }); + } catch (quoteError) { + logger.error(`Failed to get ${label} quote`, { + requestId, + bridgeType, + amountToBridge: amount.toString(), + error: jsonifyError(quoteError), + }); + return empty; + } + + // Step 2: Check slippage + const receivedAmount = BigInt(receivedAmountStr); + const minimumAcceptableAmount = amount - (amount * slippageTolerance) / slippageMultiplier; + + if (receivedAmount < minimumAcceptableAmount) { + logger.warn(`${label} quote does not meet slippage requirements`, { + requestId, + bridgeType, + amountToBridge: amount.toString(), + receivedAmount: receivedAmount.toString(), + minimumAcceptableAmount: minimumAcceptableAmount.toString(), + slippageTolerance: slippageTolerance.toString(), + }); + return empty; + } + + // Step 3: Get bridge transaction requests + let bridgeTxRequests: MemoizedTransactionRequest[]; + try { + bridgeTxRequests = await adapter.send(sender, 
recipient, amount.toString(), route); + if (!bridgeTxRequests.length) { + logger.error(`No bridge transactions returned from ${label} adapter`, { requestId }); + return empty; + } + logger.info(`Prepared ${label} bridge transactions`, { + requestId, + transactionCount: bridgeTxRequests.length, + }); + } catch (sendError) { + logger.error(`Failed to get ${label} bridge transactions`, { + requestId, + bridgeType, + error: jsonifyError(sendError), + }); + return empty; + } + + // Step 4: Submit bridge transactions + // Use trackingAmount (18-decimal normalized) as default for effectiveBridgedAmount, + // not the adapter-unit amount, so DB records and committed-funds tracking stay consistent. + const { receipt, effectiveBridgedAmount } = await submitBridgeTransactions({ + context, + chainService, + route, + bridgeType, + bridgeTxRequests, + amountToBridge: trackingAmount, + senderOverride: senderConfig, + zodiacConfig, + }); + + // Step 5: Create database record + await createRebalanceOperation({ + earmarkId: dbRecord.earmarkId, + originChainId: route.origin, + destinationChainId: route.destination, + tickerHash: dbRecord.tickerHash, + amount: effectiveBridgedAmount, + slippage: Number(slippageTolerance), + status: dbRecord.status, + bridge: dbRecord.bridgeTag, + transactions: receipt ? 
{ [route.origin]: receipt } : undefined, + recipient: effectiveDbRecipient, + }); + + logger.info(`Successfully created ${label} rebalance operation`, { + requestId, + originTxHash: receipt?.transactionHash, + amountToBridge: effectiveBridgedAmount, + bridge: dbRecord.bridgeTag, + }); + + const actions: RebalanceAction[] = [ + { + bridge: bridgeType, + amount: trackingAmount.toString(), + origin: route.origin, + destination: route.destination, + asset: route.asset, + transaction: receipt?.transactionHash || '', + recipient: effectiveDbRecipient, + }, + ]; + + return { actions, receipt, effectiveBridgedAmount }; +}; diff --git a/packages/poller/src/rebalance/callbackEngine.ts b/packages/poller/src/rebalance/callbackEngine.ts new file mode 100644 index 00000000..d7cc8a2b --- /dev/null +++ b/packages/poller/src/rebalance/callbackEngine.ts @@ -0,0 +1,129 @@ +/** + * Generic callback engine for processing in-flight rebalance operations. + * + * Captures the shared lifecycle across all rebalancers: + * fetch in-flight ops → iterate → check timeout → delegate to processOperation + * + * Each rebalancer provides a descriptor with the parts that differ + * (bridge tag, statuses to query, per-operation processing logic). + */ +import { RebalanceOperationStatus } from '@mark/core'; +import { jsonifyError } from '@mark/logger'; +import type { CamelCasedProperties } from 'type-fest'; +import type { rebalance_operations, TransactionEntry } from '@mark/database'; +import { ProcessingContext } from '../init'; +import { isOperationTimedOut, DEFAULT_OPERATION_TTL_MINUTES } from './helpers'; + +/** The operation type returned by the database query, with transactions attached. 
*/ +export type RebalanceOperation = CamelCasedProperties & { + transactions?: Record; +}; + +export interface CallbackDescriptor { + /** Human-readable name for logging (e.g., 'mETH', 'aManUSDe') */ + name: string; + + /** Bridge tag(s) to filter operations (e.g., 'stargate-amanusde', ['mantle', 'across-mantle']) */ + bridge: string | string[]; + + /** Operation statuses to query for in-flight operations */ + statuses: RebalanceOperationStatus[]; + + /** Optional additional filter: chain ID */ + chainId?: number; + + /** TTL override in minutes (defaults to config.regularRebalanceOpTTLMinutes or 24h) */ + ttlMinutes?: number; + + /** Status to set when an operation times out (default: CANCELLED) */ + timeoutStatus?: RebalanceOperationStatus; + + /** + * Called when an operation times out, after the status has been updated. + * Use for side effects like cancelling linked earmarks. + */ + onTimeout?: (operation: RebalanceOperation, context: ProcessingContext) => Promise; + + /** + * Process a single in-flight operation. This is where the bridge-specific + * state machine logic lives (e.g., PENDING → AWAITING_CALLBACK → COMPLETED). + * + * The operation's `status` field may be mutated to reflect in-memory transitions + * within a single poll cycle (e.g., PENDING→AWAITING_CALLBACK fall-through). + */ + processOperation: (operation: RebalanceOperation, context: ProcessingContext) => Promise; +} + +/** + * Run the callback loop for in-flight rebalance operations. + * + * Fetches operations matching the descriptor's filters, checks timeouts, + * and delegates per-operation processing to the descriptor. + */ +export async function runCallbackLoop(context: ProcessingContext, descriptor: CallbackDescriptor): Promise { + const { logger, requestId, config, database: db } = context; + const name = descriptor.name; + + const operationTtlMinutes = + descriptor.ttlMinutes ?? config.regularRebalanceOpTTLMinutes ?? 
DEFAULT_OPERATION_TTL_MINUTES; + const timeoutStatus = descriptor.timeoutStatus ?? RebalanceOperationStatus.CANCELLED; + + logger.info(`Executing callbacks for ${name} rebalance`, { requestId }); + + const { operations } = await db.getRebalanceOperations(undefined, undefined, { + status: descriptor.statuses, + bridge: descriptor.bridge, + ...(descriptor.chainId !== undefined ? { chainId: descriptor.chainId } : {}), + }); + + logger.debug(`Found ${operations.length} ${name} rebalance operations`, { + count: operations.length, + requestId, + operationTtlMinutes, + }); + + for (const operation of operations) { + const logContext = { + requestId, + operationId: operation.id, + earmarkId: operation.earmarkId, + originChain: operation.originChainId, + destinationChain: operation.destinationChainId, + status: operation.status, + }; + + // Check for operation timeout + if (operation.createdAt && isOperationTimedOut(operation.createdAt, operationTtlMinutes)) { + const operationAgeMinutes = Math.round((Date.now() - operation.createdAt.getTime()) / (60 * 1000)); + logger.warn(`${name} operation timed out, marking as ${timeoutStatus}`, { + ...logContext, + createdAt: operation.createdAt.toISOString(), + operationAgeMinutes, + ttlMinutes: operationTtlMinutes, + }); + + try { + await db.updateRebalanceOperation(operation.id, { status: timeoutStatus }); + if (descriptor.onTimeout) { + await descriptor.onTimeout(operation, context); + } + } catch (error) { + logger.error(`Failed to handle timed-out ${name} operation`, { + ...logContext, + error: jsonifyError(error), + }); + } + continue; + } + + // Delegate to bridge-specific processing + try { + await descriptor.processOperation(operation, context); + } catch (error) { + logger.error(`Failed to process ${name} callback for operation`, { + ...logContext, + error: jsonifyError(error), + }); + } + } +} diff --git a/packages/poller/src/rebalance/callbacks.ts b/packages/poller/src/rebalance/callbacks.ts index e08ef1c0..a8a1823a 100644 
--- a/packages/poller/src/rebalance/callbacks.ts +++ b/packages/poller/src/rebalance/callbacks.ts @@ -14,6 +14,7 @@ import { } from '@mark/core'; import { buildTransactionsForAction } from '@mark/rebalance'; import { TransactionEntry, TransactionReceipt } from '@mark/database'; +import { getRegisteredBridgeTags } from './registry'; export const executeDestinationCallbacks = async (context: ProcessingContext): Promise => { const { logger, requestId, config, rebalance, chainService, database: db } = context; @@ -65,6 +66,10 @@ export const executeDestinationCallbacks = async (context: ProcessingContext): P return RebalanceOperationStatus.COMPLETED; }; + // Bridge tags managed by dedicated rebalancers — skip them here + // so the generic handler doesn't race and mark them completed prematurely. + const ownedBridgeTags = getRegisteredBridgeTags(); + for (const operation of operations) { const logContext = { requestId, @@ -74,6 +79,16 @@ export const executeDestinationCallbacks = async (context: ProcessingContext): P destinationChain: operation.destinationChainId, }; + // Skip operations owned by dedicated rebalancers + if (operation.bridge && ownedBridgeTags.has(operation.bridge)) { + logger.debug('Skipping operation managed by dedicated rebalancer', { + ...logContext, + bridge: operation.bridge, + status: operation.status, + }); + continue; + } + // Handle AWAITING_POST_BRIDGE operations (execute post-bridge actions) if (operation.status === RebalanceOperationStatus.AWAITING_POST_BRIDGE) { const matchingRoute = findMatchingRoute(operation); diff --git a/packages/poller/src/rebalance/helpers.ts b/packages/poller/src/rebalance/helpers.ts new file mode 100644 index 00000000..94e3bb2f --- /dev/null +++ b/packages/poller/src/rebalance/helpers.ts @@ -0,0 +1,68 @@ +/** + * Shared helpers for rebalancer modules. 
+ */ + +import { SupportedBridge } from '@mark/core'; + +// Default operation timeout: 24 hours (in minutes) +export const DEFAULT_OPERATION_TTL_MINUTES = 24 * 60; + +/** + * Check if an operation has exceeded its TTL (time-to-live). + * Operations stuck in PENDING or AWAITING_CALLBACK for too long should be marked as failed. + * + * @param createdAt - Operation creation timestamp + * @param ttlMinutes - TTL in minutes (default: 24 hours) + * @returns true if operation has timed out + */ +export function isOperationTimedOut(createdAt: Date, ttlMinutes: number = DEFAULT_OPERATION_TTL_MINUTES): boolean { + const maxAgeMs = ttlMinutes * 60 * 1000; + const operationAgeMs = Date.now() - createdAt.getTime(); + return operationAgeMs > maxAgeMs; +} + +/** + * Map from bridge tag (stored in DB) to the SupportedBridge adapter type. + * Replaces fragile `bridge.split('-')[0]` parsing scattered across rebalancers. + */ +const BRIDGE_TAG_TO_TYPE: Record = { + // Aave token flows + 'stargate-amanusde': SupportedBridge.Stargate, + 'stargate-amansyrupusdt': SupportedBridge.Stargate, + // TAC USDT flow + 'stargate-tac': SupportedBridge.Stargate, + // mETH flows + [SupportedBridge.Mantle]: SupportedBridge.Mantle, + [`${SupportedBridge.Across}-mantle`]: SupportedBridge.Across, + // Solana CCIP flow + 'ccip-solana-mainnet': SupportedBridge.CCIP, +}; + +/** + * Resolve the SupportedBridge adapter type from a bridge tag stored in the database. + * First checks the explicit mapping, then falls back to extracting the prefix before + * the first '-' (e.g., 'stargate-foo' → 'stargate') for forward compatibility + * with new bridge tags that follow the convention. + * + * Returns undefined only if neither approach yields a valid SupportedBridge. 
+ */ +export function getBridgeTypeFromTag(bridgeTag: string): SupportedBridge | undefined { + const explicit = BRIDGE_TAG_TO_TYPE[bridgeTag]; + if (explicit) return explicit; + + // Fallback: extract prefix before first '-' and check if it's a valid SupportedBridge + const prefix = bridgeTag.split('-')[0]; + if (Object.values(SupportedBridge).includes(prefix as SupportedBridge)) { + return prefix as SupportedBridge; + } + + return undefined; +} + +/** + * Register a custom bridge tag → adapter type mapping at runtime. + * Useful for new rebalancers that introduce new bridge tags. + */ +export function registerBridgeTag(tag: string, bridgeType: SupportedBridge): void { + BRIDGE_TAG_TO_TYPE[tag] = bridgeType; +} diff --git a/packages/poller/src/rebalance/mantleEth.ts b/packages/poller/src/rebalance/mantleEth.ts index d69a74cf..af4f89be 100644 --- a/packages/poller/src/rebalance/mantleEth.ts +++ b/packages/poller/src/rebalance/mantleEth.ts @@ -17,7 +17,7 @@ import { import { ProcessingContext } from '../init'; import { getActualAddress } from '../helpers/zodiac'; import { submitTransactionWithLogging } from '../helpers/transactions'; -import { MemoizedTransactionRequest, RebalanceTransactionMemo } from '@mark/rebalance'; +import { MemoizedTransactionRequest } from '@mark/rebalance'; import { createEarmark, createRebalanceOperation, @@ -29,58 +29,15 @@ import { import { IntentStatus } from '@mark/everclear'; import { ChainService } from '@mark/chainservice'; +import { getBridgeTypeFromTag } from './helpers'; +import { runCallbackLoop, RebalanceOperation } from './callbackEngine'; +import { SenderConfig, RebalanceRunState } from './types'; +import { runThresholdRebalance, ThresholdRebalanceDescriptor } from './thresholdEngine'; +import { submitBridgeTransactions, executeEvmBridge } from './bridgeExecution'; + const WETH_TICKER_HASH = '0x0f8a193ff464434486c0daf7db2a895884365d2bc84ba47a68fcf89c1b14b5b8'; const METH_TICKER_HASH = 
'0xd5a2aecb01320815a5625da6d67fbe0b34c12b267ebb3b060c014486ec5484d8'; -// Default operation timeout: 24 hours (in minutes) -const DEFAULT_OPERATION_TTL_MINUTES = 24 * 60; - -/** - * Check if an operation has exceeded its TTL (time-to-live). - * Operations stuck in PENDING or AWAITING_CALLBACK for too long should be marked as failed. - * - * @param createdAt - Operation creation timestamp - * @param ttlMinutes - TTL in minutes (default: 24 hours) - * @returns true if operation has timed out - */ -function isOperationTimedOut(createdAt: Date, ttlMinutes: number = DEFAULT_OPERATION_TTL_MINUTES): boolean { - const maxAgeMs = ttlMinutes * 60 * 1000; - const operationAgeMs = Date.now() - createdAt.getTime(); - return operationAgeMs > maxAgeMs; -} - -type ExecuteBridgeContext = Pick; - -interface SenderConfig { - address: string; // Sender's Ethereum address - signerUrl?: string; // Web3signer URL for this sender (uses default if not specified) - label: 'market-maker' | 'fill-service'; // For logging -} -interface ExecuteBridgeParams { - context: ExecuteBridgeContext; - route: { - origin: number; - destination: number; - asset: string; - }; - bridgeType: SupportedBridge; - bridgeTxRequests: MemoizedTransactionRequest[]; - amountToBridge: bigint; - senderOverride?: SenderConfig; // Optional: use different sender than config.ownAddress -} - -interface ExecuteBridgeResult { - receipt?: TransactionReceipt; - effectiveBridgedAmount: string; -} - -/** - * Shared state for tracking WETH that has been committed in this run - * This prevents over-committing when both MM and FS need rebalancing simultaneously - */ -interface RebalanceRunState { - committedEthWeth: bigint; // Amount of ETH WETH committed in this run (not yet confirmed on-chain) -} interface ThresholdRebalanceParams { context: ProcessingContext; origin: string; @@ -88,93 +45,9 @@ interface ThresholdRebalanceParams { amountToBridge: bigint; runState: RebalanceRunState; earmarkId: string | null; // null for 
threshold-based + skipCommitTracking?: boolean; // true when called from threshold engine (which tracks itself) } -/** - * Submits a sequence of bridge transactions and returns the final receipt and effective bridged amount. - * @param senderOverride - If provided, uses this address as sender instead of config.ownAddress - */ -const executeBridgeTransactions = async ({ - context, - route, - bridgeType, - bridgeTxRequests, - amountToBridge, - senderOverride, -}: ExecuteBridgeParams): Promise => { - const { logger, chainService, config, requestId } = context; - - // Use sender override if provided, otherwise default to ownAddress - const senderAddress = senderOverride?.address ?? config.ownAddress; - const senderLabel = senderOverride?.label ?? 'market-maker'; - - let idx = -1; - let effectiveBridgedAmount = amountToBridge.toString(); - let receipt: TransactionReceipt | undefined; - - for (const { transaction, memo, effectiveAmount } of bridgeTxRequests) { - idx++; - logger.info('Submitting bridge transaction', { - requestId, - route, - bridgeType, - transactionIndex: idx, - totalTransactions: bridgeTxRequests.length, - transaction, - memo, - amountToBridge, - sender: senderAddress, - senderType: senderLabel, - }); - - const result = await submitTransactionWithLogging({ - chainService, - logger, - chainId: route.origin.toString(), - txRequest: { - to: transaction.to!, - data: transaction.data!, - value: (transaction.value || 0).toString(), - chainId: route.origin, - from: senderAddress, - funcSig: transaction.funcSig || '', - }, - zodiacConfig: { - walletType: WalletType.EOA, - }, - context: { requestId, route, bridgeType, transactionType: memo, sender: senderLabel }, - }); - - logger.info('Successfully submitted bridge transaction', { - requestId, - route, - bridgeType, - transactionIndex: idx, - totalTransactions: bridgeTxRequests.length, - transactionHash: result.hash, - memo, - amountToBridge, - }); - - if (memo !== RebalanceTransactionMemo.Rebalance) { - 
continue; - } - - receipt = result.receipt! as unknown as TransactionReceipt; - if (effectiveAmount) { - effectiveBridgedAmount = effectiveAmount; - logger.info('Using effective bridged amount from adapter', { - requestId, - originalAmount: amountToBridge.toString(), - effectiveAmount: effectiveBridgedAmount, - bridgeType, - }); - } - } - - return { receipt, effectiveBridgedAmount }; -}; - export async function rebalanceMantleEth(context: ProcessingContext): Promise { const { logger, requestId, config, rebalance } = context; const actions: RebalanceAction[] = []; @@ -235,7 +108,7 @@ export async function rebalanceMantleEth(context: ProcessingContext): Promise fsConfig.thresholdEnabled, + + hasInFlightOperations: async () => { + const { operations } = await database.getRebalanceOperations(undefined, undefined, { + status: [RebalanceOperationStatus.PENDING, RebalanceOperationStatus.AWAITING_CALLBACK], + bridge: [SupportedBridge.Mantle, `${SupportedBridge.Across}-mantle`], + }); + if (operations.length > 0) { + logger.info(`Found ${operations.length} in-flight mETH rebalance operations, skipping threshold`, { + requestId, + inFlightCount: operations.length, + }); + } + return operations.length > 0; + }, + + getRecipientBalance: async () => { + const balance = await getEvmBalance( config, MANTLE_CHAIN_ID.toString(), - fsConfig.address, + fsConfig.address!, getTokenAddressFromConfig(METH_TICKER_HASH, MANTLE_CHAIN_ID.toString(), config)!, getDecimalsFromConfig(METH_TICKER_HASH, MANTLE_CHAIN_ID.toString(), config)!, prometheus, ); - } catch (error) { - logger.warn('Failed to check FS receiver mETH balance', { + // Include funds already committed by PRIORITY 1 (intent-based) in this run + const total = balance + runState.committedAmount; + logger.info('Checking FS receiver mETH balance', { requestId, - fsReceiverAddress: fsConfig.address, - error: jsonifyError(error), + fillServiceAddress: fsConfig.address, + senderAddress: fsSenderAddress, + fsReceiverMethBalance: 
balance.toString(), + committedAmount: runState.committedAmount.toString(), + total: total.toString(), + minRebalance: minRebalance.toString(), }); - return actions; - } - } + return total; + }, - logger.info('Checking FS receiver mETH balance..', { - requestId, - fillServiceAddress: fsConfig.address, - senderAddress: fsConfig.senderAddress, - fsReceiverMethBalance: fsReceiverMethBalance.toString(), - committedEthWeth: runState.committedEthWeth.toString(), - total: (fsReceiverMethBalance + runState.committedEthWeth).toString(), - threshold: threshold.toString(), - target: target.toString(), - minRebalance: minRebalance.toString(), - }); - // Add committed funds to receiver balance. - fsReceiverMethBalance += runState.committedEthWeth; + getThresholds: () => ({ threshold, target }), - // Get FS sender's WETH balance on Mainnet - let fsSenderWethBalance = 0n; - if (fsSenderAddress) { - try { - fsSenderWethBalance = await getEvmBalance( + // WETH/mETH both use 18 decimals, no conversion needed + convertShortfallToBridgeAmount: async (shortfall) => shortfall, + + getSenderBalance: async () => { + if (!fsSenderAddress) return 0n; + return getEvmBalance( config, MAINNET_CHAIN_ID.toString(), fsSenderAddress, @@ -502,79 +381,25 @@ const evaluateFillServiceRebalance = async ( getDecimalsFromConfig(WETH_TICKER_HASH, MAINNET_CHAIN_ID.toString(), config)!, prometheus, ); - } catch (error) { - logger.warn('Failed to check FS sender WETH balance', { - requestId, - fsSenderAddress, - error: jsonifyError(error), - }); - return actions; - } - } - - if (fsReceiverMethBalance >= threshold) { - logger.info('FS receiver has enough mETH, no rebalance needed', { - requestId, - fsReceiverMethBalance: fsReceiverMethBalance.toString(), - thresholdMethBalance: threshold.toString(), - }); - - return actions; - } - - const shortfall = target - fsReceiverMethBalance; - if (shortfall < minRebalance) { - logger.debug('FS shortfall below minimum rebalance amount, skipping', { - requestId, - 
shortfall: shortfall.toString(), - minRebalance: minRebalance.toString(), - }); - return actions; - } - - // Check if sender has enough WETH to cover the shortfall - // If fsSenderWethBalance < shortfall, sender doesn't have enough funds to bridge - if (fsSenderWethBalance < shortfall) { - logger.warn('FS sender has insufficient WETH to cover the full shortfall', { - requestId, - fsSenderWethBalance: fsSenderWethBalance.toString(), - shortfall: shortfall.toString(), - note: 'Will bridge available balance if above minimum', - }); - // Don't return early - we can still bridge what we have if above minimum - } - - // Calculate amount to bridge: min(shortfall, available balance) - const amountFromSender = fsSenderWethBalance < shortfall ? fsSenderWethBalance : shortfall; - - // Skip if available amount is below minimum - if (amountFromSender < minRebalance) { - logger.warn('Available WETH below minimum rebalance threshold, skipping', { - requestId, - availableAmount: amountFromSender.toString(), - minRebalance: minRebalance.toString(), - }); - return actions; - } + }, - logger.info('FS threshold rebalancing triggered', { - requestId, - fsSenderWethBalance: fsSenderWethBalance.toString(), - shortfall: shortfall.toString(), - amountToBridge: amountFromSender.toString(), - recipient: fsConfig.address, - }); + getAmountCaps: () => ({ min: minRebalance }), + + executeBridge: async (ctx, amount) => { + return processThresholdRebalancing({ + context: ctx, + origin: MAINNET_CHAIN_ID, + recipientAddress: fsConfig.address!, + amountToBridge: amount, + runState, + earmarkId: null, + skipCommitTracking: true, // threshold engine tracks committed amount itself + }); + }, + }; - actions.push( - ...(await processThresholdRebalancing({ - context, - origin: MAINNET_CHAIN_ID, - recipientAddress: fsConfig.address!, - amountToBridge: amountFromSender, - runState, - earmarkId: null, - })), - ); + const thresholdActions = await runThresholdRebalance(context, methThresholdDescriptor, 
runState); + actions.push(...thresholdActions); return actions; }; @@ -586,6 +411,7 @@ const processThresholdRebalancing = async ({ amountToBridge, runState, earmarkId, + skipCommitTracking = false, }: ThresholdRebalanceParams): Promise => { const { config, logger, requestId } = context; const bridgeConfig = config.methRebalance!.bridge; @@ -608,17 +434,18 @@ const processThresholdRebalancing = async ({ // before calling this function. No need to re-check here. // Execute bridge (no earmark for threshold-based) - // Pass runState to track committed funds const actions = await executeMethBridge(context, origin.toString(), recipientAddress, amountToBridge, earmarkId); - // Track committed funds if bridge was successful - if (actions.length > 0) { - runState.committedEthWeth += amountToBridge; + // Track committed funds if bridge was successful. + // When called from the threshold engine, skipCommitTracking is set to avoid double-counting + // (the engine tracks committed amounts itself in step 10). + if (actions.length > 0 && !skipCommitTracking) { + runState.committedAmount += amountToBridge; logger.debug('Updated committed funds after threshold bridge', { requestId, recipient: recipientAddress, bridgedAmount: amountToBridge.toString(), - totalCommitted: runState.committedEthWeth.toString(), + totalCommitted: runState.committedAmount.toString(), }); } @@ -642,7 +469,6 @@ const executeMethBridge = async ( const isForFillService = recipientAddress.toLowerCase() === config.methRebalance?.fillService?.address?.toLowerCase(); // --- Leg 1: Bridge WETH from origin chain to Mainnet via Across --- - let rebalanceSuccessful = false; const bridgeType = SupportedBridge.Across; // Determine sender for the bridge based on recipient type @@ -765,7 +591,7 @@ const executeMethBridge = async ( }); // Use slippage from config (default 500 = 5%) - const slippageDbps = config.methRebalance!.bridge.slippageDbps; + const slippageDbps = config.methRebalance!.bridge.slippageDbps ?? 
500; // Send WETH to Mainnet first const route = { @@ -793,52 +619,77 @@ const executeMethBridge = async ( return []; } - let bridgeTxRequests: MemoizedTransactionRequest[] = []; - let receivedAmount: bigint = amount; - const originIsMainnet = String(origin) === MAINNET_CHAIN_ID; - if (!originIsMainnet) { + + if (originIsMainnet) { + // Origin is mainnet — skip Across bridge, just create DB record with AWAITING_CALLBACK try { - const amountInNativeUnits = convertToNativeUnits(amount, originWethDecimals); - // Get quote - const receivedAmountStr = await adapter.getReceivedAmount(amountInNativeUnits.toString(), route); - logger.info('Received Across quote', { + await createRebalanceOperation({ + earmarkId: earmarkId, + originChainId: route.origin, + destinationChainId: route.destination, + tickerHash: getTickerForAsset(route.asset, route.origin, config) || WETH_TICKER_HASH, + amount: amount.toString(), + slippage: route.slippagesDbps[0], + status: RebalanceOperationStatus.AWAITING_CALLBACK, + bridge: `${bridgeType}-mantle`, + recipient: recipientAddress, + }); + + logger.info('Successfully created mETH Leg 1 rebalance operation (origin is mainnet)', { requestId, route, - amountToBridge: amountInNativeUnits.toString(), - receivedAmount: receivedAmountStr, + bridgeType, + amountToBridge: amount.toString(), }); - // Check slippage - use safeParseBigInt for adapter response - // Note: Both receivedAmount and minimumAcceptableAmount are in native units (18 decimals for WETH) - receivedAmount = safeParseBigInt(receivedAmountStr); - const slippageDbps = BigInt(route.slippagesDbps[0]); // slippagesDbps is number[], BigInt is safe - const minimumAcceptableAmount = amountInNativeUnits - (amountInNativeUnits * slippageDbps) / DBPS_MULTIPLIER; - - if (receivedAmount < minimumAcceptableAmount) { - logger.warn('Across quote does not meet slippage requirements', { - requestId, - route, - amountToBridge: amountInNativeUnits.toString(), - receivedAmount: receivedAmount.toString(), - 
minimumAcceptableAmount: minimumAcceptableAmount.toString(), - }); - return []; - } + actions.push({ + bridge: adapter.type(), + amount: amount.toString(), + origin: route.origin, + destination: route.destination, + asset: route.asset, + transaction: '', + recipient: recipientAddress, + }); + } catch (error) { + logger.error('Failed to create mETH rebalance operation', { + requestId, + route, + error: jsonifyError(error), + }); + return []; + } + } else { + // Non-mainnet origin — full Across bridge flow + try { + const amountInNativeUnits = convertToNativeUnits(amount, originWethDecimals); - // Get bridge transactions - bridgeTxRequests = await adapter.send(evmSender, recipientAddress, amountInNativeUnits.toString(), route); + const result = await executeEvmBridge({ + context, + adapter, + route, + amount: amountInNativeUnits, + dbAmount: amount, // preserve 18-decimal for DB record consistency + sender: evmSender, + recipient: recipientAddress, + slippageTolerance: BigInt(route.slippagesDbps[0]), + slippageMultiplier: DBPS_MULTIPLIER, + chainService: selectedChainService, + senderConfig, + dbRecord: { + earmarkId: earmarkId, + tickerHash: getTickerForAsset(route.asset, route.origin, config) || WETH_TICKER_HASH, + bridgeTag: `${bridgeType}-mantle`, + status: RebalanceOperationStatus.PENDING, + }, + label: 'mETH Leg 1 Across', + }); - if (!bridgeTxRequests.length) { - logger.error('No bridge transactions returned from Across adapter', { requestId }); + if (result.actions.length === 0) { return []; } - - logger.info('Prepared Across bridge transactions', { - requestId, - route, - transactionCount: bridgeTxRequests.length, - }); + actions.push(...result.actions); } catch (error) { logger.error('Failed to execute Across bridge', { requestId, @@ -850,474 +701,385 @@ const executeMethBridge = async ( } } - try { - // Execute bridge transactions using the selected chain service and sender - const { receipt, effectiveBridgedAmount } = await executeBridgeTransactions({ - 
context: { requestId, logger, chainService: selectedChainService, config }, - route, - bridgeType, - bridgeTxRequests, - amountToBridge: amount, - senderOverride: senderConfig, - }); + return actions; +}; - // Create database record for Leg 1 - await createRebalanceOperation({ - earmarkId: earmarkId, - originChainId: route.origin, - destinationChainId: route.destination, - tickerHash: getTickerForAsset(route.asset, route.origin, config) || WETH_TICKER_HASH, - amount: effectiveBridgedAmount, - slippage: route.slippagesDbps[0], - status: originIsMainnet ? RebalanceOperationStatus.AWAITING_CALLBACK : RebalanceOperationStatus.PENDING, - bridge: `${bridgeType}-mantle`, - transactions: receipt - ? { - [route.origin]: receipt, - } - : undefined, - recipient: recipientAddress, - }); +export const executeMethCallbacks = async (context: ProcessingContext): Promise => { + return runCallbackLoop(context, { + name: 'mETH', + bridge: [SupportedBridge.Mantle, `${SupportedBridge.Across}-mantle`], + statuses: [RebalanceOperationStatus.PENDING, RebalanceOperationStatus.AWAITING_CALLBACK], + onTimeout: async (operation, ctx) => { + if (operation.earmarkId) { + await ctx.database.updateEarmarkStatus(operation.earmarkId, EarmarkStatus.CANCELLED); + ctx.logger.info('Earmark cancelled due to operation timeout', { + requestId: ctx.requestId, + operationId: operation.id, + earmarkId: operation.earmarkId, + }); + } + }, + processOperation: (operation, ctx) => processMethOperation(operation, ctx), + }); +}; - logger.info('Successfully created mETH Leg 1 rebalance operation', { - requestId, - route, - bridgeType, - originTxHash: receipt?.transactionHash, - amountToBridge: effectiveBridgedAmount, - }); +/** + * Process a single in-flight mETH operation through its state machine. 
+ */ +async function processMethOperation(operation: RebalanceOperation, context: ProcessingContext): Promise { + const { logger, requestId, config, rebalance, chainService, fillServiceChainService, database: db } = context; + const logContext = { + requestId, + operationId: operation.id, + earmarkId: operation.earmarkId, + originChain: operation.originChainId, + destinationChain: operation.destinationChainId, + }; - // Track the operation - const rebalanceAction: RebalanceAction = { - bridge: adapter.type(), - amount: amount.toString(), - origin: route.origin, - destination: route.destination, - asset: route.asset, - transaction: receipt?.transactionHash || '', - recipient: recipientAddress, - }; - actions.push(rebalanceAction); + if (!operation.bridge) { + logger.warn('Operation missing bridge type', logContext); + return; + } - rebalanceSuccessful = true; - } catch (error) { - logger.error('Failed to execute Across bridge', { - requestId, - route, - bridgeType, - error: jsonifyError(error), - }); - return []; + const bridgeType = getBridgeTypeFromTag(operation.bridge); + const isToMainnetBridge = operation.bridge === `${SupportedBridge.Across}-mantle`; + const isFromMainnetBridge = operation.originChainId === Number(MAINNET_CHAIN_ID); + + if (!bridgeType) { + logger.warn('Unrecognized bridge tag, skipping', { ...logContext, bridge: operation.bridge }); + return; } - if (rebalanceSuccessful) { - logger.info('Leg 1 rebalance successful', { - requestId, - route, - amount: amount.toString(), - }); - } else { - logger.warn('Failed to complete Leg 1 rebalance', { - requestId, - route, - amount: amount.toString(), - }); + if (bridgeType !== SupportedBridge.Mantle && !isToMainnetBridge) { + logger.warn('Operation is not a mantle bridge', logContext); + return; } - return actions; -}; + const adapter = rebalance.getAdapter(bridgeType); -export const executeMethCallbacks = async (context: ProcessingContext): Promise => { - const { logger, requestId, config, rebalance, 
chainService, fillServiceChainService, database: db } = context; - logger.info('Executing destination callbacks for meth rebalance', { requestId }); + // Get origin transaction hash from JSON field + const txHashesField = operation.transactions; + const originTx = txHashesField?.[operation.originChainId] as + | TransactionEntry<{ receipt: TransactionReceipt }> + | undefined; - // Get operation TTL from config (with default fallback) - const operationTtlMinutes = config.regularRebalanceOpTTLMinutes ?? DEFAULT_OPERATION_TTL_MINUTES; + if (!originTx && !isFromMainnetBridge) { + logger.warn('Operation missing origin transaction', { ...logContext, operation }); + return; + } - // Get all pending operations from database - const { operations } = await db.getRebalanceOperations(undefined, undefined, { - status: [RebalanceOperationStatus.PENDING, RebalanceOperationStatus.AWAITING_CALLBACK], - bridge: [SupportedBridge.Mantle, `${SupportedBridge.Across}-mantle`], - }); + // Get the transaction receipt from origin chain + const receipt = originTx?.metadata?.receipt; + if (!receipt && !isFromMainnetBridge) { + logger.info('Origin transaction receipt not found for operation', { ...logContext, operation }); + return; + } - logger.debug(`Found ${operations.length} meth rebalance operations`, { - count: operations.length, - requestId, - statuses: [RebalanceOperationStatus.PENDING, RebalanceOperationStatus.AWAITING_CALLBACK], - operationTtlMinutes, - }); + const assetAddress = getTokenAddressFromConfig(operation.tickerHash, operation.originChainId.toString(), config); - for (const operation of operations) { - const logContext = { - requestId, - operationId: operation.id, - earmarkId: operation.earmarkId, + if (!assetAddress) { + logger.error('Could not find asset address for ticker hash', { + ...logContext, + tickerHash: operation.tickerHash, originChain: operation.originChainId, - destinationChain: operation.destinationChainId, - }; + }); + return; + } - if (!operation.bridge) { - 
logger.warn('Operation missing bridge type', logContext); - continue; - } + let route = { + origin: operation.originChainId, + destination: operation.destinationChainId, + asset: assetAddress, + }; - // Check for operation timeout - operations stuck too long should be marked as cancelled - if (operation.createdAt && isOperationTimedOut(operation.createdAt, operationTtlMinutes)) { - const operationAgeMinutes = Math.round((Date.now() - operation.createdAt.getTime()) / (60 * 1000)); - logger.warn('Operation timed out - marking as cancelled', { - ...logContext, - createdAt: operation.createdAt.toISOString(), - operationAgeMinutes, - ttlMinutes: operationTtlMinutes, - status: operation.status, - }); + // Determine if this is for Fill Service or Market Maker based on recipient + if (!operation.recipient) { + logger.error('Operation missing recipient address', logContext); + return; + } + const isForFillService = + operation.recipient.toLowerCase() === config.methRebalance?.fillService?.address?.toLowerCase(); + const fillerSenderAddress = + config.methRebalance?.fillService?.senderAddress ?? config.methRebalance?.fillService?.address; - try { + if (isForFillService && !fillServiceChainService) { + logger.warn('Fill service chain service not available for FS operation callback, skipping', { + ...logContext, + recipientAddress: operation.recipient, + note: 'fillServiceChainService may not be configured in this deployment', + }); + return; + } + + const evmSender = isForFillService ? fillerSenderAddress! : config.ownAddress; + const selectedChainService = isForFillService ? fillServiceChainService! 
: chainService; + + // Check if ready for callback + if (operation.status === RebalanceOperationStatus.PENDING) { + try { + const ready = await adapter.readyOnDestination( + operation.amount, + route, + receipt as unknown as ViemTransactionReceipt, + ); + if (ready) { await db.updateRebalanceOperation(operation.id, { - status: RebalanceOperationStatus.CANCELLED, + status: RebalanceOperationStatus.AWAITING_CALLBACK, }); - - // Also update earmark if present - if (operation.earmarkId) { - await db.updateEarmarkStatus(operation.earmarkId, EarmarkStatus.CANCELLED); - logger.info('Earmark cancelled due to operation timeout', { - ...logContext, - earmarkId: operation.earmarkId, - }); - } - } catch (error) { - logger.error('Failed to cancel timed-out operation', { + logger.info('Operation ready for callback, updated status', { ...logContext, - error: jsonifyError(error), + status: RebalanceOperationStatus.AWAITING_CALLBACK, }); + operation.status = RebalanceOperationStatus.AWAITING_CALLBACK; + } else { + logger.info('Action not ready for destination callback', logContext); + return; } - continue; + } catch (e: unknown) { + logger.error('Failed to check if ready on destination', { ...logContext, error: jsonifyError(e) }); + return; } + } - const bridgeType = operation.bridge.split('-')[0]; - const isToMainnetBridge = operation.bridge.split('-').length === 2 && operation.bridge.split('-')[1] === 'mantle'; - const isFromMainnetBridge = operation.originChainId === Number(MAINNET_CHAIN_ID); + // Execute callback if awaiting + if (operation.status === RebalanceOperationStatus.AWAITING_CALLBACK) { + let callback; - if (bridgeType !== SupportedBridge.Mantle && !isToMainnetBridge) { - logger.warn('Operation is not a mantle bridge', logContext); - continue; + // no need to execute callback if origin is mainnet + if (!isFromMainnetBridge) { + try { + callback = await adapter.destinationCallback(route, receipt as unknown as ViemTransactionReceipt); + } catch (e: unknown) { + 
logger.error('Failed to retrieve destination callback', { ...logContext, error: jsonifyError(e) }); + return; + } } - const adapter = rebalance.getAdapter(bridgeType as SupportedBridge); + let amountToBridge = operation.amount.toString(); + let successCallback = false; + let callbackTxHashes: { [key: string]: TransactionReceipt } = {}; + if (!callback) { + logger.info('No destination callback required, marking as completed', logContext); + successCallback = true; + } else { + logger.info('Retrieved destination callback', { + ...logContext, + callback: serializeBigInt(callback), + receipt: serializeBigInt(receipt), + }); + + try { + const tx = await submitTransactionWithLogging({ + chainService: selectedChainService as ChainService, + logger, + chainId: route.destination.toString(), + txRequest: { + chainId: +route.destination, + to: callback.transaction.to!, + data: callback.transaction.data!, + value: (callback.transaction.value || 0).toString(), + from: evmSender, + funcSig: callback.transaction.funcSig || '', + }, + zodiacConfig: { + walletType: WalletType.EOA, + }, + context: { ...logContext, callbackType: `destination: ${callback.memo}` }, + }); - // Get origin transaction hash from JSON field - const txHashes = operation.transactions; - const originTx = txHashes?.[operation.originChainId] as - | TransactionEntry<{ receipt: TransactionReceipt }> - | undefined; + logger.info('Successfully submitted destination callback', { + ...logContext, + callback: serializeBigInt(callback), + receipt: serializeBigInt(receipt), + destinationTx: tx.hash, + sender: evmSender, + walletType: WalletType.EOA, + senderType: isForFillService ? 
'fill-service' : 'market-maker', + }); - if (!originTx && !isFromMainnetBridge) { - logger.warn('Operation missing origin transaction', { ...logContext, operation }); - continue; - } + if (!tx || !tx.receipt) { + logger.error('Destination transaction receipt not found', { ...logContext, tx }); + return; + } - // Get the transaction receipt from origin chain - const receipt = originTx?.metadata?.receipt; - if (!receipt && !isFromMainnetBridge) { - logger.info('Origin transaction receipt not found for operation', { ...logContext, operation }); - continue; + successCallback = true; + callbackTxHashes[route.destination.toString()] = tx.receipt as TransactionReceipt; + amountToBridge = (callback.transaction.value as bigint).toString(); + } catch (e) { + logger.error('Failed to execute destination callback', { + ...logContext, + callback: serializeBigInt(callback), + receipt: serializeBigInt(receipt), + error: jsonifyError(e), + }); + return; + } } - const assetAddress = getTokenAddressFromConfig(operation.tickerHash, operation.originChainId.toString(), config); + if (isToMainnetBridge) { + // Stake WETH / ETH on mainnet to get mETH and bridge to Mantle using the Mantle adapter + const mantleAdapter = rebalance.getAdapter(SupportedBridge.Mantle); + if (!mantleAdapter) { + logger.error('Mantle adapter not found', { ...logContext }); + return; + } + + const mantleBridgeType = SupportedBridge.Mantle; - if (!assetAddress) { - logger.error('Could not find asset address for ticker hash', { - ...logContext, - tickerHash: operation.tickerHash, - originChain: operation.originChainId, - }); - continue; - } + route = { + origin: Number(MAINNET_CHAIN_ID), + destination: Number(MANTLE_CHAIN_ID), + asset: getTokenAddressFromConfig(WETH_TICKER_HASH, MAINNET_CHAIN_ID.toString(), config) || '', + }; - let route = { - origin: operation.originChainId, - destination: operation.destinationChainId, - asset: assetAddress, - }; + // Step 1: Get Quote + let receivedAmountStr: string; + try { + 
receivedAmountStr = await mantleAdapter.getReceivedAmount(amountToBridge, route); + logger.info('Received quote from mantle adapter', { + requestId, + route, + bridgeType: mantleBridgeType, + amountToBridge, + receivedAmount: receivedAmountStr, + }); + } catch (quoteError) { + logger.error('Failed to get quote from Mantle adapter', { + requestId, + route, + bridgeType: mantleBridgeType, + amountToBridge, + error: jsonifyError(quoteError), + }); + return; + } - // Determine if this is for Fill Service or Market Maker based on recipient - const isForFillService = - operation.recipient!.toLowerCase() === config.methRebalance?.fillService?.address?.toLowerCase(); - const fillerSenderAddress = - config.methRebalance?.fillService?.senderAddress ?? config.methRebalance?.fillService?.address; - let evmSender = isForFillService ? fillerSenderAddress! : config.ownAddress; - let selectedChainService = isForFillService ? fillServiceChainService : chainService; - // Check if ready for callback - if (operation.status === RebalanceOperationStatus.PENDING) { + // Step 2: Get Bridge Transaction Requests + let bridgeTxRequests: MemoizedTransactionRequest[] = []; try { - const ready = await adapter.readyOnDestination( - operation.amount, + bridgeTxRequests = await mantleAdapter.send(evmSender, operation.recipient!, amountToBridge, route); + logger.info('Prepared bridge transaction request from Mantle adapter', { + requestId, route, - receipt as unknown as ViemTransactionReceipt, - ); - if (ready) { - // Update status to awaiting callback - await db.updateRebalanceOperation(operation.id, { - status: RebalanceOperationStatus.AWAITING_CALLBACK, - }); - logger.info('Operation ready for callback, updated status', { - ...logContext, - status: RebalanceOperationStatus.AWAITING_CALLBACK, - }); - - // Update the operation object for further processing - operation.status = RebalanceOperationStatus.AWAITING_CALLBACK; - } else { - logger.info('Action not ready for destination callback', 
logContext); + bridgeType: mantleBridgeType, + bridgeTxRequests, + amountToBridge, + receiveAmount: receivedAmountStr, + transactionCount: bridgeTxRequests.length, + sender: evmSender, + recipient: operation.recipient, + }); + if (!bridgeTxRequests.length) { + throw new Error(`Failed to retrieve any bridge transaction requests`); } - } catch (e: unknown) { - logger.error('Failed to check if ready on destination', { ...logContext, error: jsonifyError(e) }); - continue; + } catch (sendError) { + logger.error('Failed to get bridge transaction request from Mantle adapter', { + requestId, + route, + bridgeType: mantleBridgeType, + amountToBridge, + error: jsonifyError(sendError), + }); + return; } - } - // Execute callback if awaiting - else if (operation.status === RebalanceOperationStatus.AWAITING_CALLBACK) { - let callback; - - // no need to execute callback if origin is mainnet - if (!isFromMainnetBridge) { - try { - callback = await adapter.destinationCallback(route, receipt as unknown as ViemTransactionReceipt); - } catch (e: unknown) { - logger.error('Failed to retrieve destination callback', { ...logContext, error: jsonifyError(e) }); - continue; + // Step 3: Mark Leg 1 as COMPLETED *before* submitting Leg 2 to prevent + // double-bridge on retry (if Leg 2 succeeds but Leg 1 status update fails, + // the next poll cycle would re-process Leg 1 and trigger another Leg 2). 
+ try { + await db.updateRebalanceOperation(operation.id, { + status: RebalanceOperationStatus.COMPLETED, + txHashes: callbackTxHashes, + }); + if (operation.earmarkId) { + await db.updateEarmarkStatus(operation.earmarkId, EarmarkStatus.COMPLETED); } + logger.info('Marked Leg 1 operation as COMPLETED before Leg 2 submission', { + operationId: operation.id, + earmarkId: operation.earmarkId, + }); + } catch (dbError) { + logger.error('Failed to mark Leg 1 as COMPLETED — aborting Leg 2 to prevent double-bridge', { + operationId: operation.id, + error: jsonifyError(dbError), + }); + return; } - let amountToBridge = operation.amount.toString(); - let successCallback = false; - let txHashes: { [key: string]: TransactionReceipt } = {}; - if (!callback) { - // No callback needed, mark as completed - logger.info('No destination callback required, marking as completed', logContext); - successCallback = true; - } else { - logger.info('Retrieved destination callback', { - ...logContext, - callback: serializeBigInt(callback), - receipt: serializeBigInt(receipt), + // Step 4: Submit the Mantle bridge transactions + try { + const { receipt: mantleReceipt, effectiveBridgedAmount } = await submitBridgeTransactions({ + context: { requestId, logger, config }, + chainService: selectedChainService as ChainService, + route, + bridgeType: mantleBridgeType, + bridgeTxRequests, + amountToBridge: BigInt(amountToBridge), + senderOverride: { + address: evmSender, + label: isForFillService ? 
'fill-service' : 'market-maker', + }, }); - // Try to execute the destination callback - try { - const tx = await submitTransactionWithLogging({ - chainService: selectedChainService as ChainService, - logger, - chainId: route.destination.toString(), - txRequest: { - chainId: +route.destination, - to: callback.transaction.to!, - data: callback.transaction.data!, - value: (callback.transaction.value || 0).toString(), - from: evmSender, - funcSig: callback.transaction.funcSig || '', - }, - zodiacConfig: { - walletType: WalletType.EOA, - }, - context: { ...logContext, callbackType: `destination: ${callback.memo}` }, - }); - - logger.info('Successfully submitted destination callback', { - ...logContext, - callback: serializeBigInt(callback), - receipt: serializeBigInt(receipt), - destinationTx: tx.hash, - sender: evmSender, - walletType: WalletType.EOA, - senderType: isForFillService ? 'fill-service' : 'market-maker', - }); - - // Update operation as completed with destination tx hash - if (!tx || !tx.receipt) { - logger.error('Destination transaction receipt not found', { ...logContext, tx }); - continue; - } - - successCallback = true; - txHashes[route.destination.toString()] = tx.receipt as TransactionReceipt; - amountToBridge = (callback.transaction.value as bigint).toString(); - } catch (e) { - logger.error('Failed to execute destination callback', { - ...logContext, - callback: serializeBigInt(callback), - receipt: serializeBigInt(receipt), - error: jsonifyError(e), - }); - continue; - } + // Step 5: Create database record for the Mantle bridge leg + await createRebalanceOperation({ + earmarkId: operation.earmarkId ?? null, + originChainId: route.origin, + destinationChainId: route.destination, + tickerHash: getTickerForAsset(route.asset, route.origin, config) || route.asset, + amount: effectiveBridgedAmount, + slippage: config.methRebalance!.bridge.slippageDbps ?? 
500, + status: RebalanceOperationStatus.PENDING, + bridge: mantleBridgeType, + transactions: mantleReceipt ? { [route.origin]: mantleReceipt } : undefined, + recipient: operation.recipient!, + }); + + logger.info('Successfully created Mantle rebalance operation in database', { + requestId, + route, + bridgeType: mantleBridgeType, + originTxHash: mantleReceipt?.transactionHash, + amountToBridge: effectiveBridgedAmount, + originalRequestedAmount: amountToBridge.toString(), + receiveAmount: receivedAmountStr, + }); + } catch (sendError) { + logger.error('Failed to send or monitor Mantle bridge transaction', { + requestId, + route, + bridgeType: mantleBridgeType, + error: jsonifyError(sendError), + note: 'Leg 1 is already COMPLETED — funds on Mainnet may need manual bridging', + }); + return; } + } + // For non-Leg-2 paths (direct Mantle bridge callback completion) + if (successCallback && !isToMainnetBridge) { try { - if (isToMainnetBridge) { - // Stake WETH / ETH on mainnet to get mETH and bridge to Mantle using the Mantle adapter - const mantleAdapter = rebalance.getAdapter(SupportedBridge.Mantle); - if (!mantleAdapter) { - logger.error('Mantle adapter not found', { ...logContext }); - continue; - } - - const mantleBridgeType = SupportedBridge.Mantle; - - route = { - origin: Number(MAINNET_CHAIN_ID), - destination: Number(MANTLE_CHAIN_ID), - asset: getTokenAddressFromConfig(WETH_TICKER_HASH, MAINNET_CHAIN_ID.toString(), config) || '', - }; - - // Step 1: Get Quote - let receivedAmountStr: string; - try { - receivedAmountStr = await mantleAdapter.getReceivedAmount(amountToBridge, route); - logger.info('Received quote from mantle adapter', { - requestId, - route, - bridgeType: mantleBridgeType, - amountToBridge, - receivedAmount: receivedAmountStr, - }); - } catch (quoteError) { - logger.error('Failed to get quote from Mantle adapter', { - requestId, - route, - bridgeType: mantleBridgeType, - amountToBridge, - error: jsonifyError(quoteError), - }); - continue; - } - - 
// Step 2: Get Bridge Transaction Requests - let bridgeTxRequests: MemoizedTransactionRequest[] = []; - try { - bridgeTxRequests = await mantleAdapter.send(evmSender, evmSender, amountToBridge, route); - logger.info('Prepared bridge transaction request from Mantle adapter', { - requestId, - route, - bridgeType: mantleBridgeType, - bridgeTxRequests, - amountToBridge, - receiveAmount: receivedAmountStr, - transactionCount: bridgeTxRequests.length, - sender: evmSender, - recipient: evmSender, - }); - if (!bridgeTxRequests.length) { - throw new Error(`Failed to retrieve any bridge transaction requests`); - } - } catch (sendError) { - logger.error('Failed to get bridge transaction request from Mantle adapter', { - requestId, - route, - bridgeType: mantleBridgeType, - amountToBridge, - error: jsonifyError(sendError), - }); - continue; - } - - // Step 3: Submit the bridge transactions in order and create database record - try { - const { receipt, effectiveBridgedAmount } = await executeBridgeTransactions({ - context: { requestId, logger, chainService: selectedChainService as ChainService, config }, - route, - bridgeType: mantleBridgeType, - bridgeTxRequests, - amountToBridge: BigInt(amountToBridge), - senderOverride: { - address: evmSender, - label: isForFillService ? 'fill-service' : 'market-maker', - }, - }); - - // Step 4: Create database record for the Mantle bridge leg - try { - await createRebalanceOperation({ - earmarkId: null, // NULL indicates regular rebalancing - originChainId: route.origin, - destinationChainId: route.destination, - tickerHash: getTickerForAsset(route.asset, route.origin, config) || route.asset, - amount: effectiveBridgedAmount, - slippage: config.methRebalance!.bridge.slippageDbps, - status: RebalanceOperationStatus.PENDING, - bridge: mantleBridgeType, - transactions: receipt ? 
{ [route.origin]: receipt } : undefined, - recipient: evmSender, - }); - - logger.info('Successfully created Mantle rebalance operation in database', { - requestId, - route, - bridgeType: mantleBridgeType, - originTxHash: receipt?.transactionHash, - amountToBridge: effectiveBridgedAmount, - originalRequestedAmount: amountToBridge.toString(), - receiveAmount: receivedAmountStr, - }); - } catch (error) { - logger.error('Failed to confirm transaction or create Mantle database record', { - requestId, - route, - bridgeType: mantleBridgeType, - transactionHash: receipt?.transactionHash, - error: jsonifyError(error), - }); - - // Don't consider this a success if we can't confirm or record it - continue; - } - } catch (sendError) { - logger.error('Failed to send or monitor Mantle bridge transaction', { - requestId, - route, - bridgeType: mantleBridgeType, - error: jsonifyError(sendError), - }); - continue; - } - } + await db.updateRebalanceOperation(operation.id, { + status: RebalanceOperationStatus.COMPLETED, + txHashes: callbackTxHashes, + }); - if (successCallback) { - try { - await db.updateRebalanceOperation(operation.id, { - status: RebalanceOperationStatus.COMPLETED, - txHashes: txHashes, - }); - - if (operation.earmarkId) { - await db.updateEarmarkStatus(operation.earmarkId, EarmarkStatus.COMPLETED); - } - logger.info('Successfully updated database with destination transaction', { - operationId: operation.id, - earmarkId: operation.earmarkId, - status: RebalanceOperationStatus.COMPLETED, - txHashes: txHashes, - }); - } catch (dbError) { - logger.error('Failed to update database with destination transaction', { - ...logContext, - error: jsonifyError(dbError), - errorMessage: (dbError as Error)?.message, - errorStack: (dbError as Error)?.stack, - }); - throw dbError; - } + if (operation.earmarkId) { + await db.updateEarmarkStatus(operation.earmarkId, EarmarkStatus.COMPLETED); } + logger.info('Successfully updated database with destination transaction', { + 
operationId: operation.id, + earmarkId: operation.earmarkId, + status: RebalanceOperationStatus.COMPLETED, + txHashes: callbackTxHashes, + }); } catch (dbError) { - logger.error('Failed to send to mantle', { - ...logContext, + logger.error('Failed to update database after successful callback — operation may be retried', { + operationId: operation.id, + earmarkId: operation.earmarkId, error: jsonifyError(dbError), - errorMessage: (dbError as Error)?.message, - errorStack: (dbError as Error)?.stack, }); - throw dbError; } } } -}; +} diff --git a/packages/poller/src/rebalance/rebalance.ts b/packages/poller/src/rebalance/rebalance.ts index 110171fa..105fb9ec 100644 --- a/packages/poller/src/rebalance/rebalance.ts +++ b/packages/poller/src/rebalance/rebalance.ts @@ -10,10 +10,8 @@ import { import { ProcessingContext } from '../init'; import { executeDestinationCallbacks } from './callbacks'; import { getValidatedZodiacConfig, getActualAddress } from '../helpers/zodiac'; -import { submitTransactionWithLogging } from '../helpers/transactions'; -import { RebalanceTransactionMemo } from '@mark/rebalance'; import { getEarmarkedBalance } from './onDemand'; -import { createRebalanceOperation, TransactionReceipt } from '@mark/database'; +import { executeEvmBridge } from './bridgeExecution'; export async function rebalanceInventory(context: ProcessingContext): Promise { const { logger, requestId, config, chainService, rebalance } = context; @@ -148,223 +146,46 @@ export async function rebalanceInventory(context: ProcessingContext): Promise${route.destination}`, + }); + if (result.actions.length > 0) { + rebalanceOperations.push(...result.actions); rebalanceSuccessful = true; - // If we got here, the rebalance for this route was successful with this bridge. 
break; // Exit the bridge preference loop for this route - } catch (error) { - logger.error('Failed to confirm transaction or create database record', { - requestId, - route, - bridgeType, - transactionHash: receipt?.transactionHash, - amountToBridge: amountToBridge, - receiveAmount: receivedAmount, - error: jsonifyError(error), - }); - - // Don't consider this a success if we can't confirm or record it - continue; // Try next bridge } - } catch (sendError) { - logger.error('Failed to send or monitor bridge transaction, trying next preference', { + // Empty actions means quote/slippage failure — try next preference + continue; + } catch (error) { + logger.error('Failed to execute bridge, trying next preference', { requestId, route, bridgeType, - transaction: bridgeTxRequests[idx], - transactionIndex: idx, - amountToBridge: amountToBridge, - error: jsonifyError(sendError), + amountToBridge: amountToBridge.toString(), + error: jsonifyError(error), }); - continue; // Skip to next bridge preference + continue; } } // End of bridge preference loop diff --git a/packages/poller/src/rebalance/registrations.ts b/packages/poller/src/rebalance/registrations.ts new file mode 100644 index 00000000..e2545ff1 --- /dev/null +++ b/packages/poller/src/rebalance/registrations.ts @@ -0,0 +1,37 @@ +import { registerRebalancer } from './registry'; +import { rebalanceMantleEth } from './mantleEth'; +import { rebalanceTacUsdt } from './tacUsdt'; +import { rebalanceAManUsde } from './aManUsde'; +import { rebalanceAMansyrupUsdt } from './aMansyrupUsdt'; +import { rebalanceSolanaUsdc } from './solanaUsdc'; + +registerRebalancer({ + runMode: 'methOnly', + displayName: 'meth', + handler: rebalanceMantleEth, + bridgeTags: ['mantle', 'across-mantle'], +}); +registerRebalancer({ + runMode: 'tacOnly', + displayName: 'TAC USDT', + handler: rebalanceTacUsdt, + bridgeTags: ['stargate-tac', 'tac-inner'], +}); +registerRebalancer({ + runMode: 'aManUsdeOnly', + displayName: 'aManUSDe', + handler: 
rebalanceAManUsde, + bridgeTags: ['stargate-amanusde'], +}); +registerRebalancer({ + runMode: 'aMansyrupUsdtOnly', + displayName: 'aMansyrupUSDT', + handler: rebalanceAMansyrupUsdt, + bridgeTags: ['stargate-amansyrupusdt'], +}); +registerRebalancer({ + runMode: 'solanaUsdcOnly', + displayName: 'Solana USDC → ptUSDe', + handler: rebalanceSolanaUsdc, + bridgeTags: ['ccip-solana-mainnet'], +}); diff --git a/packages/poller/src/rebalance/registry.ts b/packages/poller/src/rebalance/registry.ts new file mode 100644 index 00000000..a667368b --- /dev/null +++ b/packages/poller/src/rebalance/registry.ts @@ -0,0 +1,39 @@ +import { RebalanceAction } from '@mark/core'; +import { ProcessingContext } from '../init'; + +export interface RebalancerRegistration { + runMode: string; + displayName: string; + handler: (context: ProcessingContext) => Promise; + /** Bridge tags owned by this rebalancer (used by the generic callback handler to avoid races) */ + bridgeTags?: string[]; +} + +const registry: RebalancerRegistration[] = []; + +export function registerRebalancer(reg: RebalancerRegistration): void { + if (registry.some((r) => r.runMode === reg.runMode)) { + throw new Error(`Duplicate rebalancer registration for runMode: ${reg.runMode}`); + } + registry.push(reg); +} + +export function getRegisteredRebalancers(): readonly RebalancerRegistration[] { + return registry; +} + +/** + * Returns the set of bridge tags owned by registered rebalancers. + * The generic callback handler should skip operations with these tags. 
+ */ +export function getRegisteredBridgeTags(): Set { + const tags = new Set(); + for (const reg of registry) { + if (reg.bridgeTags) { + for (const tag of reg.bridgeTags) { + tags.add(tag); + } + } + } + return tags; +} diff --git a/packages/poller/src/rebalance/solanaUsdc.ts b/packages/poller/src/rebalance/solanaUsdc.ts index 504869a7..7b2b746c 100644 --- a/packages/poller/src/rebalance/solanaUsdc.ts +++ b/packages/poller/src/rebalance/solanaUsdc.ts @@ -1,5 +1,5 @@ import { TransactionReceipt as ViemTransactionReceipt } from 'viem'; -import { safeParseBigInt } from '../helpers'; +import { safeParseBigInt, getEvmBalance } from '../helpers'; import { jsonifyError } from '@mark/logger'; import { RebalanceOperationStatus, @@ -19,6 +19,9 @@ import { SolanaSigner } from '@mark/chainservice'; import { createRebalanceOperation, TransactionReceipt } from '@mark/database'; import { submitTransactionWithLogging, TransactionSubmissionResult } from '../helpers/transactions'; import { RebalanceTransactionMemo, USDC_PTUSDE_PAIRS, CCIPBridgeAdapter, PendleBridgeAdapter } from '@mark/rebalance'; +import { RebalanceRunState } from './types'; +import { runThresholdRebalance, ThresholdRebalanceDescriptor } from './thresholdEngine'; +import { runCallbackLoop, RebalanceOperation } from './callbackEngine'; // Ticker hash from chaindata/everclear.json for cross-chain asset matching const USDC_TICKER_HASH = '0xd6aca1be9729c13d677335161321649cccae6a591554772516700f986f942eaa'; @@ -28,9 +31,6 @@ const PTUSDE_SOLANA_DECIMALS = 9; // PT-sUSDE has 9 decimals on Solana const USDC_SOLANA_DECIMALS = 6; // USDC has 6 decimals on Solana const PTUSDE_MAINNET_DECIMALS = 18; // PT-sUSDE has 18 decimals on Mainnet -// Default operation timeout: 24 hours (in minutes) -const DEFAULT_OPERATION_TTL_MINUTES = 24 * 60; - /** * Get Solana rebalance configuration from context. 
* Config is loaded from environment variables or config file in @mark/core config.ts @@ -49,20 +49,6 @@ function getSolanaRebalanceConfig(config: ProcessingContext['config']): SolanaRe return config.solanaPtusdeRebalance; } -/** - * Check if an operation has exceeded its TTL (time-to-live). - * Operations stuck in PENDING or AWAITING_CALLBACK for too long should be marked as failed. - * - * @param createdAt - Operation creation timestamp - * @param ttlMinutes - TTL in minutes (default: 24 hours) - * @returns true if operation has timed out - */ -function isOperationTimedOut(createdAt: Date, ttlMinutes: number = DEFAULT_OPERATION_TTL_MINUTES): boolean { - const maxAgeMs = ttlMinutes * 60 * 1000; - const operationAgeMs = Date.now() - createdAt.getTime(); - return operationAgeMs > maxAgeMs; -} - /** * Get the expected ptUSDe output for a given USDC input using Pendle API. * @@ -342,783 +328,663 @@ export async function rebalanceSolanaUsdc(context: ProcessingContext): Promise true, // Already checked solanaRebalanceConfig.enabled above + + hasInFlightOperations: async () => { + const { operations } = await context.database.getRebalanceOperations(undefined, undefined, { + status: [RebalanceOperationStatus.PENDING, RebalanceOperationStatus.AWAITING_CALLBACK], + chainId: Number(SOLANA_CHAINID), + bridge: 'ccip-solana-mainnet', + }); + if (operations.length > 0) { + logger.info('In-flight Solana rebalance operations exist, skipping new rebalance to prevent overlap', { + requestId, + inFlightCount: operations.length, + inFlightOperationIds: operations.map((op) => op.id), + }); + } + return operations.length > 0; + }, + + getRecipientBalance: async () => { + // Get ptUSDe balance on Solana (in native 9-decimal units) + const connection = solanaSigner.getConnection(); + const walletPublicKey = solanaSigner.getPublicKey(); + const ptUsdeTokenAccount = await getAssociatedTokenAddress(PTUSDE_SOLANA_MINT, walletPublicKey); + let balance = 0n; + try { + const ptUsdeAccountInfo = 
await getAccount(connection, ptUsdeTokenAccount); + balance = ptUsdeAccountInfo.amount; + } catch (accountError) { + // Account might not exist if no ptUSDe has been received yet + logger.info('ptUSDe token account does not exist or is empty', { + requestId, + walletAddress: walletPublicKey.toBase58(), + ptUsdeTokenAccount: ptUsdeTokenAccount.toBase58(), + error: jsonifyError(accountError), + }); + return 0n; + } + logger.info('Retrieved Solana ptUSDe balance', { requestId, walletAddress: walletPublicKey.toBase58(), ptUsdeTokenAccount: ptUsdeTokenAccount.toBase58(), - error: jsonifyError(accountError), + balance: balance.toString(), + balanceInPtUsde: (Number(balance) / 10 ** PTUSDE_SOLANA_DECIMALS).toFixed(6), }); - solanaPtUsdeBalance = 0n; - } + return balance; + }, - logger.info('Retrieved Solana ptUSDe balance', { - requestId, - walletAddress: walletPublicKey.toBase58(), - ptUsdeTokenAccount: ptUsdeTokenAccount.toBase58(), - balance: solanaPtUsdeBalance.toString(), - balanceInPtUsde: (Number(solanaPtUsdeBalance) / 10 ** PTUSDE_SOLANA_DECIMALS).toFixed(6), - }); - } catch (error) { - logger.error('Failed to retrieve Solana ptUSDe balance', { - requestId, - error: jsonifyError(error), - }); - // Continue with 0 balance - this will trigger rebalancing if USDC is available - solanaPtUsdeBalance = 0n; - } + getThresholds: () => ({ + threshold: safeParseBigInt(solanaRebalanceConfig.ptUsdeThreshold), + target: safeParseBigInt(solanaRebalanceConfig.ptUsdeTarget), + }), - // Get Solana USDC balance - this is what we'll bridge if ptUSDe is low - let solanaUsdcBalance: bigint = 0n; - try { - const connection = solanaSigner.getConnection(); - const walletPublicKey = solanaSigner.getPublicKey(); + convertShortfallToBridgeAmount: async (ptUsdeShortfall) => { + // Convert ptUSDe shortfall (9 decimals) to USDC needed (6 decimals) via Pendle pricing + logger.info('Converting ptUSDe shortfall to USDC via Pendle pricing', { + requestId, + ptUsdeShortfall: 
ptUsdeShortfall.toString(), + ptUsdeShortfallFormatted: (Number(ptUsdeShortfall) / 10 ** PTUSDE_SOLANA_DECIMALS).toFixed(6), + }); + const pendleAdapter = context.rebalance.getAdapter(SupportedBridge.Pendle) as PendleBridgeAdapter; + const usdcNeeded = await calculateRequiredUsdcForPtUsde(ptUsdeShortfall, pendleAdapter, logger); + if (usdcNeeded === null) { + throw new Error('Cannot determine accurate USDC requirement without Pendle API'); + } + logger.info('Pendle pricing: USDC required for ptUSDe shortfall', { + requestId, + ptUsdeShortfall: ptUsdeShortfall.toString(), + usdcNeeded: usdcNeeded.toString(), + usdcNeededFormatted: (Number(usdcNeeded) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), + }); + return usdcNeeded; + }, - const sourceTokenAccount = await getAssociatedTokenAddress(USDC_SOLANA_MINT, walletPublicKey); + getSenderBalance: async () => { + // Get Solana USDC balance (in native 6-decimal units) + const connection = solanaSigner.getConnection(); + const walletPublicKey = solanaSigner.getPublicKey(); + const sourceTokenAccount = await getAssociatedTokenAddress(USDC_SOLANA_MINT, walletPublicKey); + const tokenAccountInfo = await getAccount(connection, sourceTokenAccount); + const balance = tokenAccountInfo.amount; + logger.info('Retrieved Solana USDC balance for potential bridging', { + requestId, + walletAddress: walletPublicKey.toBase58(), + tokenAccount: sourceTokenAccount.toBase58(), + balance: balance.toString(), + balanceInUsdc: (Number(balance) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), + }); + return balance; + }, - const tokenAccountInfo = await getAccount(connection, sourceTokenAccount); - solanaUsdcBalance = tokenAccountInfo.amount; + getAmountCaps: () => ({ + min: minRebalanceAmount, + max: maxRebalanceAmount > 0n ? 
maxRebalanceAmount : undefined, + }), - logger.info('Retrieved Solana USDC balance for potential bridging', { - requestId, - walletAddress: walletPublicKey.toBase58(), - tokenAccount: sourceTokenAccount.toBase58(), - balance: solanaUsdcBalance.toString(), - balanceInUsdc: (Number(solanaUsdcBalance) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), - }); - } catch (error) { - logger.error('Failed to retrieve Solana USDC balance', { - requestId, - error: jsonifyError(error), - }); - return rebalanceOperations; - } + executeBridge: async (ctx, amountToBridge) => { + if (!config.ownAddress) { + throw new Error('Recipient address (config.ownAddress) not configured'); + } - if (solanaUsdcBalance === 0n) { - logger.info('No Solana USDC balance available for bridging, skipping rebalancing', { requestId }); - return rebalanceOperations; - } + logger.info('Starting Leg 1: Solana to Mainnet CCIP bridge (threshold-based)', { + requestId, + amountToBridge: amountToBridge.toString(), + amountToBridgeInUsdc: (Number(amountToBridge) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), + recipientAddress: config.ownAddress, + trigger: 'threshold-based', + }); + + const solanaToMainnetRoute = { + origin: Number(SOLANA_CHAINID), + destination: Number(MAINNET_CHAIN_ID), + asset: USDC_SOLANA_MINT.toString(), + }; + + const bridgeResult = await executeSolanaToMainnetBridge({ + context: { requestId, logger, config, chainService, rebalance: context.rebalance }, + solanaSigner, + route: solanaToMainnetRoute, + amountToBridge, + recipientAddress: config.ownAddress, + }); + + if (!bridgeResult.receipt || bridgeResult.receipt.status !== 1) { + throw new Error(`Bridge transaction failed: ${bridgeResult.receipt?.transactionHash || 'Unknown transaction'}`); + } + + logger.info('Leg 1 bridge completed successfully', { + requestId, + transactionHash: bridgeResult.receipt.transactionHash, + effectiveAmount: bridgeResult.effectiveBridgedAmount, + blockNumber: bridgeResult.receipt.blockNumber, + }); + + await 
createRebalanceOperation({ + earmarkId: null, + originChainId: Number(SOLANA_CHAINID), + destinationChainId: Number(MAINNET_CHAIN_ID), + tickerHash: USDC_TICKER_HASH, + amount: bridgeResult.effectiveBridgedAmount, + slippage: 1000, + status: RebalanceOperationStatus.PENDING, + bridge: 'ccip-solana-mainnet', + transactions: { [SOLANA_CHAINID]: bridgeResult.receipt }, + recipient: config.ownAddress, + }); + + return [ + { + bridge: SupportedBridge.CCIP, + amount: bridgeResult.effectiveBridgedAmount, + origin: Number(SOLANA_CHAINID), + destination: Number(MAINNET_CHAIN_ID), + asset: USDC_SOLANA_MINT.toString(), + transaction: bridgeResult.receipt.transactionHash, + recipient: config.ownAddress, + }, + ]; + }, + }; - // Parse thresholds from configuration (in native decimals) const ptUsdeThreshold = safeParseBigInt(solanaRebalanceConfig.ptUsdeThreshold); const ptUsdeTarget = safeParseBigInt(solanaRebalanceConfig.ptUsdeTarget); - const minRebalanceAmount = safeParseBigInt(solanaRebalanceConfig.bridge.minRebalanceAmount); - const maxRebalanceAmount = safeParseBigInt(solanaRebalanceConfig.bridge.maxRebalanceAmount); - logger.info('Checking ptUSDe balance threshold for rebalancing decision', { + logger.info('Solana ptUSDe rebalance configuration', { requestId, - ptUsdeBalance: solanaPtUsdeBalance.toString(), - ptUsdeBalanceFormatted: (Number(solanaPtUsdeBalance) / 10 ** PTUSDE_SOLANA_DECIMALS).toFixed(6), ptUsdeThreshold: ptUsdeThreshold.toString(), ptUsdeThresholdFormatted: (Number(ptUsdeThreshold) / 10 ** PTUSDE_SOLANA_DECIMALS).toFixed(6), ptUsdeTarget: ptUsdeTarget.toString(), ptUsdeTargetFormatted: (Number(ptUsdeTarget) / 10 ** PTUSDE_SOLANA_DECIMALS).toFixed(6), - shouldTriggerRebalance: solanaPtUsdeBalance < ptUsdeThreshold, - availableSolanaUsdc: solanaUsdcBalance.toString(), - availableSolanaUsdcFormatted: (Number(solanaUsdcBalance) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), + minRebalanceAmount: minRebalanceAmount.toString(), + minRebalanceAmountFormatted: 
(Number(minRebalanceAmount) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), + maxRebalanceAmount: maxRebalanceAmount.toString(), + maxRebalanceAmountFormatted: + maxRebalanceAmount > 0n ? (Number(maxRebalanceAmount) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6) : 'unlimited', configSource: config.solanaPtusdeRebalance ? 'explicit' : 'defaults', }); - if (solanaPtUsdeBalance >= ptUsdeThreshold) { - logger.info('ptUSDe balance is above threshold, no rebalancing needed', { - requestId, - ptUsdeBalance: solanaPtUsdeBalance.toString(), - ptUsdeThreshold: ptUsdeThreshold.toString(), - }); - return rebalanceOperations; - } + const runState: RebalanceRunState = { committedAmount: 0n }; + const thresholdActions = await runThresholdRebalance(context, solanaThresholdDescriptor, runState); + rebalanceOperations.push(...thresholdActions); - // Calculate how much USDC to bridge based on ptUSDe deficit and available Solana USDC - const ptUsdeShortfall = ptUsdeTarget - solanaPtUsdeBalance; + logger.info('Completed rebalancing Solana USDC', { + requestId, + operationCount: rebalanceOperations.length, + }); - // Get Pendle adapter for accurate pricing - const pendleAdapter = context.rebalance.getAdapter(SupportedBridge.Pendle) as PendleBridgeAdapter; + return rebalanceOperations; +} - // Calculate required USDC using Pendle API pricing - const usdcNeeded = await calculateRequiredUsdcForPtUsde(ptUsdeShortfall, pendleAdapter, logger); +export const executeSolanaUsdcCallbacks = async (context: ProcessingContext): Promise => { + return runCallbackLoop(context, { + name: 'Solana USDC', + bridge: 'ccip-solana-mainnet', + statuses: [RebalanceOperationStatus.PENDING, RebalanceOperationStatus.AWAITING_CALLBACK], + timeoutStatus: RebalanceOperationStatus.EXPIRED, + processOperation: (operation, ctx) => processSolanaOperation(operation, ctx), + }); +}; - // If Pendle API is unavailable, skip rebalancing - if (usdcNeeded === null) { - logger.error('Skipping rebalancing due to Pendle API unavailability', { 
- requestId, - ptUsdeShortfall: ptUsdeShortfall.toString(), - reason: 'Cannot determine accurate USDC requirement without Pendle API', - }); - return rebalanceOperations; +/** + * Process a single in-flight Solana USDC operation through its state machine. + * Dispatches to leg-specific handlers based on operation status. + */ +async function processSolanaOperation(operation: RebalanceOperation, context: ProcessingContext): Promise { + if (operation.status === RebalanceOperationStatus.PENDING) { + await processLeg1Completion(operation, context); + } else if (operation.status === RebalanceOperationStatus.AWAITING_CALLBACK) { + await checkLeg3Completion(operation, context); } +} - // Calculate amount to bridge: min(shortfall, available balance, max per operation) - let amountToBridge = usdcNeeded; - if (amountToBridge > solanaUsdcBalance) { - amountToBridge = solanaUsdcBalance; - } - if (maxRebalanceAmount && maxRebalanceAmount > 0n && amountToBridge > maxRebalanceAmount) { - amountToBridge = maxRebalanceAmount; - } +/** + * Leg 1 completion: Check if Solana→Mainnet CCIP bridge completed. + * On success, executes Leg 2 (USDC→ptUSDe swap) and Leg 3 (ptUSDe→Solana CCIP). 
+ */ +async function processLeg1Completion(operation: RebalanceOperation, context: ProcessingContext): Promise { + const { logger, requestId, database: db } = context; + const logContext = { + requestId, + operationId: operation.id, + earmarkId: operation.earmarkId, + originChain: operation.originChainId, + destinationChain: operation.destinationChainId, + }; - // Check minimum rebalancing amount from config - if (amountToBridge < minRebalanceAmount) { - logger.warn('Calculated bridge amount is below minimum threshold, skipping rebalancing', { - requestId, - calculatedAmount: amountToBridge.toString(), - calculatedAmountFormatted: (Number(amountToBridge) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), - minAmount: minRebalanceAmount.toString(), - minAmountFormatted: (Number(minRebalanceAmount) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), - reason: 'Calculated bridge amount too small to be effective', - }); - return rebalanceOperations; + if (operation.originChainId !== Number(SOLANA_CHAINID) || operation.destinationChainId !== Number(MAINNET_CHAIN_ID)) { + return; } - logger.info('Calculated bridge amount based on ptUSDe deficit and available balance', { - requestId, - balanceChecks: { - ptUsdeShortfall: ptUsdeShortfall.toString(), - ptUsdeShortfallFormatted: (Number(ptUsdeShortfall) / 10 ** PTUSDE_SOLANA_DECIMALS).toFixed(6), - usdcNeeded: usdcNeeded.toString(), - usdcNeededFormatted: (Number(usdcNeeded) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), - availableSolanaUsdc: solanaUsdcBalance.toString(), - availableSolanaUsdcFormatted: (Number(solanaUsdcBalance) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), - maxRebalanceAmount: maxRebalanceAmount?.toString() ?? 'unlimited', - maxRebalanceAmountFormatted: maxRebalanceAmount - ? 
(Number(maxRebalanceAmount) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6) - : 'unlimited', - }, - bridgeDecision: { - finalAmountToBridge: amountToBridge.toString(), - finalAmountToBridgeFormatted: (Number(amountToBridge) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), - isPartialBridge: solanaUsdcBalance < usdcNeeded, - utilizationPercentage: ((Number(amountToBridge) / Number(solanaUsdcBalance)) * 100).toFixed(2) + '%', - }, - }); - - // Check for in-flight operations to prevent overlapping rebalances - const { operations: inFlightSolanaOps } = await context.database.getRebalanceOperations(undefined, undefined, { - status: [RebalanceOperationStatus.PENDING, RebalanceOperationStatus.AWAITING_CALLBACK], - chainId: Number(SOLANA_CHAINID), - bridge: 'ccip-solana-mainnet', + logger.info('Checking if CCIP bridge completed and USDC arrived on Mainnet', { + ...logContext, + bridge: operation.bridge, + amount: operation.amount, }); - if (inFlightSolanaOps.length > 0) { - logger.info('In-flight Solana rebalance operations exist, skipping new rebalance to prevent overlap', { - requestId, - inFlightCount: inFlightSolanaOps.length, - inFlightOperationIds: inFlightSolanaOps.map((op) => op.id), + // Get the Solana transaction hash from the stored receipt + const solanaTransactionHash = operation.transactions?.[SOLANA_CHAINID]?.transactionHash; + if (!solanaTransactionHash) { + logger.warn('No Solana transaction hash found for CCIP operation', { + ...logContext, + transactions: operation.transactions, }); - return rebalanceOperations; + return; } - // Prepare route for Solana to Mainnet bridge - const solanaToMainnetRoute = { - origin: Number(SOLANA_CHAINID), - destination: Number(MAINNET_CHAIN_ID), - asset: USDC_SOLANA_MINT.toString(), - }; - - logger.info('Starting Leg 1: Solana to Mainnet CCIP bridge (threshold-based)', { - requestId, - route: solanaToMainnetRoute, - amountToBridge: amountToBridge.toString(), - amountToBridgeInUsdc: (Number(amountToBridge) / 10 ** 
USDC_SOLANA_DECIMALS).toFixed(6), - recipientAddress: config.ownAddress, - trigger: 'threshold-based', - ptUsdeBalance: solanaPtUsdeBalance.toString(), - ptUsdeThreshold: ptUsdeThreshold.toString(), + // Use CCIP adapter to check transaction status + const ccipAdapter = context.rebalance.getAdapter(SupportedBridge.CCIP) as CCIPBridgeAdapter; + const ccipStatus = await ccipAdapter.getTransferStatus( + solanaTransactionHash, + Number(SOLANA_CHAINID), + Number(MAINNET_CHAIN_ID), + ); + + const createdAt = operation.createdAt ? new Date(operation.createdAt).getTime() : Date.now(); + const timeSinceCreation = new Date().getTime() - createdAt; + + logger.info('CCIP bridge status check', { + ...logContext, + solanaTransactionHash, + ccipStatus: ccipStatus.status, + ccipMessage: ccipStatus.message, + destinationTransactionHash: ccipStatus.destinationTransactionHash, + timeSinceCreation, }); - try { - // Pre-flight checks - if (!config.ownAddress) { - throw new Error('Recipient address (config.ownAddress) not configured'); - } - - // Validate balance - if (solanaUsdcBalance < amountToBridge) { - throw new Error( - `Insufficient Solana USDC balance. 
Required: ${amountToBridge.toString()}, Available: ${solanaUsdcBalance.toString()}`, - ); + if (ccipStatus.status === 'SUCCESS') { + // IDEMPOTENCY CHECK: Check if we already have a Mainnet transaction hash + // which would indicate Leg 2/3 have already been executed + const existingMainnetTx = operation.transactions?.[MAINNET_CHAIN_ID]?.transactionHash; + if (existingMainnetTx) { + logger.info('Leg 2/3 already executed (Mainnet tx hash exists), skipping duplicate execution', { + ...logContext, + existingMainnetTx, + solanaTransactionHash, + }); + return; } - logger.info('Performing pre-bridge validation checks', { - requestId, - trigger: 'threshold-based', - checks: { - solanaUsdcBalance: solanaUsdcBalance.toString(), - solanaUsdcBalanceFormatted: (Number(solanaUsdcBalance) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), - amountToBridge: amountToBridge.toString(), - amountToBridgeFormatted: (Number(amountToBridge) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), - hasSufficientBalance: solanaUsdcBalance >= amountToBridge, - recipientValid: !!config.ownAddress, - recipient: config.ownAddress, - }, + logger.info('CCIP bridge completed successfully, initiating Leg 2: USDC → ptUSDe swap', { + ...logContext, + solanaTransactionHash, + proceedingToLeg2: true, }); - // Execute Leg 1: Solana to Mainnet bridge - const bridgeResult = await executeSolanaToMainnetBridge({ - context: { requestId, logger, config, chainService, rebalance: context.rebalance }, - solanaSigner, - route: solanaToMainnetRoute, - amountToBridge, - recipientAddress: config.ownAddress, + // Execute Legs 2 and 3. The function handles its own errors internally + // (sets CANCELLED on failure). Status is updated to AWAITING_CALLBACK + // inside executeLeg2And3 only after Leg 3 tx hash is stored, preventing + // the operation from being stuck in AWAITING_CALLBACK with no tx hash. 
+ await executeLeg2And3(operation, context, logContext); + } else if (ccipStatus.status === 'FAILURE') { + logger.error('CCIP bridge transaction failed', { + ...logContext, + solanaTransactionHash, + ccipMessage: ccipStatus.message, + shouldRetry: false, }); - if (!bridgeResult.receipt || bridgeResult.receipt.status !== 1) { - throw new Error(`Bridge transaction failed: ${bridgeResult.receipt?.transactionHash || 'Unknown transaction'}`); - } - - logger.info('Leg 1 bridge completed successfully', { - requestId, - transactionHash: bridgeResult.receipt.transactionHash, - effectiveAmount: bridgeResult.effectiveBridgedAmount, - blockNumber: bridgeResult.receipt.blockNumber, - solanaSlot: bridgeResult.receipt.blockNumber, + await db.updateRebalanceOperation(operation.id, { + status: RebalanceOperationStatus.FAILED, }); - // Create rebalance operation record for tracking all 3 legs (no earmark for threshold-based) - try { - await createRebalanceOperation({ - earmarkId: null, // No earmark for threshold-based rebalancing - originChainId: Number(SOLANA_CHAINID), - destinationChainId: Number(MAINNET_CHAIN_ID), - tickerHash: USDC_TICKER_HASH, - amount: bridgeResult.effectiveBridgedAmount, - slippage: 1000, // 1% slippage - status: RebalanceOperationStatus.PENDING, // pending as CCIP takes 20 mins to bridge - bridge: 'ccip-solana-mainnet', - transactions: { [SOLANA_CHAINID]: bridgeResult.receipt }, - recipient: config.ownAddress, - }); - - logger.info('Rebalance operation record created for Leg 1', { - requestId, - operationStatus: RebalanceOperationStatus.PENDING, - note: 'Status is PENDING because CCIP takes ~20 minutes to complete', - }); - - const rebalanceAction: RebalanceAction = { - bridge: SupportedBridge.CCIP, - amount: bridgeResult.effectiveBridgedAmount, - origin: Number(SOLANA_CHAINID), - destination: Number(MAINNET_CHAIN_ID), - asset: USDC_SOLANA_MINT.toString(), - transaction: bridgeResult.receipt.transactionHash, - recipient: config.ownAddress, - }; - 
rebalanceOperations.push(rebalanceAction); + logger.info('Marked operation as FAILED due to CCIP bridge failure', { + ...logContext, + note: 'Leg 1 CCIP bridge failed - funds may still be on Solana', + }); + } else { + // CCIP still pending + const twentyMinutesMs = 20 * 60 * 1000; - logger.info('Leg 1 rebalance completed successfully', { - requestId, - bridgedAmount: bridgeResult.effectiveBridgedAmount, - bridgedAmountInUsdc: (Number(bridgeResult.effectiveBridgedAmount) / 10 ** USDC_SOLANA_DECIMALS).toFixed(6), - transactionHash: bridgeResult.receipt.transactionHash, + if (timeSinceCreation > twentyMinutesMs) { + logger.warn('CCIP bridge taking longer than expected', { + ...logContext, + solanaTransactionHash, + timeSinceCreation, + expectedMaxTime: twentyMinutesMs, + ccipStatus: ccipStatus.status, + ccipMessage: ccipStatus.message, + shouldInvestigate: true, }); - } catch (dbError) { - logger.error('Failed to create rebalance operation record', { - requestId, - error: jsonifyError(dbError), + } else { + logger.debug('CCIP bridge still pending within expected timeframe', { + ...logContext, + solanaTransactionHash, + timeSinceCreation, + remainingTime: twentyMinutesMs - timeSinceCreation, + ccipStatus: ccipStatus.status, }); - // Don't throw here - the bridge was successful, just the record creation failed } - } catch (bridgeError) { - logger.error('Leg 1 bridge operation failed', { - requestId, - route: solanaToMainnetRoute, - amountToBridge: amountToBridge.toString(), - error: jsonifyError(bridgeError), - errorMessage: (bridgeError as Error)?.message, - errorStack: (bridgeError as Error)?.stack, - }); } - - logger.info('Completed rebalancing Solana USDC', { requestId }); - - return rebalanceOperations; } -export const executeSolanaUsdcCallbacks = async (context: ProcessingContext): Promise => { - const { logger, requestId, database: db } = context; - logger.info('Executing destination callbacks for Solana USDC rebalance', { requestId }); - - // Get all pending 
CCIP operations from Solana to Mainnet - const { operations: pendingSolanaOps } = await db.getRebalanceOperations(undefined, undefined, { - status: [RebalanceOperationStatus.PENDING], - chainId: Number(SOLANA_CHAINID), - bridge: 'ccip-solana-mainnet', - }); +/** + * Execute Leg 2 (Mainnet USDC → ptUSDe via Pendle) and Leg 3 (ptUSDe → Solana via CCIP). + */ +async function executeLeg2And3( + operation: RebalanceOperation, + context: ProcessingContext, + logContext: Record, +): Promise { + const { logger, requestId, database: db, rebalance, config: rebalanceConfig, chainService } = context; - logger.debug('Found pending Solana USDC rebalance operations', { - count: pendingSolanaOps.length, - requestId, - status: RebalanceOperationStatus.PENDING, - }); + try { + logger.info('Executing Leg 2: Mainnet USDC → ptUSDe via Pendle adapter', logContext); - for (const operation of pendingSolanaOps) { - const logContext = { - requestId, - operationId: operation.id, - earmarkId: operation.earmarkId, - originChain: operation.originChainId, - destinationChain: operation.destinationChainId, - }; + // Get the Pendle adapter + const pendleAdapter = rebalance.getAdapter(SupportedBridge.Pendle); + if (!pendleAdapter) { + logger.error('Pendle adapter not found', logContext); + return; + } - if ( - operation.originChainId !== Number(SOLANA_CHAINID) || - operation.destinationChainId !== Number(MAINNET_CHAIN_ID) - ) { - continue; + // Get USDC address on mainnet for the swap + const usdcAddress = getTokenAddressFromConfig(USDC_TICKER_HASH, MAINNET_CHAIN_ID.toString(), rebalanceConfig); + if (!usdcAddress) { + logger.error('Could not find USDC address for mainnet', logContext); + return; } - // Check for operation timeout - mark as failed if stuck for too long - if (operation.createdAt && isOperationTimedOut(new Date(operation.createdAt))) { - logger.warn('Operation has exceeded TTL, marking as FAILED', { - ...logContext, - createdAt: operation.createdAt, - ttlMinutes: 
DEFAULT_OPERATION_TTL_MINUTES, - }); + const storedRecipient = operation.recipient; + const recipient = storedRecipient || rebalanceConfig.ownAddress; + + // Get ptUSDe address from the USDC_PTUSDE_PAIRS config + const tokenPair = USDC_PTUSDE_PAIRS[Number(MAINNET_CHAIN_ID)]; + if (!tokenPair?.ptUSDe) { + logger.error('ptUSDe address not configured for mainnet in USDC_PTUSDE_PAIRS', logContext); await db.updateRebalanceOperation(operation.id, { - status: RebalanceOperationStatus.EXPIRED, + status: RebalanceOperationStatus.CANCELLED, }); - continue; + return; } - logger.info('Checking if CCIP bridge completed and USDC arrived on Mainnet', { - ...logContext, - bridge: operation.bridge, - amount: operation.amount, - }); + const ptUsdeAddress = tokenPair.ptUSDe; + // Use actual USDC balance on Mainnet instead of operation.amount to account for + // potential differences from CCIP fees or rounding during the cross-chain transfer + let swapAmount = operation.amount; try { - // Get the Solana transaction hash from the stored receipt - const solanaTransactionHash = operation.transactions?.[SOLANA_CHAINID]?.transactionHash; - if (!solanaTransactionHash) { - logger.warn('No Solana transaction hash found for CCIP operation', { + const usdcBalance18 = await getEvmBalance( + rebalanceConfig, + MAINNET_CHAIN_ID.toString(), + recipient, + usdcAddress, + USDC_SOLANA_DECIMALS, + context.prometheus, + ); + // getEvmBalance returns 18-decimal normalized values — convert back to native 6-decimal + // for comparison with operation.amount (which is stored in native 6-decimal units) + const usdcBalanceNative = usdcBalance18 / BigInt(10 ** (18 - USDC_SOLANA_DECIMALS)); + const operationAmount = safeParseBigInt(operation.amount); + if (usdcBalanceNative < operationAmount) { + logger.warn('Actual USDC balance on Mainnet is less than operation amount (CCIP fees/rounding)', { ...logContext, - transactions: operation.transactions, + operationAmount: operation.amount, + actualBalance: 
usdcBalanceNative.toString(), + difference: (operationAmount - usdcBalanceNative).toString(), }); - continue; + swapAmount = usdcBalanceNative.toString(); } - - // Use CCIP adapter to check transaction status - const ccipAdapter = context.rebalance.getAdapter(SupportedBridge.CCIP) as CCIPBridgeAdapter; - const ccipStatus = await ccipAdapter.getTransferStatus( - solanaTransactionHash, - Number(SOLANA_CHAINID), - Number(MAINNET_CHAIN_ID), - ); - - const createdAt = operation.createdAt ? new Date(operation.createdAt).getTime() : Date.now(); - const timeSinceCreation = new Date().getTime() - createdAt; - - logger.info('CCIP bridge status check', { + } catch (balanceError) { + logger.warn('Failed to check actual USDC balance on Mainnet, using operation amount', { ...logContext, - solanaTransactionHash, - ccipStatus: ccipStatus.status, - ccipMessage: ccipStatus.message, - destinationTransactionHash: ccipStatus.destinationTransactionHash, - timeSinceCreation, + error: jsonifyError(balanceError), + fallbackAmount: operation.amount, }); + } - if (ccipStatus.status === 'SUCCESS') { - // IDEMPOTENCY CHECK: Check if we already have a Mainnet transaction hash - // which would indicate Leg 2/3 have already been executed - const existingMainnetTx = operation.transactions?.[MAINNET_CHAIN_ID]?.transactionHash; - if (existingMainnetTx) { - logger.info('Leg 2/3 already executed (Mainnet tx hash exists), skipping duplicate execution', { - ...logContext, - existingMainnetTx, - solanaTransactionHash, - }); - // Status should already be AWAITING_CALLBACK, just continue to next operation - continue; - } - - logger.info('CCIP bridge completed successfully, initiating Leg 2: USDC → ptUSDe swap', { - ...logContext, - solanaTransactionHash, - proceedingToLeg2: true, - }); + logger.debug('Leg 2 Pendle swap details', { + ...logContext, + storedRecipient, + fallbackRecipient: rebalanceConfig.ownAddress, + finalRecipient: recipient, + usdcAddress, + ptUsdeAddress, + amountToSwap: swapAmount, + 
}); - // Update operation to AWAITING_CALLBACK to indicate Leg 1 is done, Leg 2 starting - await db.updateRebalanceOperation(operation.id, { - status: RebalanceOperationStatus.AWAITING_CALLBACK, - }); + // Create route for USDC → ptUSDe swap on mainnet (same chain swap) + const pendleRoute = { + asset: usdcAddress, + origin: Number(MAINNET_CHAIN_ID), + destination: Number(MAINNET_CHAIN_ID), + swapOutputAsset: ptUsdeAddress, + }; - // Execute Leg 2: Mainnet USDC → ptUSDe using Pendle adapter - logger.info('Executing Leg 2: Mainnet USDC → ptUSDe via Pendle adapter', logContext); - - try { - const { rebalance, config: rebalanceConfig } = context; - - // Get the Pendle adapter - const pendleAdapter = rebalance.getAdapter(SupportedBridge.Pendle); - if (!pendleAdapter) { - logger.error('Pendle adapter not found', logContext); - continue; - } - - // Get USDC address on mainnet for the swap - const usdcAddress = getTokenAddressFromConfig(USDC_TICKER_HASH, MAINNET_CHAIN_ID.toString(), rebalanceConfig); - if (!usdcAddress) { - logger.error('Could not find USDC address for mainnet', logContext); - continue; - } - - // Use stored recipient from Leg 1 operation to ensure consistency - const storedRecipient = operation.recipient; - const recipient = storedRecipient || rebalanceConfig.ownAddress; - - // Get ptUSDe address from the USDC_PTUSDE_PAIRS config - const tokenPair = USDC_PTUSDE_PAIRS[Number(MAINNET_CHAIN_ID)]; - if (!tokenPair?.ptUSDe) { - logger.error('ptUSDe address not configured for mainnet in USDC_PTUSDE_PAIRS', logContext); - await db.updateRebalanceOperation(operation.id, { - status: RebalanceOperationStatus.CANCELLED, - }); - continue; - } - - const ptUsdeAddress = tokenPair.ptUSDe; - - logger.debug('Leg 2 Pendle swap details', { - ...logContext, - storedRecipient, - fallbackRecipient: rebalanceConfig.ownAddress, - finalRecipient: recipient, - usdcAddress, - ptUsdeAddress, - amountToSwap: operation.amount, - }); - - // Create route for USDC → ptUSDe swap on 
mainnet (same chain swap) - const pendleRoute = { - asset: usdcAddress, - origin: Number(MAINNET_CHAIN_ID), - destination: Number(MAINNET_CHAIN_ID), // Same chain swap - swapOutputAsset: ptUsdeAddress, // Target ptUSDe (actual address) - }; - - // Get quote from Pendle for USDC → ptUSDe - const receivedAmountStr = await pendleAdapter.getReceivedAmount(operation.amount, pendleRoute); - - logger.info('Received Pendle quote for USDC → ptUSDe swap', { - ...logContext, - amountToSwap: operation.amount, - expectedPtUsde: receivedAmountStr, - route: pendleRoute, - }); - - // Execute the Pendle swap transactions - const swapTxRequests = await pendleAdapter.send(recipient, recipient, operation.amount, pendleRoute); - - if (!swapTxRequests.length) { - logger.error('No swap transactions returned from Pendle adapter', logContext); - continue; - } - - logger.info('Executing Pendle USDC → ptUSDe swap transactions', { - ...logContext, - transactionCount: swapTxRequests.length, - recipient, - }); - - // Execute each transaction in the swap sequence - let effectivePtUsdeAmount = receivedAmountStr; - - for (const { transaction, memo, effectiveAmount } of swapTxRequests) { - logger.info('Submitting Pendle swap transaction', { - requestId, - memo, - transaction, - }); - - const result = await submitTransactionWithLogging({ - chainService: context.chainService, - logger, - chainId: MAINNET_CHAIN_ID.toString(), - txRequest: { - to: transaction.to!, - data: transaction.data!, - value: (transaction.value || 0).toString(), - chainId: Number(MAINNET_CHAIN_ID), - from: rebalanceConfig.ownAddress, - funcSig: transaction.funcSig || '', - }, - zodiacConfig: { - walletType: WalletType.EOA, - }, - context: { requestId, route: pendleRoute, bridgeType: SupportedBridge.Pendle, transactionType: memo }, - }); - - logger.info('Successfully submitted Pendle swap transaction', { - requestId, - memo, - transactionHash: result.hash, - }); - - if (memo === RebalanceTransactionMemo.Rebalance) { - if 
(effectiveAmount) { - effectivePtUsdeAmount = effectiveAmount; - } - } - } - - // Execute Leg 3: ptUSDe → Solana CCIP immediately after Leg 2 - logger.info('Executing Leg 3: Mainnet ptUSDe → Solana via CCIP adapter', logContext); - - const ccipAdapter = context.rebalance.getAdapter(SupportedBridge.CCIP); - - // Reuse ptUsdeAddress from Leg 2 scope for Leg 3 - - // Create route for ptUSDe → Solana CCIP bridge - const ccipRoute = { - asset: ptUsdeAddress, - origin: Number(MAINNET_CHAIN_ID), - destination: Number(SOLANA_CHAINID), // Back to Solana - }; - - // Execute Leg 3 CCIP transactions - const solanaRecipient = context.solanaSigner?.getAddress(); - if (!solanaRecipient) throw new Error('Solana signer address unavailable for CCIP leg 3'); - - const ccipTxRequests = await ccipAdapter.send(recipient, solanaRecipient, effectivePtUsdeAmount, ccipRoute); - - let leg3CcipTx: TransactionSubmissionResult | undefined; - - for (const { transaction, memo } of ccipTxRequests) { - logger.info('Submitting CCIP ptUSDe → Solana transaction', { - requestId, - memo, - transaction, - }); - - const result = await submitTransactionWithLogging({ - chainService: context.chainService, - logger, - chainId: MAINNET_CHAIN_ID.toString(), - txRequest: { - to: transaction.to!, - data: transaction.data!, - value: (transaction.value || 0).toString(), - chainId: Number(MAINNET_CHAIN_ID), - from: rebalanceConfig.ownAddress, - funcSig: transaction.funcSig || '', - }, - zodiacConfig: { - walletType: WalletType.EOA, - }, - context: { requestId, route: ccipRoute, bridgeType: SupportedBridge.CCIP, transactionType: memo }, - }); - - logger.info('Successfully submitted CCIP transaction', { - requestId, - memo, - transactionHash: result.hash, - }); - - // Store the CCIP bridge transaction hash (not approval) - if (memo === RebalanceTransactionMemo.Rebalance) { - leg3CcipTx = result; - } - } - - // Update operation with Leg 3 CCIP transaction hash for status tracking - if (leg3CcipTx) { - const 
leg3Receipt: TransactionReceipt = leg3CcipTx.receipt!; - - const insertedTransactions = { - [MAINNET_CHAIN_ID]: leg3Receipt, - }; - - await db.updateRebalanceOperation(operation.id, { - txHashes: insertedTransactions, - }); - - logger.info('Stored Leg 3 CCIP transaction hash for status tracking', { - requestId, - operationId: operation.id, - leg3CcipTxHash: leg3CcipTx.hash, - }); - } - - // Keep status as AWAITING_CALLBACK - Leg 3 CCIP takes 20+ minutes - // Will be checked in next callback cycle - logger.info('Legs 1, 2, and 3 submitted successfully', { - ...logContext, - ptUsdeAmount: effectivePtUsdeAmount, - note: 'Leg 1: Done, Leg 2: Done, Leg 3: CCIP submitted, waiting for completion', - status: 'AWAITING_CALLBACK', - }); - } catch (pendleError) { - logger.error('Failed to execute Leg 2/3', { - ...logContext, - error: jsonifyError(pendleError), - }); - - // Mark operation as FAILED since Leg 2/3 failed - await db.updateRebalanceOperation(operation.id, { - status: RebalanceOperationStatus.CANCELLED, - }); - - logger.info('Marked operation as FAILED due to Leg 2/3 failure', { - ...logContext, - note: 'Funds are on Mainnet as USDC - manual intervention may be required', - }); - } - } else if (ccipStatus.status === 'FAILURE') { - logger.error('CCIP bridge transaction failed', { - ...logContext, - solanaTransactionHash, - ccipMessage: ccipStatus.message, - shouldRetry: false, - }); + // Get quote from Pendle for USDC → ptUSDe + const receivedAmountStr = await pendleAdapter.getReceivedAmount(swapAmount, pendleRoute); - // Mark operation as FAILED since CCIP bridge failed - await db.updateRebalanceOperation(operation.id, { - status: RebalanceOperationStatus.FAILED, - }); + logger.info('Received Pendle quote for USDC → ptUSDe swap', { + ...logContext, + amountToSwap: swapAmount, + expectedPtUsde: receivedAmountStr, + route: pendleRoute, + }); - logger.info('Marked operation as FAILED due to CCIP bridge failure', { - ...logContext, - note: 'Leg 1 CCIP bridge failed - 
funds may still be on Solana', - }); - } else { - // CCIP still pending - check if it's been too long (CCIP typically takes 20 minutes) - const twentyMinutesMs = 20 * 60 * 1000; - - if (timeSinceCreation > twentyMinutesMs) { - logger.warn('CCIP bridge taking longer than expected', { - ...logContext, - solanaTransactionHash, - timeSinceCreation, - expectedMaxTime: twentyMinutesMs, - ccipStatus: ccipStatus.status, - ccipMessage: ccipStatus.message, - shouldInvestigate: true, - }); - } else { - logger.debug('CCIP bridge still pending within expected timeframe', { - ...logContext, - solanaTransactionHash, - timeSinceCreation, - remainingTime: twentyMinutesMs - timeSinceCreation, - ccipStatus: ccipStatus.status, - }); - } - } - } catch (error) { - logger.error('Failed to check CCIP bridge completion status', { - ...logContext, - error: jsonifyError(error), - }); + // Execute the Pendle swap transactions + const swapTxRequests = await pendleAdapter.send(recipient, recipient, swapAmount, pendleRoute); + + if (!swapTxRequests.length) { + logger.error('No swap transactions returned from Pendle adapter', logContext); + return; } - } - // Check operations in AWAITING_CALLBACK status for Leg 3 (ptUSDe → Solana CCIP) completion - const { operations: awaitingCallbackOps } = await db.getRebalanceOperations(undefined, undefined, { - status: [RebalanceOperationStatus.AWAITING_CALLBACK], - bridge: 'ccip-solana-mainnet', - }); + logger.info('Executing Pendle USDC → ptUSDe swap transactions', { + ...logContext, + transactionCount: swapTxRequests.length, + recipient, + }); - logger.debug('Found operations awaiting Leg 3 CCIP completion', { - count: awaitingCallbackOps.length, - requestId, - status: RebalanceOperationStatus.AWAITING_CALLBACK, - }); + let effectivePtUsdeAmount = receivedAmountStr; - for (const operation of awaitingCallbackOps) { - const logContext = { - requestId, - operationId: operation.id, - earmarkId: operation.earmarkId, - originChain: operation.originChainId, - 
destinationChain: operation.destinationChainId, - }; + for (const { transaction, memo, effectiveAmount } of swapTxRequests) { + logger.info('Submitting Pendle swap transaction', { + requestId, + memo, + transaction, + }); - // Check for operation timeout - mark as failed if stuck for too long - if (operation.createdAt && isOperationTimedOut(new Date(operation.createdAt))) { - logger.warn('AWAITING_CALLBACK operation has exceeded TTL, marking as FAILED', { - ...logContext, - createdAt: operation.createdAt, - ttlMinutes: DEFAULT_OPERATION_TTL_MINUTES, - note: 'Leg 3 CCIP may have failed or taken too long', + const result = await submitTransactionWithLogging({ + chainService, + logger, + chainId: MAINNET_CHAIN_ID.toString(), + txRequest: { + to: transaction.to!, + data: transaction.data!, + value: (transaction.value || 0).toString(), + chainId: Number(MAINNET_CHAIN_ID), + from: rebalanceConfig.ownAddress, + funcSig: transaction.funcSig || '', + }, + zodiacConfig: { + walletType: WalletType.EOA, + }, + context: { requestId, route: pendleRoute, bridgeType: SupportedBridge.Pendle, transactionType: memo }, }); - await db.updateRebalanceOperation(operation.id, { - status: RebalanceOperationStatus.EXPIRED, + + logger.info('Successfully submitted Pendle swap transaction', { + requestId, + memo, + transactionHash: result.hash, }); - continue; + + if (memo === RebalanceTransactionMemo.Rebalance && effectiveAmount) { + effectivePtUsdeAmount = effectiveAmount; + } } - logger.info('Checking Leg 3 CCIP completion (ptUSDe → Solana)', logContext); + // Execute Leg 3: ptUSDe → Solana CCIP immediately after Leg 2 + logger.info('Executing Leg 3: Mainnet ptUSDe → Solana via CCIP adapter', logContext); - try { - // Get Leg 3 CCIP transaction hash from mainnet transactions - const mainnetTransactionHash = operation.transactions?.[MAINNET_CHAIN_ID]?.transactionHash; - if (!mainnetTransactionHash) { - logger.warn('No Leg 3 CCIP transaction hash found', { - ...logContext, - transactions: 
operation.transactions, - }); - continue; - } + const ccipAdapter = rebalance.getAdapter(SupportedBridge.CCIP); - // Check if Leg 3 CCIP (ptUSDe → Solana) is ready on destination - const ccipAdapter = context.rebalance.getAdapter(SupportedBridge.CCIP) as CCIPBridgeAdapter; + const ccipRoute = { + asset: ptUsdeAddress, + origin: Number(MAINNET_CHAIN_ID), + destination: Number(SOLANA_CHAINID), + }; - const leg3Route = { - origin: Number(MAINNET_CHAIN_ID), - destination: Number(SOLANA_CHAINID), - asset: '', // Will be filled by adapter - }; + const solanaRecipient = context.solanaSigner?.getAddress(); + if (!solanaRecipient) throw new Error('Solana signer address unavailable for CCIP leg 3'); - // Create minimal receipt for readyOnDestination - the CCIP adapter only uses - // transactionHash and status fields, so we cast a partial object - const isLeg3Ready = await ccipAdapter.readyOnDestination('0', leg3Route, { - transactionHash: mainnetTransactionHash, - status: 'success', - } as ViemTransactionReceipt); + const ccipTxRequests = await ccipAdapter.send(recipient, solanaRecipient, effectivePtUsdeAmount, ccipRoute); - logger.info('Leg 3 CCIP readiness check', { - ...logContext, - mainnetTransactionHash, - isReady: isLeg3Ready, - route: leg3Route, + let leg3CcipTx: TransactionSubmissionResult | undefined; + + for (const { transaction, memo } of ccipTxRequests) { + logger.info('Submitting CCIP ptUSDe → Solana transaction', { + requestId, + memo, + transaction, }); - if (isLeg3Ready) { - // All 3 legs completed successfully - await db.updateRebalanceOperation(operation.id, { - status: RebalanceOperationStatus.COMPLETED, - }); + const result = await submitTransactionWithLogging({ + chainService, + logger, + chainId: MAINNET_CHAIN_ID.toString(), + txRequest: { + to: transaction.to!, + data: transaction.data!, + value: (transaction.value || 0).toString(), + chainId: Number(MAINNET_CHAIN_ID), + from: rebalanceConfig.ownAddress, + funcSig: transaction.funcSig || '', + }, + 
zodiacConfig: { + walletType: WalletType.EOA, + }, + context: { requestId, route: ccipRoute, bridgeType: SupportedBridge.CCIP, transactionType: memo }, + }); + + logger.info('Successfully submitted CCIP transaction', { + requestId, + memo, + transactionHash: result.hash, + }); + + if (memo === RebalanceTransactionMemo.Rebalance) { + leg3CcipTx = result; + } + } - logger.info('All 3 legs completed successfully', { + // Update operation with Leg 3 CCIP transaction hash for status tracking + if (leg3CcipTx) { + if (!leg3CcipTx.receipt) { + logger.error('Leg 3 CCIP transaction submitted but receipt is missing — operation may get stuck', { ...logContext, - mainnetTransactionHash, - note: 'Leg 1: Solana→Mainnet CCIP ✓, Leg 2: USDC→ptUSDe ✓, Leg 3: ptUSDe→Solana CCIP ✓', - finalStatus: 'COMPLETED', + leg3CcipTxHash: leg3CcipTx.hash, }); } else { - logger.debug('Leg 3 CCIP still pending', { - ...logContext, - mainnetTransactionHash, - note: 'Waiting for ptUSDe → Solana CCIP to complete', + await db.updateRebalanceOperation(operation.id, { + txHashes: { [MAINNET_CHAIN_ID]: leg3CcipTx.receipt }, + }); + + logger.info('Stored Leg 3 CCIP transaction hash for status tracking', { + requestId, + operationId: operation.id, + leg3CcipTxHash: leg3CcipTx.hash, }); } - } catch (error) { - logger.error('Failed to check Leg 3 CCIP completion', { - ...logContext, - error: jsonifyError(error), - }); } + + // Only transition to AWAITING_CALLBACK after all legs are submitted and tx hash is stored. + // This ensures the operation is never in AWAITING_CALLBACK without a stored tx hash. 
+ await db.updateRebalanceOperation(operation.id, { + status: RebalanceOperationStatus.AWAITING_CALLBACK, + }); + + logger.info('Legs 1, 2, and 3 submitted successfully', { + ...logContext, + ptUsdeAmount: effectivePtUsdeAmount, + note: 'Leg 1: Done, Leg 2: Done, Leg 3: CCIP submitted, waiting for completion', + status: 'AWAITING_CALLBACK', + }); + } catch (pendleError) { + logger.error('Failed to execute Leg 2/3', { + ...logContext, + error: jsonifyError(pendleError), + }); + + await db.updateRebalanceOperation(operation.id, { + status: RebalanceOperationStatus.CANCELLED, + }); + + logger.info('Marked operation as CANCELLED due to Leg 2/3 failure', { + ...logContext, + note: 'Funds are on Mainnet as USDC or ptUSDe (depending on which leg failed) - manual intervention required', + }); } -}; +} + +/** + * Check if Leg 3 (ptUSDe → Solana CCIP) has completed. + * Operations in AWAITING_CALLBACK status have Legs 1+2 done, waiting for Leg 3 CCIP. + */ +async function checkLeg3Completion(operation: RebalanceOperation, context: ProcessingContext): Promise { + const { logger, requestId, database: db } = context; + const logContext = { + requestId, + operationId: operation.id, + earmarkId: operation.earmarkId, + originChain: operation.originChainId, + destinationChain: operation.destinationChainId, + }; + + logger.info('Checking Leg 3 CCIP completion (ptUSDe → Solana)', logContext); + + // Get Leg 3 CCIP transaction hash from mainnet transactions + const mainnetTransactionHash = operation.transactions?.[MAINNET_CHAIN_ID]?.transactionHash; + if (!mainnetTransactionHash) { + logger.warn('No Leg 3 CCIP transaction hash found', { + ...logContext, + transactions: operation.transactions, + }); + return; + } + + // Check if Leg 3 CCIP (ptUSDe → Solana) is ready on destination + const ccipAdapter = context.rebalance.getAdapter(SupportedBridge.CCIP) as CCIPBridgeAdapter; + + const leg3Route = { + origin: Number(MAINNET_CHAIN_ID), + destination: Number(SOLANA_CHAINID), + asset: '', + 
}; + + const isLeg3Ready = await ccipAdapter.readyOnDestination('0', leg3Route, { + transactionHash: mainnetTransactionHash, + status: 'success', + } as ViemTransactionReceipt); + + logger.info('Leg 3 CCIP readiness check', { + ...logContext, + mainnetTransactionHash, + isReady: isLeg3Ready, + route: leg3Route, + }); + + if (isLeg3Ready) { + await db.updateRebalanceOperation(operation.id, { + status: RebalanceOperationStatus.COMPLETED, + }); + + logger.info('All 3 legs completed successfully', { + ...logContext, + mainnetTransactionHash, + finalStatus: 'COMPLETED', + }); + } else { + logger.debug('Leg 3 CCIP still pending', { + ...logContext, + mainnetTransactionHash, + note: 'Waiting for ptUSDe → Solana CCIP to complete', + }); + } +} diff --git a/packages/poller/src/rebalance/tacUsdt.ts b/packages/poller/src/rebalance/tacUsdt.ts index 05bba0eb..62311457 100644 --- a/packages/poller/src/rebalance/tacUsdt.ts +++ b/packages/poller/src/rebalance/tacUsdt.ts @@ -10,7 +10,7 @@ import { safeParseBigInt, getTonAssetDecimals, } from '../helpers'; -import { jsonifyMap, jsonifyError } from '@mark/logger'; +import { Logger, jsonifyMap, jsonifyError } from '@mark/logger'; import { RebalanceOperationStatus, BPS_MULTIPLIER, @@ -20,14 +20,11 @@ import { TAC_CHAIN_ID, TON_LZ_CHAIN_ID, getTokenAddressFromConfig, - WalletType, EarmarkStatus, getDecimalsFromConfig, } from '@mark/core'; import { ProcessingContext } from '../init'; import { getActualAddress } from '../helpers/zodiac'; -import { submitTransactionWithLogging } from '../helpers/transactions'; -import { MemoizedTransactionRequest, RebalanceTransactionMemo } from '@mark/rebalance'; import { createRebalanceOperation, Earmark, @@ -41,15 +38,9 @@ import { const USDT_ON_ETH_ADDRESS = '0xdAC17F958D2ee523a2206206994597C13D831ec7'; const USDT_TICKER_HASH = '0x8b1a1d9c2b109e527c9134b25b1a1833b16b6594f92daa9f6d9b7a6024bce9d0'; -/** - * Sender configuration for TAC rebalancing transactions. 
- * Specifies which address should sign and send from Ethereum mainnet. - */ -interface TacSenderConfig { - address: string; // Sender's Ethereum address - signerUrl?: string; // Web3signer URL for this sender (uses default if not specified) - label: 'market-maker' | 'fill-service'; // For logging -} +import { isOperationTimedOut, DEFAULT_OPERATION_TTL_MINUTES } from './helpers'; +import { SenderConfig, RebalanceRunState } from './types'; +import { executeEvmBridge } from './bridgeExecution'; /** * Resolved USDT token addresses and decimals for TAC rebalancing. @@ -66,23 +57,6 @@ interface UsdtInfo { // Minimum TON balance required for gas (0.5 TON in nanotons) const MIN_TON_GAS_BALANCE = 500000000n; -// Default operation timeout: 24 hours (in minutes) -const DEFAULT_OPERATION_TTL_MINUTES = 24 * 60; - -/** - * Check if an operation has exceeded its TTL (time-to-live). - * Operations stuck in PENDING or AWAITING_CALLBACK for too long should be marked as failed. - * - * @param createdAt - Operation creation timestamp - * @param ttlMinutes - TTL in minutes (default: 24 hours) - * @returns true if operation has timed out - */ -function isOperationTimedOut(createdAt: Date, ttlMinutes: number = DEFAULT_OPERATION_TTL_MINUTES): boolean { - const maxAgeMs = ttlMinutes * 60 * 1000; - const operationAgeMs = Date.now() - createdAt.getTime(); - return operationAgeMs > maxAgeMs; -} - /** * Type for TAC transaction metadata stored in database * Used for type-safe access to transactionLinker in callbacks @@ -164,6 +138,7 @@ function buildTonApiHeaders(apiKey?: string): Record { */ async function getTonNativeBalance( walletAddress: string, + logger: Logger, apiKey?: string, rpcUrl: string = TONAPI_DEFAULT_URL, ): Promise { @@ -174,17 +149,19 @@ async function getTonNativeBalance( }); if (!response.ok) { + logger.warn('getTonNativeBalance: non-OK response', { walletAddress, status: response.status }); return 0n; } const data = (await response.json()) as { balance?: number | string 
}; if (data.balance === undefined) { + logger.warn('getTonNativeBalance: balance field undefined in response', { walletAddress }); return 0n; } return safeParseBigInt(data.balance.toString()); } catch (error) { - console.log('getTonNativeBalance error', error); + logger.error('getTonNativeBalance error', { walletAddress, error: jsonifyError(error) }); return 0n; } } @@ -201,6 +178,7 @@ async function getTonNativeBalance( async function getTonJettonBalance( walletAddress: string, jettonAddress: string, + logger: Logger, apiKey?: string, rpcUrl: string = TONAPI_DEFAULT_URL, ): Promise { @@ -211,133 +189,23 @@ async function getTonJettonBalance( }); if (!response.ok) { + logger.warn('getTonJettonBalance: non-OK response', { walletAddress, jettonAddress, status: response.status }); return 0n; } const data = (await response.json()) as { balance?: string }; if (data.balance === undefined) { + logger.warn('getTonJettonBalance: balance field undefined in response', { walletAddress, jettonAddress }); return 0n; } return safeParseBigInt(data.balance); - } catch { + } catch (error) { + logger.error('getTonJettonBalance error', { walletAddress, jettonAddress, error: jsonifyError(error) }); return 0n; } } -type ExecuteBridgeContext = Pick; - -interface ExecuteBridgeParams { - context: ExecuteBridgeContext; - route: { - origin: number; - destination: number; - asset: string; - }; - bridgeType: SupportedBridge; - bridgeTxRequests: MemoizedTransactionRequest[]; - amountToBridge: bigint; - senderOverride?: TacSenderConfig; // Optional: use different sender than config.ownAddress -} - -interface ExecuteBridgeResult { - receipt?: TransactionReceipt; - effectiveBridgedAmount: string; -} - -/** - * Submits a sequence of bridge transactions and returns the final receipt and effective bridged amount. 
- * @param senderOverride - If provided, uses this address as sender instead of config.ownAddress - */ -const executeBridgeTransactions = async ({ - context, - route, - bridgeType, - bridgeTxRequests, - amountToBridge, - senderOverride, -}: ExecuteBridgeParams): Promise => { - const { logger, chainService, config, requestId } = context; - - // Use sender override if provided, otherwise default to ownAddress - const senderAddress = senderOverride?.address ?? config.ownAddress; - const senderLabel = senderOverride?.label ?? 'market-maker'; - - let idx = -1; - let effectiveBridgedAmount = amountToBridge.toString(); - let receipt: TransactionReceipt | undefined; - - for (const { transaction, memo, effectiveAmount } of bridgeTxRequests) { - idx++; - logger.info('Submitting TAC bridge transaction', { - requestId, - route, - bridgeType, - transactionIndex: idx, - totalTransactions: bridgeTxRequests.length, - transaction, - memo, - amountToBridge, - sender: senderAddress, - senderType: senderLabel, - }); - - const result = await submitTransactionWithLogging({ - chainService, - logger, - chainId: route.origin.toString(), - txRequest: { - to: transaction.to!, - data: transaction.data!, - value: (transaction.value || 0).toString(), - chainId: route.origin, - from: senderAddress, - funcSig: transaction.funcSig || '', - }, - zodiacConfig: { - walletType: WalletType.EOA, - }, - context: { requestId, route, bridgeType, transactionType: memo, sender: senderLabel }, - }); - - logger.info('Successfully submitted TAC bridge transaction', { - requestId, - route, - bridgeType, - transactionIndex: idx, - totalTransactions: bridgeTxRequests.length, - transactionHash: result.hash, - memo, - amountToBridge, - }); - - if (memo !== RebalanceTransactionMemo.Rebalance) { - continue; - } - - receipt = result.receipt! 
as unknown as TransactionReceipt; - if (effectiveAmount) { - effectiveBridgedAmount = effectiveAmount; - logger.info('Using effective bridged amount from adapter', { - requestId, - originalAmount: amountToBridge.toString(), - effectiveAmount: effectiveBridgedAmount, - bridgeType, - }); - } - } - - return { receipt, effectiveBridgedAmount }; -}; - -/** - * Shared state for tracking ETH USDT that has been committed in this run - * This prevents over-committing when both MM and FS need rebalancing simultaneously - */ -interface RebalanceRunState { - committedEthUsdt: bigint; // Amount of ETH USDT committed in this run (not yet confirmed on-chain) -} - /** * Main TAC USDT rebalancing function * @@ -458,7 +326,7 @@ export async function rebalanceTacUsdt(context: ProcessingContext): Promise 0n) { + if (runState.committedAmount > 0n) { logger.info('MM committed funds, reducing available balance for FS', { requestId, - mmCommitted: runState.committedEthUsdt.toString(), + mmCommitted: runState.committedAmount.toString(), fsAvailable: fsAvailableBalance.toString(), }); } @@ -488,7 +356,7 @@ export async function rebalanceTacUsdt(context: ProcessingContext): Promise 0) { logger.info('Active rebalance in progress for recipient', { requestId, @@ -1052,15 +859,15 @@ const processThresholdRebalancing = async ({ return []; } - // 4. Use available ETH balance (already accounts for committed funds in this run) - // This prevents over-committing when both MM and FS need rebalancing simultaneously - const remainingEthUsdt = availableEthUsdt - runState.committedEthUsdt; + // 4. Use available ETH balance as-is — callers are responsible for deducting + // committedAmount where appropriate (e.g., cross-wallet flow deducts before calling). 
+ const remainingEthUsdt = availableEthUsdt; logger.debug('Threshold rebalancing: checking available balance', { requestId, recipient: recipientAddress, availableEthUsdt: availableEthUsdt.toString(), - alreadyCommitted: runState.committedEthUsdt.toString(), + alreadyCommitted: runState.committedAmount.toString(), remainingEthUsdt: remainingEthUsdt.toString(), shortfall: shortfall.toString(), }); @@ -1091,12 +898,12 @@ const processThresholdRebalancing = async ({ // Track committed funds if bridge was successful if (actions.length > 0) { - runState.committedEthUsdt += amountToBridge; + runState.committedAmount += amountToBridge; logger.debug('Updated committed funds after threshold bridge', { requestId, recipient: recipientAddress, bridgedAmount: amountToBridge.toString(), - totalCommitted: runState.committedEthUsdt.toString(), + totalCommitted: runState.committedAmount.toString(), }); } @@ -1110,11 +917,6 @@ const executeTacBridge = async ( earmarkId: string | null, // null for threshold-based ): Promise => { const { config, chainService, fillServiceChainService, logger, requestId, rebalance, prometheus } = context; - // Existing Stargate bridge logic - // Store recipientAddress in operation.recipient - // Store earmarkId (null for threshold-based) - const actions: RebalanceAction[] = []; - // Determine if this is for Fill Service or Market Maker based on recipient const isForFillService = recipientAddress.toLowerCase() === config.tacRebalance?.fillService?.address?.toLowerCase(); const walletType = isForFillService ? 
'fill-service' : 'market-maker'; @@ -1143,7 +945,6 @@ const executeTacBridge = async ( const origin = Number(MAINNET_CHAIN_ID); // Always start from Ethereum mainnet // --- Leg 1: Bridge USDT from Ethereum to TON via Stargate --- - let rebalanceSuccessful = false; const bridgeType = SupportedBridge.Stargate; // Determine sender for the bridge based on recipient type @@ -1154,7 +955,7 @@ const executeTacBridge = async ( config.tacRebalance?.fillService?.senderAddress ?? config.tacRebalance?.fillService?.address; let evmSender: string; - let senderConfig: TacSenderConfig | undefined; + let senderConfig: SenderConfig | undefined; let selectedChainService = chainService; if (isForFillService && fillerSenderAddress && fillServiceChainService) { @@ -1289,7 +1090,7 @@ const executeTacBridge = async ( }); // Use slippage from config (default 500 = 5%) - const slippageDbps = config.tacRebalance!.bridge.slippageDbps; + const slippageDbps = config.tacRebalance!.bridge.slippageDbps ?? 500; const route = { asset: USDT_ON_ETH_ADDRESS, @@ -1318,9 +1119,6 @@ const executeTacBridge = async ( try { // CRITICAL: Convert amount from 18 decimals to native USDT decimals (6) - // The Stargate API expects amounts in native token units, not normalized 18 decimals - // Without this conversion, amounts like "10000000000000000000" (10 USDT in 18 decimals) - // are interpreted as 10 trillion USDT, exceeding pool liquidity and causing "Failed to get route" const ethUsdtDecimals = getDecimalsFromConfig(USDT_TICKER_HASH, origin.toString(), config) ?? 
6; const amountInNativeUnits = convertToNativeUnits(amount, ethUsdtDecimals); @@ -1331,99 +1129,29 @@ const executeTacBridge = async ( decimals: ethUsdtDecimals, }); - // Get quote - const receivedAmountStr = await adapter.getReceivedAmount(amountInNativeUnits.toString(), route); - logger.info('Received Stargate quote', { - requestId, - route, - amountToBridge: amountInNativeUnits.toString(), - receivedAmount: receivedAmountStr, - }); - - // Check slippage - use safeParseBigInt for adapter response - // Note: Both receivedAmount and minimumAcceptableAmount are in native units (6 decimals) - const receivedAmount = safeParseBigInt(receivedAmountStr); - // slippagesDbps config uses basis points (500 = 5%), not deci-basis points - const slippageBps = BigInt(route.slippagesDbps[0]); - const minimumAcceptableAmount = amountInNativeUnits - (amountInNativeUnits * slippageBps) / BPS_MULTIPLIER; - - if (receivedAmount < minimumAcceptableAmount) { - logger.warn('Stargate quote does not meet slippage requirements', { - requestId, - route, - amountToBridge: amountInNativeUnits.toString(), - receivedAmount: receivedAmount.toString(), - minimumAcceptableAmount: minimumAcceptableAmount.toString(), - }); - return []; - } - - // Get bridge transactions - // Sender is EVM address, recipient is TON address (for Stargate to deliver to) - const bridgeTxRequests = await adapter.send(evmSender, tonRecipient, amountInNativeUnits.toString(), route); - - if (!bridgeTxRequests.length) { - logger.error('No bridge transactions returned from Stargate adapter', { requestId }); - return []; - } - - logger.info('Prepared Stargate bridge transactions', { - requestId, - route, - transactionCount: bridgeTxRequests.length, - }); - - // Execute bridge transactions using the selected chain service and sender - const { receipt, effectiveBridgedAmount } = await executeBridgeTransactions({ - context: { requestId, logger, chainService: selectedChainService, config }, - route, - bridgeType, - 
bridgeTxRequests, - amountToBridge: amount, - senderOverride: senderConfig, - }); - - // Create database record for Leg 1 - // Store both TON recipient (for Stargate) and TAC recipient (for Leg 2) - // Note: Use USDT_TICKER_HASH as fallback to ensure we store ticker hash, not address - await createRebalanceOperation({ - earmarkId: earmarkId, - originChainId: route.origin, - destinationChainId: route.destination, - tickerHash: getTickerForAsset(route.asset, route.origin, config) || USDT_TICKER_HASH, - amount: effectiveBridgedAmount, - slippage: route.slippagesDbps[0], - status: RebalanceOperationStatus.PENDING, - bridge: 'stargate-tac', // Tagged for TAC flow - transactions: receipt - ? { - [route.origin]: receipt, - } - : undefined, - recipient: tacRecipient, // Final TAC recipient - }); - - logger.info('Successfully created TAC Leg 1 rebalance operation', { - requestId, + const result = await executeEvmBridge({ + context, + adapter, route, - bridgeType, - originTxHash: receipt?.transactionHash, - amountToBridge: effectiveBridgedAmount, + amount: amountInNativeUnits, + dbAmount: amount, // preserve 18-decimal for DB record + sender: evmSender, + recipient: tonRecipient, + dbRecipient: tacRecipient, + slippageTolerance: BigInt(route.slippagesDbps[0]), + slippageMultiplier: BPS_MULTIPLIER, + chainService: selectedChainService, + senderConfig, + dbRecord: { + earmarkId: earmarkId, + tickerHash: getTickerForAsset(route.asset, route.origin, config) || USDT_TICKER_HASH, + bridgeTag: 'stargate-tac', + status: RebalanceOperationStatus.PENDING, + }, + label: 'TAC threshold Stargate', }); - // Track the operation - const rebalanceAction: RebalanceAction = { - bridge: adapter.type(), - amount: amount.toString(), - origin: route.origin, - destination: route.destination, - asset: route.asset, - transaction: receipt?.transactionHash || '', - recipient: tacRecipient, // Final TAC destination - }; - actions.push(rebalanceAction); - - rebalanceSuccessful = true; + return 
result.actions; } catch (error) { logger.error('Failed to execute Stargate bridge', { requestId, @@ -1433,22 +1161,6 @@ const executeTacBridge = async ( }); return []; } - - if (rebalanceSuccessful) { - logger.info('Leg 1 rebalance successful', { - requestId, - route, - amount: amount.toString(), - }); - } else { - logger.warn('Failed to complete Leg 1 rebalance', { - requestId, - route, - amount: amount.toString(), - }); - } - - return actions; }; /** @@ -1478,16 +1190,24 @@ const evaluateFillServiceRebalance = async ( return []; } - // Check for pending FS rebalancing operations - const { operations: inFlightOps } = await db.getRebalanceOperations(undefined, undefined, { - status: [RebalanceOperationStatus.PENDING, RebalanceOperationStatus.AWAITING_CALLBACK], - bridge: [`${SupportedBridge.Stargate}-tac`, SupportedBridge.TacInner], - }); + // Check for pending FS rebalancing operations (scoped to FS recipient, not global) + const [fsInFlightTac, fsInFlightTon] = await Promise.all([ + db.getRebalanceOperationByRecipient(Number(TAC_CHAIN_ID), fsConfig.address!, [ + RebalanceOperationStatus.PENDING, + RebalanceOperationStatus.AWAITING_CALLBACK, + ]), + db.getRebalanceOperationByRecipient(Number(TON_LZ_CHAIN_ID), fsConfig.address!, [ + RebalanceOperationStatus.PENDING, + RebalanceOperationStatus.AWAITING_CALLBACK, + ]), + ]); + const inFlightOps = [...fsInFlightTac, ...fsInFlightTon]; if (inFlightOps.length > 0) { - logger.info('TAC in-flight rebalance operations exist. 
skipping...', { + logger.info('TAC FS in-flight rebalance operations exist, skipping', { requestId, inFlightOps: inFlightOps.length, + recipient: fsConfig.address, }); return []; } @@ -1645,12 +1365,12 @@ const evaluateFillServiceRebalance = async ( } // Check if MM has funds available - const mmRemainingBalance = mmAvailableEthUsdt - runState.committedEthUsdt; + const mmRemainingBalance = mmAvailableEthUsdt - runState.committedAmount; if (mmRemainingBalance < minRebalance18) { logger.info('Cross-wallet rebalancing: MM has insufficient available funds', { requestId, mmAvailableEthUsdt: mmAvailableEthUsdt.toString(), - committed: runState.committedEthUsdt.toString(), + committed: runState.committedAmount.toString(), mmRemainingBalance: mmRemainingBalance.toString(), minRebalance: minRebalance18.toString(), }); @@ -1706,7 +1426,7 @@ const calculateMinExpectedAmount = (amount: bigint, slippageBps: number): bigint * - Each flow only bridges its own operation-specific amount * - This prevents mixing funds from multiple concurrent flows */ -const executeTacCallbacks = async (context: ProcessingContext): Promise => { +export const executeTacCallbacks = async (context: ProcessingContext): Promise => { const { logger, requestId, config, rebalance, database: db } = context; logger.info('Executing TAC USDT rebalance callbacks', { requestId }); @@ -1950,7 +1670,7 @@ const executeTacCallbacks = async (context: ProcessingContext): Promise => const tonUSDTDecimals = getTonAssetDecimals(operation.tickerHash, config) ?? 
6; // Check TON native balance for gas - const tonNativeBalance = await getTonNativeBalance(tonWalletAddress, tonApiKey); + const tonNativeBalance = await getTonNativeBalance(tonWalletAddress, logger, tonApiKey); if (tonNativeBalance < MIN_TON_GAS_BALANCE) { logger.error('Insufficient TON balance for gas', { ...logContext, @@ -1963,7 +1683,7 @@ const executeTacCallbacks = async (context: ProcessingContext): Promise => } // Get actual USDT balance on TON - const actualUsdtBalance = await getTonJettonBalance(tonWalletAddress, jettonAddress, tonApiKey); + const actualUsdtBalance = await getTonJettonBalance(tonWalletAddress, jettonAddress, logger, tonApiKey); const actualUsdtBalance18 = convertTo18Decimals(actualUsdtBalance, tonUSDTDecimals); logger.info('Ton Jetton Balance', { tonWalletAddress, @@ -2163,7 +1883,7 @@ const executeTacCallbacks = async (context: ProcessingContext): Promise => if (tonMnemonic && tonWalletAddress) { // Get actual USDT balance on TON - const actualUsdtBalance = await getTonJettonBalance(tonWalletAddress, jettonAddress, tonApiKey); + const actualUsdtBalance = await getTonJettonBalance(tonWalletAddress, jettonAddress, logger, tonApiKey); const actualUsdtBalance18 = convertTo18Decimals(actualUsdtBalance, tonUSDTDecimals); if (actualUsdtBalance === 0n) { @@ -2173,7 +1893,7 @@ const executeTacCallbacks = async (context: ProcessingContext): Promise => } else { // TON has USDT - try to execute the bridge // First check TON gas balance - const tonNativeBalance = await getTonNativeBalance(tonWalletAddress, tonApiKey); + const tonNativeBalance = await getTonNativeBalance(tonWalletAddress, logger, tonApiKey); if (tonNativeBalance < MIN_TON_GAS_BALANCE) { logger.error('Insufficient TON balance for gas (retry)', { ...logContext, diff --git a/packages/poller/src/rebalance/thresholdEngine.ts b/packages/poller/src/rebalance/thresholdEngine.ts new file mode 100644 index 00000000..69ef83da --- /dev/null +++ b/packages/poller/src/rebalance/thresholdEngine.ts 
@@ -0,0 +1,203 @@ +/** + * Generic threshold-based rebalancing engine. + * + * Captures the shared orchestration pattern used across all threshold rebalancers: + * check in-flight → get recipient balance → threshold compare → compute shortfall → + * get sender balance → apply min/max caps → execute bridge → track committed amount + * + * Each rebalancer provides a descriptor with callbacks for the parts that differ + * (balance fetching, bridge execution, decimal conversion, etc.). + */ +import { RebalanceAction } from '@mark/core'; +import { jsonifyError } from '@mark/logger'; +import { ProcessingContext } from '../init'; +import { RebalanceRunState } from './types'; + +export interface ThresholdRebalanceDescriptor { + /** Human-readable name for logging (e.g., 'aManUSDe', 'mETH', 'Solana ptUSDe') */ + name: string; + + /** Whether this threshold rebalancer is enabled */ + isEnabled: () => boolean; + + /** Check for in-flight operations that would prevent a new rebalance. Return true to skip. */ + hasInFlightOperations: (context: ProcessingContext) => Promise; + + /** Get recipient's balance on destination chain (in normalized 18-decimal units) */ + getRecipientBalance: (context: ProcessingContext) => Promise; + + /** Threshold and target (in same units as recipient balance) */ + getThresholds: () => { threshold: bigint; target: bigint }; + + /** + * Convert a shortfall (in recipient-balance units) to the amount the sender needs to bridge. + * For same-decimal tokens, this is identity. For cross-decimal or pricing-dependent flows + * (e.g., Pendle USDC→ptUSDe), this performs the conversion. 
+ */ + convertShortfallToBridgeAmount: (shortfall: bigint, context: ProcessingContext) => Promise; + + /** Get sender's available balance on origin chain (in bridge-amount units) */ + getSenderBalance: (context: ProcessingContext) => Promise; + + /** Min/max caps on bridge amount (in bridge-amount units) */ + getAmountCaps: () => { min: bigint; max?: bigint }; + + /** Execute the bridge for the given amount. Returns the resulting actions. */ + executeBridge: (context: ProcessingContext, amount: bigint) => Promise; +} + +/** + * Run the threshold rebalance evaluation using the provided descriptor. + * + * Returns the actions taken (empty if no rebalance was needed or possible). + */ +export async function runThresholdRebalance( + context: ProcessingContext, + descriptor: ThresholdRebalanceDescriptor, + runState: RebalanceRunState, +): Promise { + const { logger, requestId } = context; + const name = descriptor.name; + + // 1. Check if enabled + if (!descriptor.isEnabled()) { + logger.debug(`${name} threshold rebalancing disabled`, { requestId }); + return []; + } + + // 2. Check for in-flight operations + try { + const hasInFlight = await descriptor.hasInFlightOperations(context); + if (hasInFlight) { + logger.info(`${name} has in-flight operations, skipping threshold rebalance`, { requestId }); + return []; + } + } catch (error) { + logger.error(`${name} failed to check in-flight operations`, { requestId, error: jsonifyError(error) }); + return []; + } + + // 3. Get recipient balance + let recipientBalance: bigint; + try { + recipientBalance = await descriptor.getRecipientBalance(context); + } catch (error) { + logger.warn(`${name} failed to get recipient balance`, { requestId, error: jsonifyError(error) }); + return []; + } + + // 4. 
Threshold comparison + const { threshold, target } = descriptor.getThresholds(); + + if (target < threshold) { + logger.error( + `${name} misconfiguration: target (${target.toString()}) is less than threshold (${threshold.toString()})`, + { + requestId, + threshold: threshold.toString(), + target: target.toString(), + }, + ); + return []; + } + + logger.info(`${name} threshold check`, { + requestId, + recipientBalance: recipientBalance.toString(), + threshold: threshold.toString(), + target: target.toString(), + committedAmount: runState.committedAmount.toString(), + }); + + if (recipientBalance >= threshold) { + logger.info(`${name} recipient balance above threshold, no rebalance needed`, { + requestId, + recipientBalance: recipientBalance.toString(), + threshold: threshold.toString(), + }); + return []; + } + + // 5. Compute shortfall and convert to bridge amount + // Defensive clamp: the checks above guarantee recipientBalance < threshold <= target, so shortfall is always positive here; the 0n branch only protects against future refactors + const shortfall = recipientBalance < target ? target - recipientBalance : 0n; + if (shortfall === 0n) { + logger.info(`${name} recipient balance above target, no shortfall`, { requestId }); + return []; + } + let bridgeAmount: bigint; + try { + bridgeAmount = await descriptor.convertShortfallToBridgeAmount(shortfall, context); + } catch (error) { + logger.warn(`${name} failed to convert shortfall to bridge amount`, { requestId, error: jsonifyError(error) }); + return []; + } + + // 6. Get sender balance + let senderBalance: bigint; + try { + senderBalance = await descriptor.getSenderBalance(context); + } catch (error) { + logger.warn(`${name} failed to get sender balance`, { requestId, error: jsonifyError(error) }); + return []; + } + + // 7. Calculate amount: min(bridgeAmount, senderBalance) + let amount = senderBalance < bridgeAmount ?
senderBalance : bridgeAmount; + + if (senderBalance < bridgeAmount) { + logger.warn(`${name} sender has insufficient balance to cover full shortfall`, { + requestId, + senderBalance: senderBalance.toString(), + bridgeAmount: bridgeAmount.toString(), + note: 'Will bridge available balance if above minimum', + }); + } + + // 8. Apply caps + const { min, max } = descriptor.getAmountCaps(); + if (max && max > 0n && amount > max) { + amount = max; + } + if (amount < min) { + logger.warn(`${name} available amount below minimum rebalance threshold, skipping`, { + requestId, + availableAmount: amount.toString(), + minRebalance: min.toString(), + }); + return []; + } + + logger.info(`${name} threshold rebalance triggered`, { + requestId, + shortfall: shortfall.toString(), + bridgeAmount: bridgeAmount.toString(), + senderBalance: senderBalance.toString(), + amount: amount.toString(), + }); + + // 9. Execute bridge + let actions: RebalanceAction[]; + try { + actions = await descriptor.executeBridge(context, amount); + } catch (error) { + logger.error(`${name} failed to execute bridge`, { + requestId, + amount: amount.toString(), + error: jsonifyError(error), + }); + return []; + } + + // 10. Track committed amount + if (actions.length > 0) { + runState.committedAmount += amount; + logger.debug(`${name} updated committed amount`, { + requestId, + bridgedAmount: amount.toString(), + totalCommitted: runState.committedAmount.toString(), + }); + } + + return actions; +} diff --git a/packages/poller/src/rebalance/types.ts b/packages/poller/src/rebalance/types.ts new file mode 100644 index 00000000..c62db855 --- /dev/null +++ b/packages/poller/src/rebalance/types.ts @@ -0,0 +1,21 @@ +/** + * Shared types for rebalancer modules. + */ + +/** + * Sender configuration for rebalancing transactions. + * Specifies which address should sign and send from origin chain. 
+ */ +export interface SenderConfig { + address: string; // Sender's chain address + signerUrl?: string; // Web3signer URL for this sender (uses default if not specified) + label: 'market-maker' | 'fill-service'; // For logging +} + +/** + * Shared state for tracking funds committed in a single rebalance run. + * Prevents over-committing when multiple wallets need rebalancing simultaneously. + */ +export interface RebalanceRunState { + committedAmount: bigint; +} diff --git a/packages/poller/test/rebalance/aaveTokenRebalancer.spec.ts b/packages/poller/test/rebalance/aaveTokenRebalancer.spec.ts new file mode 100644 index 00000000..ce0c0501 --- /dev/null +++ b/packages/poller/test/rebalance/aaveTokenRebalancer.spec.ts @@ -0,0 +1,1089 @@ +import { describe, it, expect, beforeEach, afterEach } from '@jest/globals'; +import { stub, createStubInstance, SinonStubbedInstance, SinonStub, restore } from 'sinon'; +import { Logger } from '@mark/logger'; +import { ChainService } from '@mark/chainservice'; +import { + MarkConfiguration, + SupportedBridge, + RebalanceOperationStatus, + MAINNET_CHAIN_ID, + MANTLE_CHAIN_ID, + PostBridgeActionType, + TokenRebalanceConfig, + WalletType, +} from '@mark/core'; +import { ProcessingContext } from '../../src/init'; +import { createDatabaseMock } from '../mocks/database'; +import { mockConfig } from '../mocks'; +import { AaveTokenFlowDescriptor } from '../../src/rebalance/aaveTokenRebalancer'; + +// --- Mocks --- + +jest.mock('../../src/helpers', () => { + const actual = jest.requireActual('../../src/helpers'); + return { + ...actual, + getEvmBalance: jest.fn().mockResolvedValue(0n), + safeParseBigInt: jest.fn((value: string | undefined) => { + if (!value) return 0n; + try { + return BigInt(value); + } catch { + return 0n; + } + }), + convertToNativeUnits: jest.fn((amount: bigint, decimals?: number) => { + const targetDecimals = decimals ?? 
18; + if (targetDecimals === 18) return amount; + const divisor = BigInt(10 ** (18 - targetDecimals)); + return amount / divisor; + }), + }; +}); + +jest.mock('../../src/helpers/zodiac', () => ({ + getValidatedZodiacConfig: jest.fn().mockReturnValue({ walletType: 'EOA' }), + getActualOwner: jest.fn((_zodiacConfig: unknown, ownAddress: string) => ownAddress), +})); + +jest.mock('../../src/helpers/transactions', () => ({ + submitTransactionWithLogging: jest.fn(() => + Promise.resolve({ + hash: '0xtestHash', + receipt: { + transactionHash: '0xtestHash', + blockNumber: 1000n, + blockHash: '0xblockhash', + from: '0xfrom', + to: '0xto', + cumulativeGasUsed: 100000n, + effectiveGasPrice: 1000000000n, + gasUsed: 50000n, + status: 'success', + contractAddress: null, + logs: [], + logsBloom: '0x', + transactionIndex: 0, + type: 'legacy', + }, + }), + ), +})); + +jest.mock('@mark/core', () => { + const actual = jest.requireActual('@mark/core'); + return { + ...actual, + getDecimalsFromConfig: jest.fn((tickerHash: string, _domain: string) => { + // USDC: 6 decimals + if (tickerHash === '0xusdc_ticker') return 6; + // aToken: 18 decimals (default for aManUSDe) or 6 for aMansyrupUSDT + if (tickerHash === '0xatoken_ticker') return 18; + if (tickerHash === '0xatoken_ticker_6dec') return 6; + // Intermediate token + if (tickerHash === '0xintermediate_ticker') return 18; + return undefined; + }), + getTokenAddressFromConfig: jest.fn((tickerHash: string, domain: string) => { + if (tickerHash === '0xusdc_ticker' && domain === MAINNET_CHAIN_ID) return '0xUSDC_MAINNET'; + if (tickerHash === '0xusdc_ticker' && domain === MANTLE_CHAIN_ID) return '0xUSDC_MANTLE'; + if (tickerHash === '0xatoken_ticker' && domain === MANTLE_CHAIN_ID) return '0xATOKEN_MANTLE'; + if (tickerHash === '0xatoken_ticker_6dec' && domain === MANTLE_CHAIN_ID) return '0xATOKEN_MANTLE_6DEC'; + if (tickerHash === '0xintermediate_ticker' && domain === MANTLE_CHAIN_ID) return '0xINTERMEDIATE_MANTLE'; + return undefined; + 
}), + }; +}); + +jest.mock('@mark/database', () => ({ + createRebalanceOperation: jest.fn().mockResolvedValue({ id: 'mock-op-id' }), + getRebalanceOperations: jest.fn().mockResolvedValue({ operations: [], total: 0 }), + updateRebalanceOperation: jest.fn().mockResolvedValue({ id: 'mock-op-id' }), + initializeDatabase: jest.fn(), + getPool: jest.fn(), + closeDatabase: jest.fn(), +})); + +jest.mock('@mark/rebalance', () => ({ + RebalanceTransactionMemo: { + Rebalance: 'rebalance', + Approval: 'approval', + AaveSupply: 'aave-supply', + DexSwap: 'dex-swap', + }, + buildTransactionsForAction: jest.fn().mockResolvedValue([]), + RebalanceAdapter: jest.fn(), +})); + +// Import after mocks +import { + rebalanceAaveToken, + executeAaveTokenCallbacks, + evaluateThresholdRebalance, + executeStargateBridgeForAaveToken, +} from '../../src/rebalance/aaveTokenRebalancer'; +import { rebalanceAManUsde, executeAManUsdeCallbacks } from '../../src/rebalance/aManUsde'; +import { rebalanceAMansyrupUsdt, executeAMansyrupUsdtCallbacks } from '../../src/rebalance/aMansyrupUsdt'; +import * as database from '@mark/database'; +import { getEvmBalance } from '../../src/helpers'; +import { submitTransactionWithLogging } from '../../src/helpers/transactions'; +import { buildTransactionsForAction } from '@mark/rebalance'; +import { getDecimalsFromConfig, getTokenAddressFromConfig } from '@mark/core'; + +// --- Test helpers --- + +function createMockDescriptor(overrides?: Partial): AaveTokenFlowDescriptor { + return { + name: 'TestAaveToken', + aTokenTickerHash: '0xatoken_ticker', + intermediateTokenTickerHash: '0xintermediate_ticker', + sourceTokenTickerHash: '0xusdc_ticker', + bridgeTag: 'stargate-testaave', + getConfig: (config) => config.aManUsdeRebalance, + buildPostBridgeActions: ({ sourceTokenOnMantle, intermediateTokenOnMantle, aavePoolAddress, dexSwapSlippageBps }) => [ + { + type: PostBridgeActionType.DexSwap as const, + sellToken: sourceTokenOnMantle, + buyToken: intermediateTokenOnMantle, 
+ slippageBps: dexSwapSlippageBps, + }, + { + type: PostBridgeActionType.AaveSupply as const, + poolAddress: aavePoolAddress, + supplyAsset: intermediateTokenOnMantle, + }, + ], + getAavePoolAddress: () => '0xAavePool', + getDexSwapSlippageBps: () => 100, + ...overrides, + }; +} + +function createMockTokenRebalanceConfig(overrides?: Partial): TokenRebalanceConfig { + return { + enabled: true, + marketMaker: { + address: '0xMM', + onDemandEnabled: false, + thresholdEnabled: false, + }, + fillService: { + address: '0xFS_RECEIVER', + senderAddress: '0xFS_SENDER', + thresholdEnabled: true, + threshold: '500000000000000000000', // 500 in 18 dec + targetBalance: '1000000000000000000000', // 1000 in 18 dec + }, + bridge: { + slippageDbps: 500, + minRebalanceAmount: '1000000', // 1 USDC in 6 dec + maxRebalanceAmount: '100000000', // 100 USDC in 6 dec + }, + ...overrides, + }; +} + +describe('Aave Token Rebalancer', () => { + let mockLogger: SinonStubbedInstance; + let mockChainService: SinonStubbedInstance; + let mockRebalanceAdapter: { isPaused: SinonStub; getAdapter: SinonStub }; + let mockDatabase: ReturnType; + let baseConfig: MarkConfiguration; + let mockContext: ProcessingContext; + + const MOCK_REQUEST_ID = 'aave-test-request'; + const MOCK_OWN_ADDRESS = '0x1234567890123456789012345678901234567890'; + + beforeEach(() => { + jest.clearAllMocks(); + + mockLogger = createStubInstance(Logger); + mockChainService = createStubInstance(ChainService); + mockRebalanceAdapter = { + isPaused: stub().resolves(false), + getAdapter: stub().returns(undefined), + }; + mockDatabase = createDatabaseMock(); + + baseConfig = { + ...mockConfig, + ownAddress: MOCK_OWN_ADDRESS, + aManUsdeRebalance: createMockTokenRebalanceConfig(), + } as MarkConfiguration; + + mockContext = { + config: baseConfig, + requestId: MOCK_REQUEST_ID, + startTime: Date.now(), + logger: mockLogger, + chainService: mockChainService, + fillServiceChainService: mockChainService, + rebalance: mockRebalanceAdapter, + 
database: mockDatabase, + everclear: { fetchIntents: stub().resolves([]) }, + prometheus: {} as any, + } as unknown as ProcessingContext; + + // Default database behavior + (database.getRebalanceOperations as jest.Mock).mockResolvedValue({ operations: [], total: 0 }); + (database.createRebalanceOperation as jest.Mock).mockResolvedValue({ id: 'mock-op-id' }); + (database.updateRebalanceOperation as jest.Mock).mockResolvedValue({ id: 'mock-op-id' }); + }); + + afterEach(() => { + restore(); + jest.clearAllMocks(); + }); + + // ========================================== + // A. rebalanceAaveToken (main entry) + // ========================================== + describe('rebalanceAaveToken', () => { + const descriptor = createMockDescriptor(); + + it('should return empty when config is disabled', async () => { + const config = { + ...baseConfig, + aManUsdeRebalance: { ...createMockTokenRebalanceConfig(), enabled: false }, + } as MarkConfiguration; + const ctx = { ...mockContext, config }; + + const result = await rebalanceAaveToken(ctx, descriptor); + + expect(result).toEqual([]); + expect(mockLogger.debug.calledWithMatch('TestAaveToken rebalancing disabled')).toBe(true); + }); + + it('should return empty when rebalance adapter is paused', async () => { + mockRebalanceAdapter.isPaused.resolves(true); + + const result = await rebalanceAaveToken(mockContext, descriptor); + + expect(result).toEqual([]); + expect(mockLogger.warn.calledWithMatch('Rebalance loop is paused')).toBe(true); + }); + + it('should return empty when fillService.address is missing', async () => { + const config = { + ...baseConfig, + aManUsdeRebalance: createMockTokenRebalanceConfig({ + fillService: { + address: undefined, + thresholdEnabled: true, + }, + }), + } as MarkConfiguration; + const ctx = { ...mockContext, config }; + + const result = await rebalanceAaveToken(ctx, descriptor); + + expect(result).toEqual([]); + expect(mockLogger.error.calledWithMatch('rebalance configuration validation 
failed')).toBe(true); + }); + + it('should return empty when bridge.minRebalanceAmount is missing', async () => { + const rebalanceConfig = createMockTokenRebalanceConfig(); + (rebalanceConfig.bridge as any).minRebalanceAmount = undefined; + const config = { + ...baseConfig, + aManUsdeRebalance: rebalanceConfig, + } as MarkConfiguration; + const ctx = { ...mockContext, config }; + + const result = await rebalanceAaveToken(ctx, descriptor); + + expect(result).toEqual([]); + expect(mockLogger.error.calledWithMatch('rebalance configuration validation failed')).toBe(true); + }); + + it('should call executeAaveTokenCallbacks before threshold evaluation', async () => { + // Config disabled means we return early after callbacks + const config = { + ...baseConfig, + aManUsdeRebalance: { ...createMockTokenRebalanceConfig(), enabled: false }, + } as MarkConfiguration; + const ctx = { ...mockContext, config }; + + await rebalanceAaveToken(ctx, descriptor); + + // Verify callbacks were executed (the callback function queries for operations) + expect( + (mockDatabase.getRebalanceOperations as SinonStub).calledWithMatch(undefined, undefined, { + bridge: 'stargate-testaave', + }), + ).toBe(true); + }); + + it('should return actions from evaluateThresholdRebalance', async () => { + // Set up a scenario where threshold rebalancing returns actions + (getEvmBalance as jest.Mock).mockResolvedValueOnce(0n); // aToken balance = 0 (below threshold) + (getEvmBalance as jest.Mock).mockResolvedValueOnce( + BigInt('2000000000000000000000'), // sender has 2000 USDC in 18 decimals + ); + + const mockStargateAdapter = { + type: stub().returns(SupportedBridge.Stargate), + getReceivedAmount: stub().resolves('99600000'), // passes slippage: min = 100M - 500k = 99.5M + send: stub().resolves([ + { + transaction: { to: '0xStargate', data: '0xdata', funcSig: 'send', value: 0 }, + memo: 'rebalance', + effectiveAmount: '100000000', + }, + ]), + }; + 
mockRebalanceAdapter.getAdapter.returns(mockStargateAdapter as any); + + const result = await rebalanceAaveToken(mockContext, descriptor); + + expect(result.length).toBe(1); + expect(result[0].bridge).toBe(SupportedBridge.Stargate); + }); + }); + + // ========================================== + // B. evaluateThresholdRebalance + // ========================================== + describe('evaluateThresholdRebalance', () => { + const descriptor = createMockDescriptor(); + const makeRunState = () => ({ committedAmount: 0n }); + + it('should return empty when thresholdEnabled is false', async () => { + const config = { + ...baseConfig, + aManUsdeRebalance: createMockTokenRebalanceConfig({ + fillService: { + address: '0xFS_RECEIVER', + thresholdEnabled: false, + }, + }), + } as MarkConfiguration; + const ctx = { ...mockContext, config }; + + const result = await evaluateThresholdRebalance(ctx, descriptor, makeRunState()); + + expect(result).toEqual([]); + }); + + it('should return empty when in-flight operations exist for this bridge tag', async () => { + (mockDatabase.getRebalanceOperations as SinonStub).resolves({ + operations: [{ id: 'existing-op', status: RebalanceOperationStatus.PENDING }], + total: 1, + }); + + const result = await evaluateThresholdRebalance(mockContext, descriptor, makeRunState()); + + expect(result).toEqual([]); + expect(mockLogger.info.calledWithMatch('in-flight')).toBe(true); + }); + + it('should return empty when aToken not found in chain config', async () => { + const badDescriptor = createMockDescriptor({ aTokenTickerHash: '0xnonexistent' }); + + const result = await evaluateThresholdRebalance(mockContext, badDescriptor, makeRunState()); + + expect(result).toEqual([]); + expect(mockLogger.warn.calledWithMatch('failed to get recipient balance')).toBe(true); + }); + + it('should return empty when aToken balance >= threshold', async () => { + // Balance of 600 in 18 dec, threshold is 500 + (getEvmBalance as 
jest.Mock).mockResolvedValueOnce(BigInt('600000000000000000000')); + + const result = await evaluateThresholdRebalance(mockContext, descriptor, makeRunState()); + + expect(result).toEqual([]); + expect(mockLogger.info.calledWithMatch('no rebalance needed')).toBe(true); + }); + + it('should return empty when shortfall < minRebalanceAmount', async () => { + // Balance just slightly below threshold — shortfall is tiny + // threshold = 500e18, target = 1000e18, balance = 999.9999e18 → shortfall ≈ 0.0001e18 → in USDC: ~0 + (getEvmBalance as jest.Mock).mockResolvedValueOnce(BigInt('999999900000000000000')); + + const result = await evaluateThresholdRebalance(mockContext, descriptor, makeRunState()); + + expect(result).toEqual([]); + }); + + it('should return empty when sender balance < minRebalanceAmount after conversion', async () => { + // aToken balance = 0, so shortfall is large, but sender has very little USDC + (getEvmBalance as jest.Mock).mockResolvedValueOnce(0n); // aToken = 0 + (getEvmBalance as jest.Mock).mockResolvedValueOnce(BigInt('100000000000')); // sender = 0.0001 USDC in 18 dec + + const result = await evaluateThresholdRebalance(mockContext, descriptor, makeRunState()); + + expect(result).toEqual([]); + }); + + it('should cap amount at maxRebalanceAmount', async () => { + // aToken balance = 0, shortfall = 1000 in USDC (1000e6), sender has plenty + // maxRebalanceAmount = 100e6 USDC + (getEvmBalance as jest.Mock).mockResolvedValueOnce(0n); // aToken = 0 + (getEvmBalance as jest.Mock).mockResolvedValueOnce( + BigInt('5000000000000000000000'), // sender = 5000 USDC in 18 dec + ); + + const mockStargateAdapter = { + type: stub().returns(SupportedBridge.Stargate), + getReceivedAmount: stub().resolves('99600000'), // passes slippage: min = 100M - 500k = 99.5M + send: stub().resolves([ + { + transaction: { to: '0xStargate', data: '0xdata', funcSig: 'send', value: 0 }, + memo: 'rebalance', + effectiveAmount: '100000000', + }, + ]), + }; + 
mockRebalanceAdapter.getAdapter.returns(mockStargateAdapter as any); + + const result = await evaluateThresholdRebalance(mockContext, descriptor, makeRunState()); + + // Should have capped at 100e6 (maxRebalanceAmount) + expect(result.length).toBe(1); + expect(result[0].amount).toBe('100000000'); + }); + + it('should bridge shortfall amount when shortfall < sender balance', async () => { + // aToken balance = 800e18, threshold = 900e18 (below → triggers), target = 1000e18 + // Shortfall = 1000e18 - 800e18 = 200e18 → 200e6 USDC. Sender has 5000 USDC. + // maxRebalanceAmount = 500e6 — higher than shortfall, so bridge 200e6 + const config = { + ...baseConfig, + aManUsdeRebalance: createMockTokenRebalanceConfig({ + fillService: { + address: '0xFS_RECEIVER', + senderAddress: '0xFS_SENDER', + thresholdEnabled: true, + threshold: '900000000000000000000', // 900 in 18 dec — balance 800 is below this + targetBalance: '1000000000000000000000', + }, + bridge: { + slippageDbps: 500, + minRebalanceAmount: '1000000', + maxRebalanceAmount: '500000000', // 500 USDC — higher than shortfall + }, + }), + } as MarkConfiguration; + const ctx = { ...mockContext, config }; + + (getEvmBalance as jest.Mock).mockResolvedValueOnce(BigInt('800000000000000000000')); // aToken = 800 + (getEvmBalance as jest.Mock).mockResolvedValueOnce( + BigInt('5000000000000000000000'), // sender = 5000 USDC in 18 dec + ); + + const mockStargateAdapter = { + type: stub().returns(SupportedBridge.Stargate), + getReceivedAmount: stub().resolves('199500000'), // passes slippage: min = 200M - 1M = 199M + send: stub().resolves([ + { + transaction: { to: '0xStargate', data: '0xdata', funcSig: 'send', value: 0 }, + memo: 'rebalance', + effectiveAmount: '200000000', + }, + ]), + }; + mockRebalanceAdapter.getAdapter.returns(mockStargateAdapter as any); + + const result = await evaluateThresholdRebalance(ctx, descriptor, makeRunState()); + + expect(result.length).toBe(1); + // Shortfall = 200e18 → 200e6 in USDC, sender 
has 5000e6. Bridge 200e6. + expect(result[0].amount).toBe('200000000'); + }); + + it('should bridge sender balance when sender balance < shortfall', async () => { + // aToken = 0, shortfall = 1000e18 → 1000e6 USDC, sender has only 50 USDC + const config = { + ...baseConfig, + aManUsdeRebalance: createMockTokenRebalanceConfig({ + bridge: { + slippageDbps: 500, + minRebalanceAmount: '1000000', // 1 USDC + }, + }), + } as MarkConfiguration; + const ctx = { ...mockContext, config }; + + (getEvmBalance as jest.Mock) + .mockResolvedValueOnce(0n) // aToken = 0 + .mockResolvedValueOnce(BigInt('50000000000000000000')); // sender = 50 USDC in 18 dec + + const mockStargateAdapter = { + type: stub().returns(SupportedBridge.Stargate), + getReceivedAmount: stub().resolves('49800000'), // passes slippage: min = 50M - 250k = 49.75M + send: stub().resolves([ + { + transaction: { to: '0xStargate', data: '0xdata', funcSig: 'send', value: 0 }, + memo: 'rebalance', + effectiveAmount: '50000000', + }, + ]), + }; + mockRebalanceAdapter.getAdapter.returns(mockStargateAdapter as any); + + const result = await evaluateThresholdRebalance(ctx, descriptor, makeRunState()); + + expect(result.length).toBe(1); + // sender has 50e6 USDC which is less than 1000e6 shortfall → bridge 50e6 + expect(result[0].amount).toBe('50000000'); + }); + + it('should correctly convert between different decimal aToken and source token', async () => { + // Test with 6-decimal aToken (like aMansyrupUSDT) + const descriptor6dec = createMockDescriptor({ + aTokenTickerHash: '0xatoken_ticker_6dec', + }); + + const config = { + ...baseConfig, + aManUsdeRebalance: createMockTokenRebalanceConfig({ + fillService: { + address: '0xFS_RECEIVER', + senderAddress: '0xFS_SENDER', + thresholdEnabled: true, + // With 6-decimal aToken, getEvmBalance normalizes to 18 dec + threshold: '500000000000000000000', // 500 in 18 dec + targetBalance: '1000000000000000000000', // 1000 in 18 dec + }, + bridge: { + slippageDbps: 500, + 
minRebalanceAmount: '1000000', + maxRebalanceAmount: undefined, + }, + }), + } as MarkConfiguration; + const ctx = { ...mockContext, config }; + + // Balance is 0 → shortfall is 1000e18 → 1000e6 in USDC + (getEvmBalance as jest.Mock).mockResolvedValueOnce(0n); + (getEvmBalance as jest.Mock).mockResolvedValueOnce( + BigInt('2000000000000000000000'), // sender = 2000 USDC in 18 dec + ); + + const mockStargateAdapter = { + type: stub().returns(SupportedBridge.Stargate), + getReceivedAmount: stub().resolves('999000000'), // passes slippage: min = 1000M - 5M = 995M + send: stub().resolves([ + { + transaction: { to: '0xStargate', data: '0xdata', funcSig: 'send', value: 0 }, + memo: 'rebalance', + effectiveAmount: '1000000000', + }, + ]), + }; + mockRebalanceAdapter.getAdapter.returns(mockStargateAdapter as any); + + const result = await evaluateThresholdRebalance(ctx, descriptor6dec, makeRunState()); + + expect(result.length).toBe(1); + }); + }); + + // ========================================== + // C. 
executeStargateBridgeForAaveToken + // ========================================== + describe('executeStargateBridgeForAaveToken', () => { + const descriptor = createMockDescriptor(); + + it('should return empty when Stargate adapter not found', async () => { + mockRebalanceAdapter.getAdapter.returns(undefined); + + const result = await executeStargateBridgeForAaveToken( + mockContext, + descriptor, + '0xSender', + '0xRecipient', + 1000000n, + ); + + expect(result).toEqual([]); + expect(mockLogger.error.calledWithMatch('Stargate adapter not found')).toBe(true); + }); + + it('should return empty when quote fails slippage check', async () => { + const mockStargateAdapter = { + type: stub().returns(SupportedBridge.Stargate), + getReceivedAmount: stub().resolves('1'), // Way too low + send: stub().resolves([]), + }; + mockRebalanceAdapter.getAdapter.returns(mockStargateAdapter as any); + + const result = await executeStargateBridgeForAaveToken( + mockContext, + descriptor, + '0xSender', + '0xRecipient', + 1000000n, + ); + + expect(result).toEqual([]); + expect(mockLogger.warn.calledWithMatch('does not meet slippage requirements')).toBe(true); + }); + + it('should submit transactions and create DB record with correct bridge tag', async () => { + const mockStargateAdapter = { + type: stub().returns(SupportedBridge.Stargate), + getReceivedAmount: stub().resolves('995000'), + send: stub().resolves([ + { + transaction: { to: '0xStargate', data: '0xBridgeData', funcSig: 'send', value: 0 }, + memo: 'rebalance', + effectiveAmount: '1000000', + }, + ]), + }; + mockRebalanceAdapter.getAdapter.returns(mockStargateAdapter as any); + + const result = await executeStargateBridgeForAaveToken( + mockContext, + descriptor, + '0xSender', + '0xRecipient', + 1000000n, + ); + + expect(result.length).toBe(1); + expect(result[0].bridge).toBe(SupportedBridge.Stargate); + expect(result[0].recipient).toBe('0xRecipient'); + + // Verify DB record was created with correct bridge tag + 
expect(database.createRebalanceOperation).toHaveBeenCalledWith( + expect.objectContaining({ + bridge: 'stargate-testaave', + tickerHash: '0xusdc_ticker', + status: RebalanceOperationStatus.PENDING, + }), + ); + }); + + it('should return RebalanceAction with correct fields', async () => { + const mockStargateAdapter = { + type: stub().returns(SupportedBridge.Stargate), + getReceivedAmount: stub().resolves('996000'), // passes slippage: min = 1M - 5k = 995k + send: stub().resolves([ + { + transaction: { to: '0xStargate', data: '0xdata', funcSig: 'send', value: 0 }, + memo: 'rebalance', + effectiveAmount: '1000000', + }, + ]), + }; + mockRebalanceAdapter.getAdapter.returns(mockStargateAdapter as any); + + const result = await executeStargateBridgeForAaveToken( + mockContext, + descriptor, + '0xSender', + '0xRecipient', + 1000000n, + ); + + expect(result.length).toBe(1); + expect(result[0]).toEqual( + expect.objectContaining({ + bridge: SupportedBridge.Stargate, + amount: '1000000', + origin: 1, + destination: 5000, + asset: '0xUSDC_MAINNET', + recipient: '0xRecipient', + }), + ); + }); + }); + + // ========================================== + // D. 
executeAaveTokenCallbacks + // ========================================== + describe('executeAaveTokenCallbacks', () => { + const descriptor = createMockDescriptor(); + + it('should cancel timed-out operations', async () => { + const expiredDate = new Date(Date.now() - 25 * 60 * 60 * 1000); // 25 hours ago + (mockDatabase.getRebalanceOperations as SinonStub).resolves({ + operations: [ + { + id: 'op-expired', + status: RebalanceOperationStatus.PENDING, + bridge: 'stargate-testaave', + originChainId: 1, + destinationChainId: 5000, + createdAt: expiredDate, + transactions: {}, + }, + ], + total: 1, + }); + + await executeAaveTokenCallbacks(mockContext, descriptor); + + expect((mockDatabase.updateRebalanceOperation as SinonStub).calledWith('op-expired', { + status: RebalanceOperationStatus.CANCELLED, + })).toBe(true); + }); + + it('should transition PENDING -> AWAITING_CALLBACK when readyOnDestination is true', async () => { + const mockStargateAdapter = { + readyOnDestination: stub().resolves(true), + destinationCallback: stub().resolves(null), + }; + mockRebalanceAdapter.getAdapter.returns(mockStargateAdapter as any); + + (mockDatabase.getRebalanceOperations as SinonStub).resolves({ + operations: [ + { + id: 'op-pending', + status: RebalanceOperationStatus.PENDING, + bridge: 'stargate-testaave', + originChainId: 1, + destinationChainId: 5000, + tickerHash: '0xusdc_ticker', + amount: '1000000', + createdAt: new Date(), + transactions: { + 1: { + transactionHash: '0xOriginTx', + metadata: { receipt: { transactionHash: '0xOriginTx' } }, + }, + }, + }, + ], + total: 1, + }); + + await executeAaveTokenCallbacks(mockContext, descriptor); + + // Should update to AWAITING_CALLBACK first, then to AWAITING_POST_BRIDGE + const updateCalls = (mockDatabase.updateRebalanceOperation as SinonStub).getCalls(); + expect(updateCalls.some((call) => call.args[1].status === RebalanceOperationStatus.AWAITING_CALLBACK)).toBe( + true, + ); + }); + + it('should stay PENDING when 
readyOnDestination is false', async () => { + const mockStargateAdapter = { + readyOnDestination: stub().resolves(false), + }; + mockRebalanceAdapter.getAdapter.returns(mockStargateAdapter as any); + + (mockDatabase.getRebalanceOperations as SinonStub).resolves({ + operations: [ + { + id: 'op-pending', + status: RebalanceOperationStatus.PENDING, + bridge: 'stargate-testaave', + originChainId: 1, + destinationChainId: 5000, + tickerHash: '0xusdc_ticker', + amount: '1000000', + createdAt: new Date(), + transactions: { + 1: { + transactionHash: '0xOriginTx', + metadata: { receipt: { transactionHash: '0xOriginTx' } }, + }, + }, + }, + ], + total: 1, + }); + + await executeAaveTokenCallbacks(mockContext, descriptor); + + // No status update should have happened + expect((mockDatabase.updateRebalanceOperation as SinonStub).called).toBe(false); + }); + + it('should transition AWAITING_CALLBACK -> AWAITING_POST_BRIDGE after callback', async () => { + const mockStargateAdapter = { + destinationCallback: stub().resolves({ + transaction: { to: '0xCallbackTarget', data: '0xCallbackData', funcSig: 'callback' }, + memo: 'destination-callback', + }), + }; + mockRebalanceAdapter.getAdapter.returns(mockStargateAdapter as any); + + (mockDatabase.getRebalanceOperations as SinonStub).resolves({ + operations: [ + { + id: 'op-awaiting-callback', + status: RebalanceOperationStatus.AWAITING_CALLBACK, + bridge: 'stargate-testaave', + originChainId: 1, + destinationChainId: 5000, + tickerHash: '0xusdc_ticker', + amount: '1000000', + createdAt: new Date(), + transactions: { + 1: { + transactionHash: '0xOriginTx', + metadata: { receipt: { transactionHash: '0xOriginTx' } }, + }, + }, + }, + ], + total: 1, + }); + + await executeAaveTokenCallbacks(mockContext, descriptor); + + const updateCalls = (mockDatabase.updateRebalanceOperation as SinonStub).getCalls(); + expect( + updateCalls.some((call) => call.args[1].status === RebalanceOperationStatus.AWAITING_POST_BRIDGE), + ).toBe(true); + 
expect(submitTransactionWithLogging).toHaveBeenCalled(); + }); + + it('should handle no destination callback (Stargate case)', async () => { + const mockStargateAdapter = { + destinationCallback: stub().resolves(null), + }; + mockRebalanceAdapter.getAdapter.returns(mockStargateAdapter as any); + + (mockDatabase.getRebalanceOperations as SinonStub).resolves({ + operations: [ + { + id: 'op-awaiting-callback', + status: RebalanceOperationStatus.AWAITING_CALLBACK, + bridge: 'stargate-testaave', + originChainId: 1, + destinationChainId: 5000, + tickerHash: '0xusdc_ticker', + amount: '1000000', + createdAt: new Date(), + transactions: { + 1: { + transactionHash: '0xOriginTx', + metadata: { receipt: { transactionHash: '0xOriginTx' } }, + }, + }, + }, + ], + total: 1, + }); + + await executeAaveTokenCallbacks(mockContext, descriptor); + + // Should still transition to AWAITING_POST_BRIDGE even without callback + const updateCalls = (mockDatabase.updateRebalanceOperation as SinonStub).getCalls(); + expect( + updateCalls.some((call) => call.args[1].status === RebalanceOperationStatus.AWAITING_POST_BRIDGE), + ).toBe(true); + expect(mockLogger.info.calledWithMatch('No destination callback required')).toBe(true); + }); + + it('should execute post-bridge DexSwap + AaveSupply and transition to COMPLETED', async () => { + (buildTransactionsForAction as jest.Mock) + .mockResolvedValueOnce([ + { + transaction: { to: '0xDex', data: '0xSwapData', funcSig: 'swap' }, + memo: 'dex-swap', + effectiveAmount: '1000000', + }, + ]) + .mockResolvedValueOnce([ + { + transaction: { to: '0xAave', data: '0xSupplyData', funcSig: 'supply' }, + memo: 'aave-supply', + }, + ]); + + mockRebalanceAdapter.getAdapter.returns({} as any); // adapter not used for post-bridge + + (mockDatabase.getRebalanceOperations as SinonStub).resolves({ + operations: [ + { + id: 'op-post-bridge', + status: RebalanceOperationStatus.AWAITING_POST_BRIDGE, + bridge: 'stargate-testaave', + originChainId: 1, + 
destinationChainId: 5000, + tickerHash: '0xusdc_ticker', + amount: '1000000', + createdAt: new Date(), + transactions: {}, + }, + ], + total: 1, + }); + + await executeAaveTokenCallbacks(mockContext, descriptor); + + // Should have called buildTransactionsForAction twice (DexSwap + AaveSupply) + expect(buildTransactionsForAction).toHaveBeenCalledTimes(2); + + // Should have submitted both transactions + expect(submitTransactionWithLogging).toHaveBeenCalledTimes(2); + + // Should update to COMPLETED + const updateCalls = (mockDatabase.updateRebalanceOperation as SinonStub).getCalls(); + expect(updateCalls.some((call) => call.args[1].status === RebalanceOperationStatus.COMPLETED)).toBe(true); + }); + + it('should stay AWAITING_POST_BRIDGE on error for retry next cycle', async () => { + (buildTransactionsForAction as jest.Mock).mockRejectedValueOnce(new Error('DexSwap failed')); + + mockRebalanceAdapter.getAdapter.returns({} as any); + + (mockDatabase.getRebalanceOperations as SinonStub).resolves({ + operations: [ + { + id: 'op-post-bridge', + status: RebalanceOperationStatus.AWAITING_POST_BRIDGE, + bridge: 'stargate-testaave', + originChainId: 1, + destinationChainId: 5000, + tickerHash: '0xusdc_ticker', + amount: '1000000', + createdAt: new Date(), + transactions: {}, + }, + ], + total: 1, + }); + + await executeAaveTokenCallbacks(mockContext, descriptor); + + // Should NOT have updated to COMPLETED + const updateCalls = (mockDatabase.updateRebalanceOperation as SinonStub).getCalls(); + expect(updateCalls.some((call) => call.args[1].status === RebalanceOperationStatus.COMPLETED)).toBe(false); + + // Should have logged the error + expect(mockLogger.error.calledWithMatch('Failed to execute post-bridge actions, will retry')).toBe(true); + }); + + it('should error when aavePoolAddress is not set', async () => { + const descriptorNoPool = createMockDescriptor({ + getAavePoolAddress: () => undefined, + }); + + mockRebalanceAdapter.getAdapter.returns({} as any); + + 
(mockDatabase.getRebalanceOperations as SinonStub).resolves({ + operations: [ + { + id: 'op-post-bridge', + status: RebalanceOperationStatus.AWAITING_POST_BRIDGE, + bridge: 'stargate-testaave', + originChainId: 1, + destinationChainId: 5000, + tickerHash: '0xusdc_ticker', + amount: '1000000', + createdAt: new Date(), + transactions: {}, + }, + ], + total: 1, + }); + + await executeAaveTokenCallbacks(mockContext, descriptorNoPool); + + expect(mockLogger.error.calledWithMatch('Aave pool address not set')).toBe(true); + // Should NOT update to COMPLETED + expect((mockDatabase.updateRebalanceOperation as SinonStub).called).toBe(false); + }); + + it('should use descriptor.buildPostBridgeActions to construct the action sequence', async () => { + const buildPostBridgeActionsSpy = jest.fn().mockReturnValue([ + { + type: PostBridgeActionType.DexSwap, + sellToken: '0xUSDC_MANTLE', + buyToken: '0xINTERMEDIATE_MANTLE', + slippageBps: 100, + }, + { + type: PostBridgeActionType.AaveSupply, + poolAddress: '0xAavePool', + supplyAsset: '0xINTERMEDIATE_MANTLE', + }, + ]); + + const descriptorWithSpy = createMockDescriptor({ + buildPostBridgeActions: buildPostBridgeActionsSpy, + }); + + (buildTransactionsForAction as jest.Mock).mockResolvedValue([]); + mockRebalanceAdapter.getAdapter.returns({} as any); + + (mockDatabase.getRebalanceOperations as SinonStub).resolves({ + operations: [ + { + id: 'op-post-bridge', + status: RebalanceOperationStatus.AWAITING_POST_BRIDGE, + bridge: 'stargate-testaave', + originChainId: 1, + destinationChainId: 5000, + tickerHash: '0xusdc_ticker', + amount: '1000000', + createdAt: new Date(), + transactions: {}, + }, + ], + total: 1, + }); + + await executeAaveTokenCallbacks(mockContext, descriptorWithSpy); + + expect(buildPostBridgeActionsSpy).toHaveBeenCalledWith({ + sourceTokenOnMantle: '0xUSDC_MANTLE', + intermediateTokenOnMantle: '0xINTERMEDIATE_MANTLE', + aavePoolAddress: '0xAavePool', + dexSwapSlippageBps: 100, + }); + }); + }); + + // 
========================================== + // E. Thin wrapper wiring + // ========================================== + describe('Thin wrapper wiring', () => { + it('rebalanceAManUsde delegates to rebalanceAaveToken with correct descriptor', async () => { + // With config disabled, it should return empty - proving delegation works + const config = { + ...baseConfig, + aManUsdeRebalance: { ...createMockTokenRebalanceConfig(), enabled: false }, + } as MarkConfiguration; + const ctx = { ...mockContext, config }; + + const result = await rebalanceAManUsde(ctx); + + expect(result).toEqual([]); + expect(mockLogger.debug.calledWithMatch('aManUSDe rebalancing disabled')).toBe(true); + }); + + it('rebalanceAMansyrupUsdt delegates to rebalanceAaveToken with correct descriptor', async () => { + // With no aMansyrupUsdtRebalance config, it should return empty + const config = { + ...baseConfig, + aMansyrupUsdtRebalance: { ...createMockTokenRebalanceConfig(), enabled: false }, + } as MarkConfiguration; + const ctx = { ...mockContext, config }; + + const result = await rebalanceAMansyrupUsdt(ctx); + + expect(result).toEqual([]); + expect(mockLogger.debug.calledWithMatch('aMansyrupUSDT rebalancing disabled')).toBe(true); + }); + + it('executeAManUsdeCallbacks delegates with correct bridge tag', async () => { + await executeAManUsdeCallbacks(mockContext); + + expect( + (mockDatabase.getRebalanceOperations as SinonStub).calledWithMatch(undefined, undefined, { + bridge: 'stargate-amanusde', + }), + ).toBe(true); + }); + + it('executeAMansyrupUsdtCallbacks delegates with correct bridge tag', async () => { + await executeAMansyrupUsdtCallbacks(mockContext); + + expect( + (mockDatabase.getRebalanceOperations as SinonStub).calledWithMatch(undefined, undefined, { + bridge: 'stargate-amansyrupusdt', + }), + ).toBe(true); + }); + }); +}); diff --git a/packages/poller/test/rebalance/bridgeExecution.spec.ts b/packages/poller/test/rebalance/bridgeExecution.spec.ts new file mode 100644 index 
00000000..9f1ef10c --- /dev/null +++ b/packages/poller/test/rebalance/bridgeExecution.spec.ts @@ -0,0 +1,455 @@ +import { describe, it, expect, beforeEach, afterEach } from '@jest/globals'; + +// Mock database functions +jest.mock('@mark/database', () => ({ + createRebalanceOperation: jest.fn().mockResolvedValue({ id: 'mock-op-id' }), + initializeDatabase: jest.fn(), + getPool: jest.fn(), +})); + +// Mock transaction helpers +jest.mock('../../src/helpers/transactions', () => ({ + submitTransactionWithLogging: jest.fn(() => + Promise.resolve({ + hash: '0xBridgeTxHash', + receipt: { + transactionHash: '0xBridgeTxHash', + blockNumber: 1000n, + blockHash: '0xblockhash', + from: '0xfrom', + to: '0xto', + cumulativeGasUsed: 100000n, + effectiveGasPrice: 1000000000n, + gasUsed: 50000n, + status: 'success', + contractAddress: null, + logs: [], + logsBloom: '0x', + transactionIndex: 0, + type: 'legacy', + }, + }), + ), +})); + +import { submitBridgeTransactions, executeEvmBridge } from '../../src/rebalance/bridgeExecution'; +import { submitTransactionWithLogging } from '../../src/helpers/transactions'; +import * as database from '@mark/database'; +import { + SupportedBridge, + RebalanceOperationStatus, + WalletType, + DBPS_MULTIPLIER, +} from '@mark/core'; +import { RebalanceTransactionMemo } from '@mark/rebalance'; +import { Logger } from '@mark/logger'; +import { ChainService } from '@mark/chainservice'; +import { ProcessingContext } from '../../src/init'; + +// --- Helpers --- + +function createMockLogger(): Logger { + return { + info: jest.fn(), + warn: jest.fn(), + error: jest.fn(), + debug: jest.fn(), + child: jest.fn().mockReturnThis(), + } as unknown as Logger; +} + +function createMockChainService(): ChainService { + return { + submitAndMonitor: jest.fn(), + getBalance: jest.fn(), + } as unknown as ChainService; +} + +function createMockAdapter(overrides?: { + getReceivedAmount?: jest.Mock; + send?: jest.Mock; +}) { + return { + type: 
jest.fn().mockReturnValue(SupportedBridge.Stargate), + getReceivedAmount: overrides?.getReceivedAmount ?? jest.fn().mockResolvedValue('9995'), + send: overrides?.send ?? jest.fn().mockResolvedValue([ + { + transaction: { + to: '0xBridgeContract', + data: '0xApprovalData', + value: 0n, + funcSig: 'approve(address,uint256)', + }, + memo: RebalanceTransactionMemo.Approval, + }, + { + transaction: { + to: '0xBridgeContract', + data: '0xBridgeData', + value: 0n, + funcSig: 'bridge(uint256)', + }, + memo: RebalanceTransactionMemo.Rebalance, + effectiveAmount: '9950', + }, + ]), + destinationCallback: jest.fn(), + readyOnDestination: jest.fn(), + getMinimumAmount: jest.fn(), + }; +} + +const MOCK_CONFIG = { + ownAddress: '0xOwner', +} as ProcessingContext['config']; + +// --- Tests --- + +describe('submitBridgeTransactions', () => { + const mockLogger = createMockLogger(); + const mockChainService = createMockChainService(); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('submits all transactions and captures receipt from Rebalance memo', async () => { + const bridgeTxRequests = [ + { + transaction: { to: '0xBridge', data: '0xApproval', value: 0n, funcSig: 'approve' }, + memo: RebalanceTransactionMemo.Approval, + }, + { + transaction: { to: '0xBridge', data: '0xBridge', value: 0n, funcSig: 'bridge' }, + memo: RebalanceTransactionMemo.Rebalance, + effectiveAmount: '950', + }, + ]; + + const result = await submitBridgeTransactions({ + context: { logger: mockLogger, config: MOCK_CONFIG, requestId: 'test-1' }, + chainService: mockChainService, + route: { origin: 1, destination: 5000, asset: '0xWETH' }, + bridgeType: SupportedBridge.Across, + bridgeTxRequests: bridgeTxRequests as any, + amountToBridge: 1000n, + }); + + expect(submitTransactionWithLogging).toHaveBeenCalledTimes(2); + expect(result.receipt).toBeDefined(); + expect(result.receipt!.transactionHash).toBe('0xBridgeTxHash'); + expect(result.effectiveBridgedAmount).toBe('950'); + }); + + it('uses 
senderOverride address when provided', async () => { + const bridgeTxRequests = [ + { + transaction: { to: '0xBridge', data: '0xData', value: 0n, funcSig: '' }, + memo: RebalanceTransactionMemo.Rebalance, + }, + ]; + + await submitBridgeTransactions({ + context: { logger: mockLogger, config: MOCK_CONFIG, requestId: 'test-2' }, + chainService: mockChainService, + route: { origin: 1, destination: 5000, asset: '0xWETH' }, + bridgeType: SupportedBridge.Across, + bridgeTxRequests: bridgeTxRequests as any, + amountToBridge: 1000n, + senderOverride: { address: '0xFiller', label: 'fill-service' }, + }); + + expect(submitTransactionWithLogging).toHaveBeenCalledWith( + expect.objectContaining({ + txRequest: expect.objectContaining({ from: '0xFiller' }), + }), + ); + }); + + it('defaults effectiveBridgedAmount to amountToBridge when no effectiveAmount', async () => { + const bridgeTxRequests = [ + { + transaction: { to: '0xBridge', data: '0xData', value: 0n, funcSig: '' }, + memo: RebalanceTransactionMemo.Rebalance, + // no effectiveAmount + }, + ]; + + const result = await submitBridgeTransactions({ + context: { logger: mockLogger, config: MOCK_CONFIG, requestId: 'test-3' }, + chainService: mockChainService, + route: { origin: 1, destination: 5000, asset: '0xWETH' }, + bridgeType: SupportedBridge.Across, + bridgeTxRequests: bridgeTxRequests as any, + amountToBridge: 2000n, + }); + + expect(result.effectiveBridgedAmount).toBe('2000'); + }); + + it('passes zodiacConfig through to submitTransactionWithLogging', async () => { + const bridgeTxRequests = [ + { + transaction: { to: '0xBridge', data: '0xData', value: 0n, funcSig: '' }, + memo: RebalanceTransactionMemo.Rebalance, + }, + ]; + const zodiacConfig = { + walletType: WalletType.Zodiac, + moduleAddress: '0xModule' as `0x${string}`, + roleKey: '0xRole' as `0x${string}`, + safeAddress: '0xSafe' as `0x${string}`, + }; + + await submitBridgeTransactions({ + context: { logger: mockLogger, config: MOCK_CONFIG, requestId: 'test-4' 
}, + chainService: mockChainService, + route: { origin: 1, destination: 5000, asset: '0xWETH' }, + bridgeType: SupportedBridge.Across, + bridgeTxRequests: bridgeTxRequests as any, + amountToBridge: 1000n, + zodiacConfig, + }); + + expect(submitTransactionWithLogging).toHaveBeenCalledWith( + expect.objectContaining({ zodiacConfig }), + ); + }); +}); + +describe('executeEvmBridge', () => { + let mockContext: ProcessingContext; + const mockLogger = createMockLogger(); + const mockChainService = createMockChainService(); + + beforeEach(() => { + jest.clearAllMocks(); + mockContext = { + logger: mockLogger, + requestId: 'test-exec-1', + config: MOCK_CONFIG, + chainService: mockChainService, + } as unknown as ProcessingContext; + }); + + it('completes 5-step bridge flow and returns action', async () => { + const adapter = createMockAdapter(); + + const result = await executeEvmBridge({ + context: mockContext, + adapter: adapter as any, + route: { origin: 1, destination: 5000, asset: '0xToken' }, + amount: 10000n, + sender: '0xSender', + recipient: '0xRecipient', + slippageTolerance: 100n, + slippageMultiplier: DBPS_MULTIPLIER, + chainService: mockChainService, + dbRecord: { + earmarkId: null, + tickerHash: '0xTickerHash', + bridgeTag: 'stargate-test', + status: RebalanceOperationStatus.PENDING, + }, + label: 'test bridge', + }); + + expect(adapter.getReceivedAmount).toHaveBeenCalledWith('10000', expect.any(Object)); + expect(adapter.send).toHaveBeenCalledWith('0xSender', '0xRecipient', '10000', expect.any(Object)); + expect(submitTransactionWithLogging).toHaveBeenCalledTimes(2); + expect(database.createRebalanceOperation).toHaveBeenCalledWith( + expect.objectContaining({ + earmarkId: null, + tickerHash: '0xTickerHash', + bridge: 'stargate-test', + status: RebalanceOperationStatus.PENDING, + recipient: '0xRecipient', + }), + ); + expect(result.actions).toHaveLength(1); + expect(result.actions[0].bridge).toBe(SupportedBridge.Stargate); + 
expect(result.effectiveBridgedAmount).toBe('9950'); + }); + + it('returns empty actions when quote fails', async () => { + const adapter = createMockAdapter({ + getReceivedAmount: jest.fn().mockRejectedValue(new Error('Quote API error')), + }); + + const result = await executeEvmBridge({ + context: mockContext, + adapter: adapter as any, + route: { origin: 1, destination: 5000, asset: '0xToken' }, + amount: 10000n, + sender: '0xSender', + recipient: '0xRecipient', + slippageTolerance: 100n, + slippageMultiplier: DBPS_MULTIPLIER, + chainService: mockChainService, + dbRecord: { + earmarkId: null, + tickerHash: '0xTickerHash', + bridgeTag: 'stargate-test', + status: RebalanceOperationStatus.PENDING, + }, + label: 'test bridge', + }); + + expect(result.actions).toHaveLength(0); + expect(adapter.send).not.toHaveBeenCalled(); + expect(database.createRebalanceOperation).not.toHaveBeenCalled(); + }); + + it('returns empty actions when slippage is too high', async () => { + // Quote returns 5000 for amount 10000 — 50% slippage exceeds 1% tolerance + const adapter = createMockAdapter({ + getReceivedAmount: jest.fn().mockResolvedValue('5000'), + }); + + const result = await executeEvmBridge({ + context: mockContext, + adapter: adapter as any, + route: { origin: 1, destination: 5000, asset: '0xToken' }, + amount: 10000n, + sender: '0xSender', + recipient: '0xRecipient', + slippageTolerance: 100n, // 1% in DBPS + slippageMultiplier: DBPS_MULTIPLIER, + chainService: mockChainService, + dbRecord: { + earmarkId: null, + tickerHash: '0xTickerHash', + bridgeTag: 'stargate-test', + status: RebalanceOperationStatus.PENDING, + }, + label: 'test bridge', + }); + + expect(result.actions).toHaveLength(0); + expect(adapter.send).not.toHaveBeenCalled(); + }); + + it('returns empty actions when send returns empty array', async () => { + const adapter = createMockAdapter({ + send: jest.fn().mockResolvedValue([]), + }); + + const result = await executeEvmBridge({ + context: mockContext, + 
adapter: adapter as any, + route: { origin: 1, destination: 5000, asset: '0xToken' }, + amount: 10000n, + sender: '0xSender', + recipient: '0xRecipient', + slippageTolerance: 100n, + slippageMultiplier: DBPS_MULTIPLIER, + chainService: mockChainService, + dbRecord: { + earmarkId: null, + tickerHash: '0xTickerHash', + bridgeTag: 'stargate-test', + status: RebalanceOperationStatus.PENDING, + }, + label: 'test bridge', + }); + + expect(result.actions).toHaveLength(0); + expect(database.createRebalanceOperation).not.toHaveBeenCalled(); + }); + + it('uses dbRecipient override for DB record and action', async () => { + const adapter = createMockAdapter(); + + const result = await executeEvmBridge({ + context: mockContext, + adapter: adapter as any, + route: { origin: 1, destination: 5000, asset: '0xToken' }, + amount: 10000n, + sender: '0xSender', + recipient: '0xTonRecipient', + dbRecipient: '0xTacRecipient', + slippageTolerance: 100n, + slippageMultiplier: DBPS_MULTIPLIER, + chainService: mockChainService, + dbRecord: { + earmarkId: 'earmark-1', + tickerHash: '0xTickerHash', + bridgeTag: 'stargate-tac', + status: RebalanceOperationStatus.PENDING, + }, + label: 'test TAC bridge', + }); + + // adapter.send should use tonRecipient (the `recipient` param) + expect(adapter.send).toHaveBeenCalledWith('0xSender', '0xTonRecipient', '10000', expect.any(Object)); + // DB record should use tacRecipient (the `dbRecipient` param) + expect(database.createRebalanceOperation).toHaveBeenCalledWith( + expect.objectContaining({ + recipient: '0xTacRecipient', + earmarkId: 'earmark-1', + }), + ); + // Action should also use tacRecipient + expect(result.actions[0].recipient).toBe('0xTacRecipient'); + }); + + it('uses dbAmount for DB record and action tracking when adapter uses native units', async () => { + const adapter = createMockAdapter({ + getReceivedAmount: jest.fn().mockResolvedValue('9995000'), // within slippage for 10000000 + }); + + const result = await executeEvmBridge({ + 
context: mockContext, + adapter: adapter as any, + route: { origin: 1, destination: 5000, asset: '0xToken' }, + amount: 10000000n, // 10 USDT in 6 decimals (native units for adapter) + dbAmount: 10000000000000000000n, // 10 USDT in 18 decimals (for DB/tracking) + sender: '0xSender', + recipient: '0xRecipient', + slippageTolerance: 100n, + slippageMultiplier: DBPS_MULTIPLIER, + chainService: mockChainService, + dbRecord: { + earmarkId: null, + tickerHash: '0xTickerHash', + bridgeTag: 'stargate-tac', + status: RebalanceOperationStatus.PENDING, + }, + label: 'test bridge', + }); + + // adapter.send should use the native-unit amount + expect(adapter.send).toHaveBeenCalledWith('0xSender', '0xRecipient', '10000000', expect.any(Object)); + // action amount should use dbAmount (18 decimals) for tracking + expect(result.actions[0].amount).toBe('10000000000000000000'); + // effectiveBridgedAmount default should use dbAmount (18 decimals) + // (the mock adapter returns effectiveAmount '9950' which overrides, so check via the mock) + expect(result.effectiveBridgedAmount).toBe('9950'); + }); + + it('throws on tx submission failure (caller catches)', async () => { + const adapter = createMockAdapter(); + (submitTransactionWithLogging as jest.Mock).mockRejectedValueOnce(new Error('tx failed')); + + await expect( + executeEvmBridge({ + context: mockContext, + adapter: adapter as any, + route: { origin: 1, destination: 5000, asset: '0xToken' }, + amount: 10000n, + sender: '0xSender', + recipient: '0xRecipient', + slippageTolerance: 100n, + slippageMultiplier: DBPS_MULTIPLIER, + chainService: mockChainService, + dbRecord: { + earmarkId: null, + tickerHash: '0xTickerHash', + bridgeTag: 'stargate-test', + status: RebalanceOperationStatus.PENDING, + }, + label: 'test bridge', + }), + ).rejects.toThrow('tx failed'); + }); +}); diff --git a/packages/poller/test/rebalance/callbackEngine.spec.ts b/packages/poller/test/rebalance/callbackEngine.spec.ts new file mode 100644 index 
00000000..1966c0b7 --- /dev/null +++ b/packages/poller/test/rebalance/callbackEngine.spec.ts @@ -0,0 +1,285 @@ +import { describe, it, expect, beforeEach } from '@jest/globals'; +import { runCallbackLoop, CallbackDescriptor, RebalanceOperation } from '../../src/rebalance/callbackEngine'; +import { ProcessingContext } from '../../src/init'; +import { Logger } from '@mark/logger'; +import { RebalanceOperationStatus } from '@mark/core'; + +// --- Helpers --- + +function createMockLogger(): Logger { + return { + info: jest.fn(), + warn: jest.fn(), + error: jest.fn(), + debug: jest.fn(), + child: jest.fn().mockReturnThis(), + } as unknown as Logger; +} + +function createMockOperation(overrides?: Partial<RebalanceOperation>): RebalanceOperation { + return { + id: 'op-1', + earmarkId: null, + originChainId: 1, + destinationChainId: 5000, + tickerHash: '0xabc', + amount: '1000000', + slippage: 100, + status: RebalanceOperationStatus.PENDING, + bridge: 'stargate-test', + createdAt: new Date(), // fresh — not timed out + updatedAt: new Date(), + recipient: '0xRecipient', + transactions: {}, + ...overrides, + } as unknown as RebalanceOperation; +} + +function createMockContext( + logger: Logger, + operations: RebalanceOperation[] = [], +): ProcessingContext { + return { + logger, + requestId: 'test-request-id', + config: {} as ProcessingContext['config'], + database: { + getRebalanceOperations: jest.fn().mockResolvedValue({ operations }), + updateRebalanceOperation: jest.fn().mockResolvedValue(undefined), + }, + } as unknown as ProcessingContext; +} + +// --- Tests --- + +describe('runCallbackLoop', () => { + let logger: Logger; + + beforeEach(() => { + jest.clearAllMocks(); + logger = createMockLogger(); + }); + + it('processes no operations when none found', async () => { + const context = createMockContext(logger, []); + const processOperation = jest.fn(); + const descriptor: CallbackDescriptor = { + name: 'TestBridge', + bridge: 'stargate-test', + statuses:
[RebalanceOperationStatus.PENDING], + processOperation, + }; + + await runCallbackLoop(context, descriptor); + + expect(processOperation).not.toHaveBeenCalled(); + expect(logger.info).toHaveBeenCalledWith( + expect.stringContaining('Executing callbacks for TestBridge'), + expect.any(Object), + ); + }); + + it('delegates each operation to processOperation', async () => { + const op1 = createMockOperation({ id: 'op-1' }); + const op2 = createMockOperation({ id: 'op-2' }); + const context = createMockContext(logger, [op1, op2]); + const processOperation = jest.fn(); + const descriptor: CallbackDescriptor = { + name: 'TestBridge', + bridge: 'stargate-test', + statuses: [RebalanceOperationStatus.PENDING], + processOperation, + }; + + await runCallbackLoop(context, descriptor); + + expect(processOperation).toHaveBeenCalledTimes(2); + expect(processOperation).toHaveBeenCalledWith(op1, context); + expect(processOperation).toHaveBeenCalledWith(op2, context); + }); + + it('marks timed-out operations and skips processing', async () => { + const timedOutOp = createMockOperation({ + id: 'op-timeout', + createdAt: new Date(Date.now() - 25 * 60 * 60 * 1000), // 25 hours ago + }); + const context = createMockContext(logger, [timedOutOp]); + const processOperation = jest.fn(); + const descriptor: CallbackDescriptor = { + name: 'TestBridge', + bridge: 'stargate-test', + statuses: [RebalanceOperationStatus.PENDING], + processOperation, + }; + + await runCallbackLoop(context, descriptor); + + expect(processOperation).not.toHaveBeenCalled(); + expect(context.database.updateRebalanceOperation).toHaveBeenCalledWith( + 'op-timeout', + { status: RebalanceOperationStatus.CANCELLED }, + ); + expect(logger.warn).toHaveBeenCalledWith( + expect.stringContaining('timed out'), + expect.objectContaining({ operationId: 'op-timeout' }), + ); + }); + + it('uses custom timeout status when provided', async () => { + const timedOutOp = createMockOperation({ + id: 'op-timeout', + createdAt: new 
Date(Date.now() - 25 * 60 * 60 * 1000), + }); + const context = createMockContext(logger, [timedOutOp]); + const descriptor: CallbackDescriptor = { + name: 'TestBridge', + bridge: 'stargate-test', + statuses: [RebalanceOperationStatus.PENDING], + timeoutStatus: RebalanceOperationStatus.FAILED, + processOperation: jest.fn(), + }; + + await runCallbackLoop(context, descriptor); + + expect(context.database.updateRebalanceOperation).toHaveBeenCalledWith( + 'op-timeout', + { status: RebalanceOperationStatus.FAILED }, + ); + }); + + it('calls onTimeout callback after updating status', async () => { + const timedOutOp = createMockOperation({ + id: 'op-timeout', + createdAt: new Date(Date.now() - 25 * 60 * 60 * 1000), + }); + const context = createMockContext(logger, [timedOutOp]); + const onTimeout = jest.fn(); + const descriptor: CallbackDescriptor = { + name: 'TestBridge', + bridge: 'stargate-test', + statuses: [RebalanceOperationStatus.PENDING], + onTimeout, + processOperation: jest.fn(), + }; + + await runCallbackLoop(context, descriptor); + + expect(onTimeout).toHaveBeenCalledWith(timedOutOp, context); + }); + + it('catches errors from processOperation without affecting other operations', async () => { + const op1 = createMockOperation({ id: 'op-1' }); + const op2 = createMockOperation({ id: 'op-2' }); + const context = createMockContext(logger, [op1, op2]); + const processOperation = jest.fn() + .mockRejectedValueOnce(new Error('process failed')) + .mockResolvedValueOnce(undefined); + const descriptor: CallbackDescriptor = { + name: 'TestBridge', + bridge: 'stargate-test', + statuses: [RebalanceOperationStatus.PENDING], + processOperation, + }; + + await runCallbackLoop(context, descriptor); + + // Both operations were attempted + expect(processOperation).toHaveBeenCalledTimes(2); + // Error was logged for op1 + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining('Failed to process TestBridge callback'), + expect.objectContaining({ operationId: 
'op-1', error: expect.any(Object) }), + ); + }); + + it('catches errors from timeout handling', async () => { + const timedOutOp = createMockOperation({ + id: 'op-timeout', + createdAt: new Date(Date.now() - 25 * 60 * 60 * 1000), + }); + const context = createMockContext(logger, [timedOutOp]); + (context.database.updateRebalanceOperation as jest.Mock).mockRejectedValueOnce(new Error('db error')); + const descriptor: CallbackDescriptor = { + name: 'TestBridge', + bridge: 'stargate-test', + statuses: [RebalanceOperationStatus.PENDING], + processOperation: jest.fn(), + }; + + await runCallbackLoop(context, descriptor); + + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining('Failed to handle timed-out'), + expect.objectContaining({ operationId: 'op-timeout', error: expect.any(Object) }), + ); + }); + + it('uses custom TTL from descriptor', async () => { + // Op is 2 hours old. Default TTL is 24h so it would NOT timeout. + // But custom TTL of 60 min means it SHOULD timeout. 
+ const op = createMockOperation({ + id: 'op-custom-ttl', + createdAt: new Date(Date.now() - 2 * 60 * 60 * 1000), // 2 hours ago + }); + const context = createMockContext(logger, [op]); + const processOperation = jest.fn(); + const descriptor: CallbackDescriptor = { + name: 'TestBridge', + bridge: 'stargate-test', + statuses: [RebalanceOperationStatus.PENDING], + ttlMinutes: 60, + processOperation, + }; + + await runCallbackLoop(context, descriptor); + + expect(processOperation).not.toHaveBeenCalled(); + expect(context.database.updateRebalanceOperation).toHaveBeenCalledWith( + 'op-custom-ttl', + { status: RebalanceOperationStatus.CANCELLED }, + ); + }); + + it('does not timeout operation without createdAt', async () => { + const op = createMockOperation({ + id: 'op-no-date', + createdAt: undefined as unknown as Date, + }); + const context = createMockContext(logger, [op]); + const processOperation = jest.fn(); + const descriptor: CallbackDescriptor = { + name: 'TestBridge', + bridge: 'stargate-test', + statuses: [RebalanceOperationStatus.PENDING], + processOperation, + }; + + await runCallbackLoop(context, descriptor); + + expect(processOperation).toHaveBeenCalledWith(op, context); + expect(context.database.updateRebalanceOperation).not.toHaveBeenCalled(); + }); + + it('passes bridge filter and statuses to database query', async () => { + const context = createMockContext(logger, []); + const descriptor: CallbackDescriptor = { + name: 'TestBridge', + bridge: ['stargate-test', 'across-test'], + statuses: [RebalanceOperationStatus.PENDING, RebalanceOperationStatus.AWAITING_CALLBACK], + chainId: 5000, + processOperation: jest.fn(), + }; + + await runCallbackLoop(context, descriptor); + + expect(context.database.getRebalanceOperations).toHaveBeenCalledWith( + undefined, + undefined, + { + status: [RebalanceOperationStatus.PENDING, RebalanceOperationStatus.AWAITING_CALLBACK], + bridge: ['stargate-test', 'across-test'], + chainId: 5000, + }, + ); + }); +}); diff --git 
a/packages/poller/test/rebalance/helpers.spec.ts b/packages/poller/test/rebalance/helpers.spec.ts new file mode 100644 index 00000000..79898f64 --- /dev/null +++ b/packages/poller/test/rebalance/helpers.spec.ts @@ -0,0 +1,96 @@ +import { describe, it, expect, beforeEach, afterEach } from '@jest/globals'; +import { + isOperationTimedOut, + getBridgeTypeFromTag, + registerBridgeTag, + DEFAULT_OPERATION_TTL_MINUTES, +} from '../../src/rebalance/helpers'; +import { SupportedBridge } from '@mark/core'; + +describe('isOperationTimedOut', () => { + it('returns false for a freshly created operation', () => { + const createdAt = new Date(); + expect(isOperationTimedOut(createdAt)).toBe(false); + }); + + it('returns true for an operation older than default TTL (24h)', () => { + const createdAt = new Date(Date.now() - (DEFAULT_OPERATION_TTL_MINUTES + 1) * 60 * 1000); + expect(isOperationTimedOut(createdAt)).toBe(true); + }); + + it('returns false for an operation just under the default TTL', () => { + const createdAt = new Date(Date.now() - (DEFAULT_OPERATION_TTL_MINUTES - 1) * 60 * 1000); + expect(isOperationTimedOut(createdAt)).toBe(false); + }); + + it('uses custom TTL when provided', () => { + const customTtl = 60; // 1 hour + const createdAt = new Date(Date.now() - 61 * 60 * 1000); // 61 minutes ago + expect(isOperationTimedOut(createdAt, customTtl)).toBe(true); + }); + + it('returns false when custom TTL is not exceeded', () => { + const customTtl = 60; + const createdAt = new Date(Date.now() - 59 * 60 * 1000); // 59 minutes ago + expect(isOperationTimedOut(createdAt, customTtl)).toBe(false); + }); + + it('returns true at the exact boundary (1ms over)', () => { + const ttl = 10; + const createdAt = new Date(Date.now() - ttl * 60 * 1000 - 1); + expect(isOperationTimedOut(createdAt, ttl)).toBe(true); + }); +}); + +describe('getBridgeTypeFromTag', () => { + it('resolves stargate-amanusde to Stargate (explicit mapping)', () => { + 
expect(getBridgeTypeFromTag('stargate-amanusde')).toBe(SupportedBridge.Stargate); + }); + + it('resolves stargate-amansyrupusdt to Stargate (explicit mapping)', () => { + expect(getBridgeTypeFromTag('stargate-amansyrupusdt')).toBe(SupportedBridge.Stargate); + }); + + it('resolves stargate-tac to Stargate (explicit mapping)', () => { + expect(getBridgeTypeFromTag('stargate-tac')).toBe(SupportedBridge.Stargate); + }); + + it('resolves mantle to Mantle (explicit mapping)', () => { + expect(getBridgeTypeFromTag(SupportedBridge.Mantle)).toBe(SupportedBridge.Mantle); + }); + + it('resolves across-mantle to Across (explicit mapping)', () => { + expect(getBridgeTypeFromTag(`${SupportedBridge.Across}-mantle`)).toBe(SupportedBridge.Across); + }); + + it('resolves ccip-solana-mainnet to CCIP (explicit mapping)', () => { + expect(getBridgeTypeFromTag('ccip-solana-mainnet')).toBe(SupportedBridge.CCIP); + }); + + it('falls back to prefix extraction for unknown tags with valid prefix', () => { + // 'linea' is a valid SupportedBridge, so 'linea-custom' should resolve + expect(getBridgeTypeFromTag('linea-custom')).toBe(SupportedBridge.Linea); + }); + + it('returns undefined for completely unknown tags', () => { + expect(getBridgeTypeFromTag('unknown-bridge-foo')).toBeUndefined(); + }); + + it('returns undefined for empty string', () => { + expect(getBridgeTypeFromTag('')).toBeUndefined(); + }); +}); + +describe('registerBridgeTag', () => { + it('registers a new tag that can be resolved', () => { + registerBridgeTag('custom-new-tag', SupportedBridge.Zircuit); + expect(getBridgeTypeFromTag('custom-new-tag')).toBe(SupportedBridge.Zircuit); + }); + + it('overrides existing fallback with explicit mapping', () => { + // Register 'pendle-override' explicitly — explicit mappings take + // precedence over any prefix-based fallback resolution + registerBridgeTag('pendle-override', SupportedBridge.Mantle); + expect(getBridgeTypeFromTag('pendle-override')).toBe(SupportedBridge.Mantle); + }); +}); diff --git
a/packages/poller/test/rebalance/invariants.spec.ts b/packages/poller/test/rebalance/invariants.spec.ts new file mode 100644 index 00000000..28ea5cd3 --- /dev/null +++ b/packages/poller/test/rebalance/invariants.spec.ts @@ -0,0 +1,422 @@ +/** + * Rebalancer invariant tests: verifies decimal conversions, amount math, + * address routing, and threshold/cap logic across all rebalancer flows. + * + * These tests exercise the ACTUAL helper functions (not mocks) to catch + * unit-mismatch bugs like comparing 18-decimal vs 6-decimal values. + */ +import { describe, it, expect } from '@jest/globals'; +import { convertTo18Decimals, convertToNativeUnits } from '../../src/helpers/asset'; +import { safeParseBigInt } from '../../src/helpers/balance'; + +// ─── Token decimal constants (mirrored from each rebalancer) ──────────────── + +const USDC_DECIMALS = 6; +const USDT_DECIMALS = 6; +const WETH_DECIMALS = 18; +const METH_DECIMALS = 18; +const AMANUSDE_DECIMALS = 18; // aToken on Mantle +const AMANSYRUPUSDT_DECIMALS = 6; // aToken on Mantle (syrupUSDT is 6 decimal) +const PTUSDE_MAINNET_DECIMALS = 18; +const PTUSDE_SOLANA_DECIMALS = 9; +const USDC_SOLANA_DECIMALS = 6; + +// ─── Decimal conversion tests ────────────────────────────────────────────── + +describe('Decimal conversions', () => { + describe('convertTo18Decimals', () => { + it('converts 6-decimal USDC to 18-decimal correctly', () => { + const oneUsdc = 1_000_000n; // 1 USDC in 6 decimals + const result = convertTo18Decimals(oneUsdc, USDC_DECIMALS); + expect(result).toBe(1_000_000_000_000_000_000n); // 1e18 + }); + + it('converts 9-decimal Solana ptUSDe to 18-decimal', () => { + const onePtUsde = 1_000_000_000n; // 1 ptUSDe in 9 decimals + const result = convertTo18Decimals(onePtUsde, PTUSDE_SOLANA_DECIMALS); + expect(result).toBe(1_000_000_000_000_000_000n); + }); + + it('is identity for 18-decimal tokens (WETH, mETH, aManUSDe)', () => { + const oneEth = 1_000_000_000_000_000_000n; + 
expect(convertTo18Decimals(oneEth, WETH_DECIMALS)).toBe(oneEth); + expect(convertTo18Decimals(oneEth, METH_DECIMALS)).toBe(oneEth); + expect(convertTo18Decimals(oneEth, AMANUSDE_DECIMALS)).toBe(oneEth); + }); + + it('handles zero amount', () => { + expect(convertTo18Decimals(0n, USDC_DECIMALS)).toBe(0n); + }); + + it('handles very large amounts without overflow', () => { + const largeUsdc = 1_000_000_000_000n; // 1M USDC in native + const result = convertTo18Decimals(largeUsdc, USDC_DECIMALS); + expect(result).toBe(1_000_000_000_000_000_000_000_000n); // 1M in 18-dec + }); + + it('treats undefined decimals as 18 (no conversion)', () => { + const amount = 1_000_000_000_000_000_000n; + expect(convertTo18Decimals(amount, undefined)).toBe(amount); + }); + }); + + describe('convertToNativeUnits', () => { + it('converts 18-decimal to 6-decimal USDC correctly', () => { + const oneUsdcIn18 = 1_000_000_000_000_000_000n; + const result = convertToNativeUnits(oneUsdcIn18, USDC_DECIMALS); + expect(result).toBe(1_000_000n); // 1 USDC in native + }); + + it('converts 18-decimal to 9-decimal Solana ptUSDe', () => { + const onePtUsdeIn18 = 1_000_000_000_000_000_000n; + const result = convertToNativeUnits(onePtUsdeIn18, PTUSDE_SOLANA_DECIMALS); + expect(result).toBe(1_000_000_000n); + }); + + it('is identity for 18-decimal tokens', () => { + const oneEth = 1_000_000_000_000_000_000n; + expect(convertToNativeUnits(oneEth, WETH_DECIMALS)).toBe(oneEth); + }); + + it('truncates sub-unit remainders (floor division)', () => { + // 1.5 USDC in 18 decimals + const oneAndHalfUsdc18 = 1_500_000_000_000_000_000n; + const result = convertToNativeUnits(oneAndHalfUsdc18, USDC_DECIMALS); + expect(result).toBe(1_500_000n); // exact in this case + + // 1 wei in 18 decimals → 0 in 6-decimal (lost to truncation) + expect(convertToNativeUnits(1n, USDC_DECIMALS)).toBe(0n); + }); + + it('treats undefined decimals as 18 (no conversion)', () => { + const amount = 1_000_000_000_000_000_000n; + 
expect(convertToNativeUnits(amount, undefined)).toBe(amount); + }); + }); + + describe('round-trip consistency', () => { + it('6-decimal → 18-decimal → 6-decimal is lossless for whole units', () => { + const amounts = [1_000_000n, 100_000_000n, 999_999_999_999n]; + for (const amount of amounts) { + const to18 = convertTo18Decimals(amount, USDC_DECIMALS); + const backTo6 = convertToNativeUnits(to18, USDC_DECIMALS); + expect(backTo6).toBe(amount); + } + }); + + it('9-decimal → 18-decimal → 9-decimal is lossless for whole units', () => { + const amount = 1_000_000_000n; // 1 ptUSDe in 9 decimals + const to18 = convertTo18Decimals(amount, PTUSDE_SOLANA_DECIMALS); + const backTo9 = convertToNativeUnits(to18, PTUSDE_SOLANA_DECIMALS); + expect(backTo9).toBe(amount); + }); + + it('18-decimal → native-decimal for getEvmBalance + comparison must use same units', () => { + // getEvmBalance returns 18-decimal; operation.amount is 6-decimal + const rawBalance = 500_000n; // 0.5 USDC on-chain (6 dec) + const balance18 = convertTo18Decimals(rawBalance, USDC_SOLANA_DECIMALS); + // To compare with operation.amount (6-dec), must convert back: + const balanceNative = balance18 / BigInt(10 ** (18 - USDC_SOLANA_DECIMALS)); + expect(balanceNative).toBe(rawBalance); + + // Directly comparing balance18 to rawBalance would be WRONG: + expect(balance18).not.toBe(rawBalance); // 5e17 !== 5e5 + expect(balance18 > rawBalance).toBe(true); + }); + }); +}); + +// ─── safeParseBigInt tests ───────────────────────────────────────────────── + +describe('safeParseBigInt', () => { + it('parses valid integer strings', () => { + expect(safeParseBigInt('1000000')).toBe(1_000_000n); + expect(safeParseBigInt('0')).toBe(0n); + }); + + it('returns default for undefined/null/empty', () => { + expect(safeParseBigInt(undefined)).toBe(0n); + expect(safeParseBigInt(null)).toBe(0n); + expect(safeParseBigInt('')).toBe(0n); + }); + + it('returns custom default value', () => { + expect(safeParseBigInt(undefined, 
42n)).toBe(42n); + }); + + it('handles large numbers', () => { + expect(safeParseBigInt('1000000000000000000000')).toBe(1_000_000_000_000_000_000_000n); + }); +}); + +// ─── Threshold & amount cap invariants ──────────────────────────────────── + +describe('Threshold engine invariants', () => { + describe('shortfall calculation', () => { + it('shortfall = target - balance when balance < target', () => { + const target = 2000n; + const balance = 500n; + const shortfall = balance < target ? target - balance : 0n; + expect(shortfall).toBe(1500n); + }); + + it('shortfall = 0 when balance >= target', () => { + const target = 2000n; + const balance = 2500n; + const shortfall = balance < target ? target - balance : 0n; + expect(shortfall).toBe(0n); + }); + + it('shortfall = 0 when balance equals target', () => { + const target = 2000n; + const balance = 2000n; + const shortfall = balance < target ? target - balance : 0n; + expect(shortfall).toBe(0n); + }); + }); + + describe('amount capping', () => { + it('amount = min(bridgeAmount, senderBalance)', () => { + const bridgeAmount = 1500n; + const senderBalance = 300n; + const amount = senderBalance < bridgeAmount ? 
senderBalance : bridgeAmount; + expect(amount).toBe(300n); + }); + + it('amount is capped by max when set', () => { + let amount = 1500n; + const max = 800n; + if (max && max > 0n && amount > max) amount = max; + expect(amount).toBe(800n); + }); + + it('max=0 does NOT cap (treated as unlimited)', () => { + let amount = 1500n; + const max = 0n; + if (max && max > 0n && amount > max) amount = max; + expect(amount).toBe(1500n); // unchanged + }); + + it('max=undefined does NOT cap', () => { + let amount = 1500n; + const max: bigint | undefined = undefined; + if (max && max > 0n && amount > max) amount = max; + expect(amount).toBe(1500n); // unchanged + }); + + it('amount below min is rejected', () => { + const amount = 50n; + const min = 100n; + expect(amount < min).toBe(true); + }); + }); +}); + +// ─── Rebalancer-specific decimal flow tests ──────────────────────────────── + +describe('mETH rebalancer decimal flow', () => { + // mETH/WETH: 18 decimals natively. Config thresholds are in wei. + // getEvmBalance returns 18-decimal. No conversion needed. 
+ + it('threshold comparison uses consistent 18-decimal units', () => { + const balanceFromGetEvmBalance = 500_000_000_000_000_000n; // 0.5 ETH (18 dec) + const thresholdFromConfig = safeParseBigInt('1000000000000000000'); // 1 ETH + const targetFromConfig = safeParseBigInt('2000000000000000000'); // 2 ETH + + expect(balanceFromGetEvmBalance < thresholdFromConfig).toBe(true); // triggers rebalance + const shortfall = targetFromConfig - balanceFromGetEvmBalance; + expect(shortfall).toBe(1_500_000_000_000_000_000n); // 1.5 ETH + }); + + it('minRebalanceAmount is in wei (18 decimals)', () => { + const minFromConfig = safeParseBigInt('100000000000000000'); // 0.1 ETH + const amountToBridge = 1_500_000_000_000_000_000n; // 1.5 ETH + expect(amountToBridge >= minFromConfig).toBe(true); + }); +}); + +describe('aaveToken rebalancer decimal flow', () => { + // aManUSDe: aToken is 18-decimal, source USDC is 6-decimal + // Threshold/target are in 18-decimal (aToken balance from getEvmBalance) + // Bridge amount must be in 6-decimal native USDC + + it('shortfall (18-dec) converts to native USDC (6-dec) for bridge', () => { + const shortfall18 = 500_000_000_000_000_000_000n; // 500 aManUSDe in 18-dec + const bridgeAmount = convertToNativeUnits(shortfall18, USDC_DECIMALS); + expect(bridgeAmount).toBe(500_000_000n); // 500 USDC in 6-dec + }); + + it('sender balance (from getEvmBalance 18-dec) converts to native USDC (6-dec)', () => { + const senderBalance18 = 1_000_000_000_000_000_000_000n; // 1000 USDC in 18-dec + const senderBalanceNative = convertToNativeUnits(senderBalance18, USDC_DECIMALS); + expect(senderBalanceNative).toBe(1_000_000_000n); // 1000 USDC in 6-dec + }); + + it('minRebalanceAmount (6-dec config) matches bridge amount units', () => { + const minFromConfig = safeParseBigInt('1000000'); // 1 USDC in 6-dec + const bridgeAmount = 500_000_000n; // 500 USDC in 6-dec + expect(bridgeAmount >= minFromConfig).toBe(true); + }); + + describe('aMansyrupUSDT: aToken is 
6-decimal', () => { + it('threshold/target must be in 18-decimal (getEvmBalance normalizes)', () => { + // On-chain: 100 aMansyrupUSDT = 100_000_000 (6-dec) + // getEvmBalance converts to: 100_000_000_000_000_000_000 (18-dec) + const rawBalance = 100_000_000n; + const balance18 = convertTo18Decimals(rawBalance, AMANSYRUPUSDT_DECIMALS); + expect(balance18).toBe(100_000_000_000_000_000_000n); + + // Config threshold must be in 18-dec to compare correctly: + const threshold18 = safeParseBigInt('100000000000000000000'); // 100 in 18-dec + expect(balance18 >= threshold18).toBe(true); + + // WRONG: If threshold were in 6-dec (common misconfiguration): + const threshold6 = safeParseBigInt('100000000'); // 100 in 6-dec + // This would mean threshold = 0.0000000001 in 18-dec → always above threshold + expect(balance18 >= threshold6).toBe(true); // misleadingly passes + }); + }); +}); + +describe('Solana USDC rebalancer decimal flow', () => { + // ptUSDe: 9-dec on Solana, 18-dec on Mainnet + // USDC: 6-dec on both Solana and Mainnet + // Threshold/target: in 9-dec Solana ptUSDe (NOT 18-dec normalized) + + it('Solana ptUSDe shortfall (9-dec) converts to Mainnet ptUSDe (18-dec)', () => { + const shortfall9 = 100_000_000_000n; // 100 ptUSDe in 9-dec + const shortfall18 = shortfall9 * BigInt(10 ** (PTUSDE_MAINNET_DECIMALS - PTUSDE_SOLANA_DECIMALS)); + expect(shortfall18).toBe(100_000_000_000_000_000_000n); // 100 ptUSDe in 18-dec + }); + + it('ptUSDe (18-dec) to USDC (6-dec) estimate uses correct divisor', () => { + const ptUsde18 = 100_000_000_000_000_000_000n; // 100 ptUSDe in 18-dec + const estimatedUsdc6 = ptUsde18 / BigInt(10 ** (PTUSDE_MAINNET_DECIMALS - USDC_SOLANA_DECIMALS)); + expect(estimatedUsdc6).toBe(100_000_000n); // 100 USDC in 6-dec (1:1 estimate) + }); + + it('getEvmBalance (18-dec) must be converted to 6-dec before comparing to operation.amount', () => { + // This is the exact scenario of Bug #4 (now fixed) + const onChainUsdc = 50_000_000n; // 50 USDC raw 
on-chain + const balance18 = convertTo18Decimals(onChainUsdc, USDC_SOLANA_DECIMALS); + const operationAmount6 = 100_000_000n; // 100 USDC in DB (6-dec) + + // WRONG: comparing 18-dec to 6-dec + expect(balance18 < operationAmount6).toBe(false); // 5e19 < 1e8 is FALSE — bug! + + // CORRECT: convert balance18 to native first + const balanceNative = balance18 / BigInt(10 ** (18 - USDC_SOLANA_DECIMALS)); + expect(balanceNative < operationAmount6).toBe(true); // 5e7 < 1e8 is TRUE — correct + }); + + it('threshold comparison uses 9-decimal Solana units directly', () => { + // Solana ptUSDe balance from SPL token account (9-dec) + const balance = 50_000_000_000n; // 50 ptUSDe in 9-dec + const threshold = safeParseBigInt('100000000000'); // 100 ptUSDe in 9-dec + const target = safeParseBigInt('500000000000'); // 500 ptUSDe in 9-dec + + expect(balance < threshold).toBe(true); // triggers rebalance + const shortfall = target - balance; + expect(shortfall).toBe(450_000_000_000n); // 450 ptUSDe + }); +}); + +describe('TAC USDT rebalancer decimal flow', () => { + // USDT: 6-dec on ETH/TAC + // All internal comparisons in 18-dec normalized + // Config values (threshold, target, min, max) are in native 6-dec, converted to 18-dec + + it('config threshold (6-dec) converts to 18-dec for comparison', () => { + const thresholdNative = safeParseBigInt('100000000'); // 100 USDT in 6-dec + const threshold18 = convertTo18Decimals(thresholdNative, USDT_DECIMALS); + expect(threshold18).toBe(100_000_000_000_000_000_000n); // 100 in 18-dec + }); + + it('getEvmBalance (18-dec) compares correctly to converted threshold', () => { + const balance18 = 50_000_000_000_000_000_000n; // 50 USDT from getEvmBalance + const threshold18 = convertTo18Decimals(100_000_000n, USDT_DECIMALS); // 100 USDT + + expect(balance18 < threshold18).toBe(true); // triggers rebalance + }); + + it('shortfall (18-dec) converts to native (6-dec) for bridge execution', () => { + const shortfall18 =
50_000_000_000_000_000_000n; // 50 USDT in 18-dec + const shortfallNative = convertToNativeUnits(shortfall18, USDT_DECIMALS); + expect(shortfallNative).toBe(50_000_000n); // 50 USDT in 6-dec + }); + + it('committedAmount deduction uses consistent 18-dec units', () => { + const availableEthUsdt18 = 1_000_000_000_000_000_000_000n; // 1000 USDT + const committed18 = 200_000_000_000_000_000_000n; // 200 USDT committed + const remaining = availableEthUsdt18 - committed18; + expect(remaining).toBe(800_000_000_000_000_000_000n); // 800 USDT + }); +}); + +// ─── Address routing validation tests ────────────────────────────────────── + +describe('Address routing invariants', () => { + it('operation.recipient should always be the Mantle destination address', () => { + // In mantleEth Leg 2, we must bridge to operation.recipient (the intended + // Mantle address), NOT evmSender (the mainnet sender who stakes WETH). + const fsAddress = '0xFillServiceMantle'; + const fsSenderAddress = '0xFillServiceMainnet'; // may differ from fsAddress + const operationRecipient = fsAddress; // stored during Leg 1 creation + + // Leg 2 should bridge to operationRecipient, not fsSenderAddress + expect(operationRecipient).toBe(fsAddress); + expect(operationRecipient).not.toBe(fsSenderAddress); + }); + + it('earmarkId should propagate from Leg 1 to Leg 2', () => { + const leg1EarmarkId = 'earmark-123'; + // Bug #9 fix: Leg 2 should carry the earmark + const leg2EarmarkId = leg1EarmarkId ?? null; + expect(leg2EarmarkId).toBe('earmark-123'); + + // Without earmark (threshold-based): + const noEarmark = null ?? null; + expect(noEarmark).toBeNull(); + }); +}); + +// ─── Slippage invariants ─────────────────────────────────────────────────── + +describe('Slippage handling', () => { + it('slippageDbps default is 500 (5%)', () => { + const configValue: number | undefined = undefined; + const slippage = configValue ?? 
500; + expect(slippage).toBe(500); + }); + + it('slippageDbps can safely convert to BigInt', () => { + const slippage = 500; + expect(() => BigInt(slippage)).not.toThrow(); + expect(BigInt(slippage)).toBe(500n); + }); + + it('undefined slippageDbps with ?? 500 does not throw on BigInt conversion', () => { + const configValue: number | undefined = undefined; + const slippage = configValue ?? 500; + expect(() => BigInt(slippage)).not.toThrow(); + }); + + describe('dex-swap approval amount', () => { + it('approval includes slippage padding', () => { + const swapAmount = 1_000_000n; // 1 USDC + const slippageBps = 100; // 1% + const approvalAmount = swapAmount + (swapAmount * BigInt(slippageBps)) / BigInt(10000); + expect(approvalAmount).toBe(1_010_000n); // 1.01 USDC + }); + + it('allowance check uses padded amount, not raw swap amount', () => { + const swapAmount = 1_000_000n; + const slippageBps = 100; + const approvalAmount = swapAmount + (swapAmount * BigInt(slippageBps)) / BigInt(10000); + + // Existing allowance equals swapAmount but less than approvalAmount + const existingAllowance = 1_000_000n; + expect(existingAllowance < approvalAmount).toBe(true); // needs new approval + }); + }); +}); diff --git a/packages/poller/test/rebalance/mantleEth.spec.ts b/packages/poller/test/rebalance/mantleEth.spec.ts index c30b581a..75401005 100644 --- a/packages/poller/test/rebalance/mantleEth.spec.ts +++ b/packages/poller/test/rebalance/mantleEth.spec.ts @@ -524,7 +524,7 @@ describe('mETH Rebalancing', () => { const infoCalls = mockLogger.info.getCalls(); const enoughBalanceLog = infoCalls.find( - (call) => call.args[0] && call.args[0].includes('FS receiver has enough mETH, no rebalance needed'), + (call) => call.args[0] && call.args[0].includes('recipient balance above threshold, no rebalance needed'), ); expect(enoughBalanceLog).toBeTruthy(); }); @@ -561,9 +561,9 @@ describe('mETH Rebalancing', () => { config: smallShortfallConfig, } as unknown as ProcessingContext); - 
const debugCalls = mockLogger.debug.getCalls(); - const shortfallLog = debugCalls.find( - (call) => call.args[0] && call.args[0].includes('FS shortfall below minimum rebalance amount'), + const warnCalls = mockLogger.warn.getCalls(); + const shortfallLog = warnCalls.find( + (call) => call.args[0] && call.args[0].includes('available amount below minimum rebalance threshold, skipping'), ); expect(shortfallLog).toBeTruthy(); }); @@ -584,15 +584,9 @@ describe('mETH Rebalancing', () => { await rebalanceMantleEth(mockContext as unknown as ProcessingContext); - const warnCalls = mockLogger.warn.getCalls(); - const insufficientLog = warnCalls.find( - (call) => call.args[0] && call.args[0].includes('FS sender has insufficient WETH to cover the full shortfall'), - ); - expect(insufficientLog).toBeTruthy(); - const infoCalls = mockLogger.info.getCalls(); const triggerLog = infoCalls.find( - (call) => call.args[0] && call.args[0].includes('FS threshold rebalancing triggered'), + (call) => call.args[0] && call.args[0].includes('threshold rebalance triggered'), ); expect(triggerLog).toBeTruthy(); }); @@ -614,7 +608,7 @@ describe('mETH Rebalancing', () => { const warnCalls = mockLogger.warn.getCalls(); const belowMinLog = warnCalls.find( - (call) => call.args[0] && call.args[0].includes('Available WETH below minimum rebalance threshold'), + (call) => call.args[0] && call.args[0].includes('available amount below minimum rebalance threshold, skipping'), ); expect(belowMinLog).toBeTruthy(); }); @@ -963,7 +957,7 @@ describe('mETH Rebalancing', () => { const warnCalls = mockLogger.warn.getCalls(); const errorLog = warnCalls.find( - (call) => call.args[0] && call.args[0].includes('Failed to check FS receiver mETH balance'), + (call) => call.args[0] && call.args[0].includes('failed to get recipient balance'), ); expect(errorLog).toBeTruthy(); }); @@ -973,6 +967,9 @@ describe('mETH Rebalancing', () => { if (chainId === MAINNET_CHAIN_ID.toString() && address === MOCK_FS_SENDER_ADDRESS) { 
throw new Error('RPC error'); } + if (chainId === MANTLE_CHAIN_ID.toString() && address === MOCK_FS_ADDRESS) { + return BigInt('50000000000000000000'); // 50 mETH (below 100 threshold) + } return BigInt('1000000000000000000000'); }); @@ -980,7 +977,7 @@ describe('mETH Rebalancing', () => { const warnCalls = mockLogger.warn.getCalls(); const errorLog = warnCalls.find( - (call) => call.args[0] && call.args[0].includes('Failed to check FS sender WETH balance'), + (call) => call.args[0] && call.args[0].includes('failed to get sender balance'), ); expect(errorLog).toBeTruthy(); }); diff --git a/packages/poller/test/rebalance/rebalance.spec.ts b/packages/poller/test/rebalance/rebalance.spec.ts index c3ab1534..183e08f6 100644 --- a/packages/poller/test/rebalance/rebalance.spec.ts +++ b/packages/poller/test/rebalance/rebalance.spec.ts @@ -710,7 +710,7 @@ describe('rebalanceInventory', () => { (call) => call.args[0] && typeof call.args[0] === 'string' && - call.args[0].includes('Failed to get bridge transaction request from adapter, trying next preference'), + call.args[0].includes('Failed to get') && call.args[0].includes('bridge transactions'), ); expect(sendFailedMessage).toBeTruthy(); @@ -954,7 +954,7 @@ describe('rebalanceInventory', () => { // We should see bridge transaction submissions const logCalls = mockLogger.info.getCalls(); const hasBridgeLog = logCalls.some( - (call) => call.args[0] && call.args[0].includes('Successfully submitted and confirmed origin bridge transaction'), + (call) => call.args[0] && call.args[0].includes('Successfully submitted bridge transaction'), ); expect(hasBridgeLog).toBe(true); @@ -1075,7 +1075,7 @@ describe('rebalanceInventory', () => { // Check that the logger was called with the expected message const errorCalls = mockLogger.error.getCalls(); const quoteFailedMessage = errorCalls.find( - (call) => call.args[0] && call.args[0].includes('Failed to get quote from adapter'), + (call) => call.args[0] && call.args[0].includes('Failed to 
get') && call.args[0].includes('quote'), ); expect(quoteFailedMessage).toBeTruthy(); expect(mockAdapterA.getReceivedAmount.calledOnce).toBe(true); @@ -1150,7 +1150,7 @@ describe('rebalanceInventory', () => { // Verify successful rebalance with second adapter const infoCalls = mockLogger.info.getCalls(); const successMessage = infoCalls.find( - (call) => call.args[0] && call.args[0].includes('Quote meets slippage requirements'), + (call) => call.args[0] && call.args[0].includes('Successfully created'), ); expect(successMessage).toBeTruthy(); @@ -1273,7 +1273,7 @@ describe('rebalanceInventory', () => { // Check that the logger was called with the expected message const errorCalls = mockLogger.error.getCalls(); const sendFailedMessage = errorCalls.find( - (call) => call.args[0] && call.args[0].includes('Failed to get bridge transaction request from adapter'), + (call) => call.args[0] && call.args[0].includes('Failed to') && call.args[0].includes('bridge'), ); expect(sendFailedMessage).toBeTruthy(); expect(mockAdapterA_sendFails.send.calledOnce).toBe(true); diff --git a/packages/poller/test/rebalance/registry.spec.ts b/packages/poller/test/rebalance/registry.spec.ts new file mode 100644 index 00000000..4f6e148e --- /dev/null +++ b/packages/poller/test/rebalance/registry.spec.ts @@ -0,0 +1,113 @@ +import { describe, it, expect, beforeEach } from '@jest/globals'; + +// We need to test the registry in isolation, but it uses module-level state. +// Use jest.isolateModules to get fresh state per test. 
+ +describe('registry', () => { + function loadFreshRegistry() { + let mod: typeof import('../../src/rebalance/registry'); + jest.isolateModules(() => { + mod = require('../../src/rebalance/registry'); + }); + return mod!; + } + + it('registers a rebalancer and retrieves it', () => { + const { registerRebalancer, getRegisteredRebalancers } = loadFreshRegistry(); + + const handler = jest.fn(); + registerRebalancer({ runMode: 'testMode', displayName: 'Test', handler }); + + const rebalancers = getRegisteredRebalancers(); + expect(rebalancers).toHaveLength(1); + expect(rebalancers[0].runMode).toBe('testMode'); + expect(rebalancers[0].displayName).toBe('Test'); + expect(rebalancers[0].handler).toBe(handler); + }); + + it('throws on duplicate runMode registration', () => { + const { registerRebalancer } = loadFreshRegistry(); + + const handler = jest.fn(); + registerRebalancer({ runMode: 'dup', displayName: 'First', handler }); + + expect(() => { + registerRebalancer({ runMode: 'dup', displayName: 'Second', handler }); + }).toThrow('Duplicate rebalancer registration for runMode: dup'); + }); + + it('allows different runModes', () => { + const { registerRebalancer, getRegisteredRebalancers } = loadFreshRegistry(); + + registerRebalancer({ runMode: 'a', displayName: 'A', handler: jest.fn() }); + registerRebalancer({ runMode: 'b', displayName: 'B', handler: jest.fn() }); + + expect(getRegisteredRebalancers()).toHaveLength(2); + }); + + it('returns readonly array', () => { + const { getRegisteredRebalancers } = loadFreshRegistry(); + const result = getRegisteredRebalancers(); + expect(Array.isArray(result)).toBe(true); + }); + + describe('getRegisteredBridgeTags', () => { + it('returns empty set when no rebalancers have bridge tags', () => { + const { registerRebalancer, getRegisteredBridgeTags } = loadFreshRegistry(); + + registerRebalancer({ runMode: 'noTags', displayName: 'No Tags', handler: jest.fn() }); + + const tags = getRegisteredBridgeTags(); + 
expect(tags.size).toBe(0); + }); + + it('collects bridge tags from all registered rebalancers', () => { + const { registerRebalancer, getRegisteredBridgeTags } = loadFreshRegistry(); + + registerRebalancer({ + runMode: 'a', + displayName: 'A', + handler: jest.fn(), + bridgeTags: ['tag-a1', 'tag-a2'], + }); + registerRebalancer({ + runMode: 'b', + displayName: 'B', + handler: jest.fn(), + bridgeTags: ['tag-b1'], + }); + registerRebalancer({ + runMode: 'c', + displayName: 'C', + handler: jest.fn(), + // no bridgeTags + }); + + const tags = getRegisteredBridgeTags(); + expect(tags.size).toBe(3); + expect(tags.has('tag-a1')).toBe(true); + expect(tags.has('tag-a2')).toBe(true); + expect(tags.has('tag-b1')).toBe(true); + }); + + it('deduplicates tags across registrations', () => { + const { registerRebalancer, getRegisteredBridgeTags } = loadFreshRegistry(); + + registerRebalancer({ + runMode: 'a', + displayName: 'A', + handler: jest.fn(), + bridgeTags: ['shared-tag'], + }); + registerRebalancer({ + runMode: 'b', + displayName: 'B', + handler: jest.fn(), + bridgeTags: ['shared-tag'], + }); + + const tags = getRegisteredBridgeTags(); + expect(tags.size).toBe(1); + }); + }); +}); diff --git a/packages/poller/test/rebalance/solanaUsdc.spec.ts b/packages/poller/test/rebalance/solanaUsdc.spec.ts index 888eb24a..fcbd06e7 100644 --- a/packages/poller/test/rebalance/solanaUsdc.spec.ts +++ b/packages/poller/test/rebalance/solanaUsdc.spec.ts @@ -52,7 +52,7 @@ jest.mock('@solana/spl-token', () => ({ toBase58: () => 'MockAssociatedTokenAddress', }), getAccount: () => Promise.resolve({ - amount: BigInt('1000000000'), + amount: BigInt('500000000000'), // 500 ptUSDe in 9 decimals (above default threshold of 100) }), })); @@ -309,30 +309,26 @@ describe('Solana USDC Rebalancing', () => { }); it('should check AWAITING_CALLBACK operations for Leg 3 completion', async () => { - // Mock AWAITING_CALLBACK operation (Leg 3 pending) - (mockDatabase.getRebalanceOperations as SinonStub) - 
.onFirstCall() - .resolves({ operations: [], total: 0 }) - .onSecondCall() - .resolves({ - operations: [ - { - id: 'op-123', - earmarkId: 'earmark-123', - originChainId: Number(SOLANA_CHAINID), - destinationChainId: Number(MAINNET_CHAIN_ID), - bridge: 'ccip-solana-mainnet', - status: RebalanceOperationStatus.AWAITING_CALLBACK, - transactions: { - [SOLANA_CHAINID]: { transactionHash: 'SolanaTxHash123' }, - [MAINNET_CHAIN_ID]: { transactionHash: 'MainnetTxHash123' }, - }, - amount: '1000000', - createdAt: new Date(), + // Mock operations: single query now returns both PENDING and AWAITING_CALLBACK + (mockDatabase.getRebalanceOperations as SinonStub).resolves({ + operations: [ + { + id: 'op-123', + earmarkId: 'earmark-123', + originChainId: Number(SOLANA_CHAINID), + destinationChainId: Number(MAINNET_CHAIN_ID), + bridge: 'ccip-solana-mainnet', + status: RebalanceOperationStatus.AWAITING_CALLBACK, + transactions: { + [SOLANA_CHAINID]: { transactionHash: 'SolanaTxHash123' }, + [MAINNET_CHAIN_ID]: { transactionHash: 'MainnetTxHash123' }, }, - ], - total: 1, - }); + amount: '1000000', + createdAt: new Date(), + }, + ], + total: 1, + }); // Mock CCIP adapter returning SUCCESS for Leg 3 const mockCcipAdapter = { diff --git a/packages/poller/test/rebalance/tacUsdt.spec.ts b/packages/poller/test/rebalance/tacUsdt.spec.ts index 63709b36..6994c9e1 100644 --- a/packages/poller/test/rebalance/tacUsdt.spec.ts +++ b/packages/poller/test/rebalance/tacUsdt.spec.ts @@ -1749,10 +1749,10 @@ describe('FS Rebalancing Priority Flow', () => { await rebalanceTacUsdt(mockContext as unknown as ProcessingContext); - // Should log that cross-wallet is blocked due to pending ops + // Should log that FS in-flight operations exist (caught by FS-scoped in-flight check) const infoCalls = mockLogger.info.getCalls(); const blockedLog = infoCalls.find( - (call) => call.args[0] && call.args[0].includes('Cross-wallet rebalancing blocked: pending FS operations exist'), + (call) => call.args[0] && 
call.args[0].includes('TAC FS in-flight rebalance operations exist'), ); expect(blockedLog).toBeTruthy(); }); diff --git a/packages/poller/test/rebalance/thresholdEngine.spec.ts b/packages/poller/test/rebalance/thresholdEngine.spec.ts new file mode 100644 index 00000000..a202b610 --- /dev/null +++ b/packages/poller/test/rebalance/thresholdEngine.spec.ts @@ -0,0 +1,324 @@ +import { describe, it, expect, beforeEach } from '@jest/globals'; +import { runThresholdRebalance, ThresholdRebalanceDescriptor } from '../../src/rebalance/thresholdEngine'; +import { RebalanceRunState } from '../../src/rebalance/types'; +import { ProcessingContext } from '../../src/init'; +import { Logger } from '@mark/logger'; +import { RebalanceAction, SupportedBridge } from '@mark/core'; + +// --- Helpers --- + +function createMockLogger(): Logger { + return { + info: jest.fn(), + warn: jest.fn(), + error: jest.fn(), + debug: jest.fn(), + child: jest.fn().mockReturnThis(), + } as unknown as Logger; +} + +function createMockContext(logger?: Logger): ProcessingContext { + return { + logger: logger ?? 
createMockLogger(),
+    requestId: 'test-request-id',
+    config: {} as ProcessingContext['config'],
+  } as unknown as ProcessingContext;
+}
+
+const MOCK_ACTION: RebalanceAction = { bridge: SupportedBridge.Stargate, amount: '1000' };
+
+function createDescriptor(overrides?: Partial<ThresholdRebalanceDescriptor>): ThresholdRebalanceDescriptor {
+  return {
+    name: 'TestToken',
+    isEnabled: () => true,
+    hasInFlightOperations: jest.fn().mockResolvedValue(false),
+    getRecipientBalance: jest.fn().mockResolvedValue(500n),
+    getThresholds: () => ({ threshold: 1000n, target: 2000n }),
+    convertShortfallToBridgeAmount: jest.fn().mockImplementation(async (shortfall: bigint) => shortfall),
+    getSenderBalance: jest.fn().mockResolvedValue(5000n),
+    getAmountCaps: () => ({ min: 100n }),
+    executeBridge: jest.fn().mockResolvedValue([MOCK_ACTION]),
+    ...overrides,
+  };
+}
+
+// --- Tests ---
+
+describe('runThresholdRebalance', () => {
+  let context: ProcessingContext;
+  let logger: Logger;
+  let runState: RebalanceRunState;
+
+  beforeEach(() => {
+    jest.clearAllMocks();
+    logger = createMockLogger();
+    context = createMockContext(logger);
+    runState = { committedAmount: 0n };
+  });
+
+  it('returns empty when disabled', async () => {
+    const descriptor = createDescriptor({ isEnabled: () => false });
+    const actions = await runThresholdRebalance(context, descriptor, runState);
+
+    expect(actions).toEqual([]);
+    expect(logger.debug).toHaveBeenCalledWith(
+      expect.stringContaining('disabled'),
+      expect.any(Object),
+    );
+  });
+
+  it('returns empty when in-flight operations exist', async () => {
+    const descriptor = createDescriptor({
+      hasInFlightOperations: jest.fn().mockResolvedValue(true),
+    });
+    const actions = await runThresholdRebalance(context, descriptor, runState);
+
+    expect(actions).toEqual([]);
+    expect(logger.info).toHaveBeenCalledWith(
+      expect.stringContaining('in-flight'),
+      expect.any(Object),
+    );
+  });
+
+  it('returns empty when hasInFlightOperations throws', async () => {
+    const descriptor = 
createDescriptor({ + hasInFlightOperations: jest.fn().mockRejectedValue(new Error('db error')), + }); + const actions = await runThresholdRebalance(context, descriptor, runState); + + expect(actions).toEqual([]); + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining('failed to check in-flight'), + expect.objectContaining({ error: expect.any(Object) }), + ); + }); + + it('returns empty when getRecipientBalance throws', async () => { + const descriptor = createDescriptor({ + getRecipientBalance: jest.fn().mockRejectedValue(new Error('rpc error')), + }); + const actions = await runThresholdRebalance(context, descriptor, runState); + + expect(actions).toEqual([]); + expect(logger.warn).toHaveBeenCalledWith( + expect.stringContaining('failed to get recipient balance'), + expect.objectContaining({ error: expect.any(Object) }), + ); + }); + + it('returns empty when target < threshold (misconfiguration)', async () => { + const descriptor = createDescriptor({ + getThresholds: () => ({ threshold: 2000n, target: 500n }), + }); + const actions = await runThresholdRebalance(context, descriptor, runState); + + expect(actions).toEqual([]); + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining('misconfiguration'), + expect.objectContaining({ threshold: '2000', target: '500' }), + ); + }); + + it('returns empty when recipient balance is above threshold', async () => { + const descriptor = createDescriptor({ + getRecipientBalance: jest.fn().mockResolvedValue(1500n), + getThresholds: () => ({ threshold: 1000n, target: 2000n }), + }); + const actions = await runThresholdRebalance(context, descriptor, runState); + + expect(actions).toEqual([]); + expect(logger.info).toHaveBeenCalledWith( + expect.stringContaining('above threshold'), + expect.any(Object), + ); + }); + + it('returns empty when recipient balance is exactly at threshold', async () => { + const descriptor = createDescriptor({ + getRecipientBalance: jest.fn().mockResolvedValue(1000n), + 
getThresholds: () => ({ threshold: 1000n, target: 2000n }), + }); + const actions = await runThresholdRebalance(context, descriptor, runState); + expect(actions).toEqual([]); + }); + + it('returns empty when convertShortfallToBridgeAmount throws', async () => { + const descriptor = createDescriptor({ + convertShortfallToBridgeAmount: jest.fn().mockRejectedValue(new Error('conversion error')), + }); + const actions = await runThresholdRebalance(context, descriptor, runState); + + expect(actions).toEqual([]); + expect(logger.warn).toHaveBeenCalledWith( + expect.stringContaining('failed to convert shortfall'), + expect.objectContaining({ error: expect.any(Object) }), + ); + }); + + it('returns empty when getSenderBalance throws', async () => { + const descriptor = createDescriptor({ + getSenderBalance: jest.fn().mockRejectedValue(new Error('rpc error')), + }); + const actions = await runThresholdRebalance(context, descriptor, runState); + + expect(actions).toEqual([]); + expect(logger.warn).toHaveBeenCalledWith( + expect.stringContaining('failed to get sender balance'), + expect.objectContaining({ error: expect.any(Object) }), + ); + }); + + it('returns empty when amount is below minimum cap', async () => { + const descriptor = createDescriptor({ + // shortfall = target - balance = 2000 - 500 = 1500, but sender only has 50 + getSenderBalance: jest.fn().mockResolvedValue(50n), + getAmountCaps: () => ({ min: 100n }), + }); + const actions = await runThresholdRebalance(context, descriptor, runState); + + expect(actions).toEqual([]); + expect(logger.warn).toHaveBeenCalledWith( + expect.stringContaining('below minimum'), + expect.objectContaining({ availableAmount: '50', minRebalance: '100' }), + ); + }); + + it('caps amount at max when amount exceeds max cap', async () => { + const executeBridge = jest.fn().mockResolvedValue([MOCK_ACTION]); + const descriptor = createDescriptor({ + // shortfall = 2000 - 500 = 1500 + getSenderBalance: jest.fn().mockResolvedValue(5000n), + 
getAmountCaps: () => ({ min: 100n, max: 800n }), + executeBridge, + }); + const actions = await runThresholdRebalance(context, descriptor, runState); + + expect(actions).toEqual([MOCK_ACTION]); + expect(executeBridge).toHaveBeenCalledWith(context, 800n); + }); + + it('uses sender balance when less than bridge amount', async () => { + const executeBridge = jest.fn().mockResolvedValue([MOCK_ACTION]); + const descriptor = createDescriptor({ + // shortfall = 2000 - 500 = 1500, sender has 300 + getSenderBalance: jest.fn().mockResolvedValue(300n), + getAmountCaps: () => ({ min: 100n }), + executeBridge, + }); + const actions = await runThresholdRebalance(context, descriptor, runState); + + expect(actions).toEqual([MOCK_ACTION]); + expect(executeBridge).toHaveBeenCalledWith(context, 300n); + expect(logger.warn).toHaveBeenCalledWith( + expect.stringContaining('insufficient balance'), + expect.any(Object), + ); + }); + + it('executes bridge with full shortfall when sender has enough', async () => { + const executeBridge = jest.fn().mockResolvedValue([MOCK_ACTION]); + const descriptor = createDescriptor({ + // balance=500, target=2000, shortfall=1500, sender=5000 + executeBridge, + }); + const actions = await runThresholdRebalance(context, descriptor, runState); + + expect(actions).toEqual([MOCK_ACTION]); + expect(executeBridge).toHaveBeenCalledWith(context, 1500n); + }); + + it('tracks committed amount after successful bridge', async () => { + const descriptor = createDescriptor({ + executeBridge: jest.fn().mockResolvedValue([MOCK_ACTION]), + }); + await runThresholdRebalance(context, descriptor, runState); + + expect(runState.committedAmount).toBe(1500n); + expect(logger.debug).toHaveBeenCalledWith( + expect.stringContaining('updated committed amount'), + expect.objectContaining({ bridgedAmount: '1500', totalCommitted: '1500' }), + ); + }); + + it('does not track committed amount when bridge returns no actions', async () => { + const descriptor = createDescriptor({ + 
executeBridge: jest.fn().mockResolvedValue([]),
+    });
+    await runThresholdRebalance(context, descriptor, runState);
+
+    expect(runState.committedAmount).toBe(0n);
+  });
+
+  it('accumulates committed amount across multiple calls', async () => {
+    const descriptor = createDescriptor({
+      executeBridge: jest.fn().mockResolvedValue([MOCK_ACTION]),
+    });
+    await runThresholdRebalance(context, descriptor, runState);
+    expect(runState.committedAmount).toBe(1500n);
+
+    // Second call with same runState
+    await runThresholdRebalance(context, descriptor, runState);
+    expect(runState.committedAmount).toBe(3000n);
+  });
+
+  it('returns empty and logs error when executeBridge throws', async () => {
+    const descriptor = createDescriptor({
+      executeBridge: jest.fn().mockRejectedValue(new Error('bridge failed')),
+    });
+    const actions = await runThresholdRebalance(context, descriptor, runState);
+
+    expect(actions).toEqual([]);
+    expect(runState.committedAmount).toBe(0n);
+    expect(logger.error).toHaveBeenCalledWith(
+      expect.stringContaining('failed to execute bridge'),
+      expect.objectContaining({ error: expect.any(Object) }),
+    );
+  });
+
+  it('applies shortfall conversion correctly', async () => {
+    const executeBridge = jest.fn().mockResolvedValue([MOCK_ACTION]);
+    const descriptor = createDescriptor({
+      // balance=500, target=2000, shortfall=1500
+      // conversion doubles it: bridgeAmount=3000
+      convertShortfallToBridgeAmount: jest.fn().mockImplementation(async (s: bigint) => s * 2n),
+      getSenderBalance: jest.fn().mockResolvedValue(10000n),
+      getAmountCaps: () => ({ min: 100n }),
+      executeBridge,
+    });
+    const actions = await runThresholdRebalance(context, descriptor, runState);
+
+    expect(actions).toEqual([MOCK_ACTION]);
+    expect(executeBridge).toHaveBeenCalledWith(context, 3000n);
+  });
+
+  it('bridges the minimal shortfall at the threshold === target boundary', async () => {
+    // This covers the edge case where threshold > target is already guarded,
+    // but if 
recipientBalance is between target and threshold — it can't happen
+    // because target >= threshold is enforced. Let's test the boundary:
+    // threshold = target = 1000, balance = 999 → shortfall = 1
+    const executeBridge = jest.fn().mockResolvedValue([MOCK_ACTION]);
+    const descriptor = createDescriptor({
+      getRecipientBalance: jest.fn().mockResolvedValue(999n),
+      getThresholds: () => ({ threshold: 1000n, target: 1000n }),
+      getAmountCaps: () => ({ min: 0n }),
+      executeBridge,
+    });
+    const actions = await runThresholdRebalance(context, descriptor, runState);
+
+    expect(actions).toEqual([MOCK_ACTION]);
+    expect(executeBridge).toHaveBeenCalledWith(context, 1n);
+  });
+
+  it('ignores max cap when max is 0n', async () => {
+    const executeBridge = jest.fn().mockResolvedValue([MOCK_ACTION]);
+    const descriptor = createDescriptor({
+      getAmountCaps: () => ({ min: 0n, max: 0n }),
+      executeBridge,
+    });
+    const actions = await runThresholdRebalance(context, descriptor, runState);
+
+    expect(actions).toEqual([MOCK_ACTION]);
+    // shortfall = 1500, should NOT be capped by max=0
+    expect(executeBridge).toHaveBeenCalledWith(context, 1500n);
+  });
+});