From 0bf9712cc021628ad5e6684d25d2076d82dc2225 Mon Sep 17 00:00:00 2001 From: Gabriel de Quadros Ligneul Date: Thu, 26 Mar 2026 15:53:08 -0300 Subject: [PATCH 1/2] Add option to configure time to inclusion bucket size The time-to-inclusion chart in the generated reports previously used a fixed 1-second bucket size. This adds a `--time-to-inclusion-bucket <MS>` flag to the report and campaign subcommands to control the bucket size in milliseconds. Set the default to 1000 ms (1 second) to keep backward compatibility. Close https://github.com/flashbots/contender/issues/497 --- CHANGELOG.md | 4 + crates/cli/CHANGELOG.md | 4 + crates/cli/src/commands/campaign.rs | 5 ++ .../cli/src/commands/contender_subcommand.rs | 4 + crates/cli/src/commands/spam.rs | 1 + crates/cli/src/main.rs | 3 + crates/report/CHANGELOG.md | 4 + crates/report/src/chart/time_to_inclusion.rs | 73 +++++++++++++++++-- crates/report/src/command.rs | 15 +++- 9 files changed, 106 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c5a6d9e1..ef8f6314 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 > Note: this file did not exist until after `v0.5.6`. +## Unreleased + +- add `--time-to-inclusion-bucket` flag to configure histogram bucket size in reports + ## [0.9.0](https://github.com/flashbots/contender/releases/tag/v0.9.0) - 2026-03-17 - added `--send-raw-tx-sync` flag to `spam` and `campaign` for `eth_sendRawTransactionSync` support ([#459](https://github.com/flashbots/contender/pull/459)) diff --git a/crates/cli/CHANGELOG.md b/crates/cli/CHANGELOG.md index 06e1f23a..479331fe 100644 --- a/crates/cli/CHANGELOG.md +++ b/crates/cli/CHANGELOG.md @@ -5,6 +5,10 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## Unreleased + +- add `--time-to-inclusion-bucket` flag to configure histogram bucket size in reports + ## [0.9.0](https://github.com/flashbots/contender/releases/tag/v0.9.0) - 2026-03-17 - added `--send-raw-tx-sync` flag to `spam` and `campaign` commands ([#459](https://github.com/flashbots/contender/pull/459)) diff --git a/crates/cli/src/commands/campaign.rs b/crates/cli/src/commands/campaign.rs index e4491d9a..db41bfb4 100644 --- a/crates/cli/src/commands/campaign.rs +++ b/crates/cli/src/commands/campaign.rs @@ -134,6 +134,10 @@ pub struct CampaignCliArgs { long_help = "Skip per-transaction debug traces (debug_traceTransaction) when generating the campaign report. This significantly speeds up report generation for large runs at the cost of omitting the storage heatmap and tx gas used charts." )] pub skip_tx_traces: bool, + + /// Bucket size in milliseconds for the time-to-inclusion histogram. + #[arg(long, default_value_t = 1000, value_name = "MS", value_parser = clap::value_parser!(u64).range(1..=10000))] + pub time_to_inclusion_bucket: u64, } fn bump_seed(base_seed: &str, stage_name: &str) -> String { @@ -273,6 +277,7 @@ pub async fn run_campaign( data_dir, false, // use HTML format by default for campaign reports args.skip_tx_traces, + args.time_to_inclusion_bucket, ) .await?; } diff --git a/crates/cli/src/commands/contender_subcommand.rs b/crates/cli/src/commands/contender_subcommand.rs index 0f54be4d..f4e235a8 100644 --- a/crates/cli/src/commands/contender_subcommand.rs +++ b/crates/cli/src/commands/contender_subcommand.rs @@ -96,6 +96,10 @@ pub enum ContenderSubcommand { /// at the cost of omitting the storage heatmap and tx gas used charts. #[arg(long)] skip_tx_traces: bool, + + /// Bucket size in milliseconds for the time-to-inclusion histogram. 
+ #[arg(long, default_value_t = 1000, value_name = "MS", value_parser = clap::value_parser!(u64).range(1..=10000))] + time_to_inclusion_bucket: u64, }, #[command( diff --git a/crates/cli/src/commands/spam.rs b/crates/cli/src/commands/spam.rs index 4c5292c1..ef3c28d7 100644 --- a/crates/cli/src/commands/spam.rs +++ b/crates/cli/src/commands/spam.rs @@ -955,6 +955,7 @@ pub async fn spam( &resolve_data_dir(None)?, false, // TODO: support JSON reports, maybe add a CLI flag for it false, + 1000, ) .await?; } else { diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index edaa7923..dc185dfc 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -132,6 +132,7 @@ async fn run() -> Result<(), CliError> { campaign_id, format, skip_tx_traces, + time_to_inclusion_bucket, } => { if let Some(campaign_id) = campaign_id { let resolved_campaign_id = if campaign_id == "__LATEST_CAMPAIGN__" { @@ -153,6 +154,7 @@ async fn run() -> Result<(), CliError> { &db, &data_dir, skip_tx_traces, + time_to_inclusion_bucket, ) .await .map_err(CliError::Report)?; @@ -165,6 +167,7 @@ async fn run() -> Result<(), CliError> { &data_dir, use_json, skip_tx_traces, + time_to_inclusion_bucket, ) .await .map_err(CliError::Report)?; diff --git a/crates/report/CHANGELOG.md b/crates/report/CHANGELOG.md index c789dc07..481d4cb6 100644 --- a/crates/report/CHANGELOG.md +++ b/crates/report/CHANGELOG.md @@ -5,6 +5,10 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## Unreleased + +- make time-to-inclusion histogram bucket size configurable; labels display in seconds when bucket size is a multiple of 1000ms + ## [0.9.0](https://github.com/flashbots/contender/releases/tag/v0.9.0) - 2026-03-17 - added RPC report template and `report_rpc()` for generating HTML reports from `contender rpc` runs diff --git a/crates/report/src/chart/time_to_inclusion.rs b/crates/report/src/chart/time_to_inclusion.rs index 51814b1f..d863da78 100644 --- a/crates/report/src/chart/time_to_inclusion.rs +++ b/crates/report/src/chart/time_to_inclusion.rs @@ -4,6 +4,8 @@ use serde::{Deserialize, Serialize}; pub struct TimeToInclusionChart { /// Contains each tx's time to inclusion in milliseconds. inclusion_times_ms: Vec, + /// Bucket size in milliseconds for the histogram. + bucket_size_ms: u64, } #[derive(Clone, Debug, Deserialize, Serialize)] @@ -14,7 +16,7 @@ pub struct TimeToInclusionData { } impl TimeToInclusionChart { - pub fn new(run_txs: &[RunTx]) -> Self { + pub fn new(run_txs: &[RunTx], bucket_size_ms: u64) -> Self { let mut inclusion_times_ms = vec![]; for tx in run_txs { let mut dumb_base = 0; @@ -28,7 +30,10 @@ impl TimeToInclusionChart { inclusion_times_ms.push(tti_ms); } } - Self { inclusion_times_ms } + Self { + inclusion_times_ms, + bucket_size_ms, + } } pub fn echart_data(&self) -> TimeToInclusionData { @@ -36,9 +41,9 @@ impl TimeToInclusionChart { let mut counts = vec![]; let mut max_count = 0; - // 1000ms (1s) per bucket + let bucket_size_ms = self.bucket_size_ms; for &tti_ms in &self.inclusion_times_ms { - let bucket_index = (tti_ms / 1000) as usize; + let bucket_index = (tti_ms / bucket_size_ms) as usize; if bucket_index >= buckets.len() { buckets.resize(bucket_index + 1, "".to_string()); counts.resize(bucket_index + 1, 0); @@ -47,7 +52,15 @@ impl TimeToInclusionChart { if counts[bucket_index] > max_count { max_count = counts[bucket_index]; } - buckets[bucket_index] = format!("{} - {} s", bucket_index, bucket_index + 1); + let 
start_ms = bucket_index as u64 * bucket_size_ms; + let end_ms = start_ms + bucket_size_ms; + buckets[bucket_index] = if bucket_size_ms % 1000 == 0 { + let start_s = start_ms / 1000; + let end_s = end_ms / 1000; + format!("{start_s} - {end_s} s") + } else { + format!("{start_ms} - {end_ms} ms") + }; } // Filter out empty buckets and counts that are zero @@ -64,3 +77,53 @@ impl TimeToInclusionChart { } } } + +#[cfg(test)] +mod tests { + use super::*; + use alloy::primitives::TxHash; + + fn make_tx(start_ms: u64, end_ms: u64) -> RunTx { + RunTx { + tx_hash: TxHash::ZERO, + start_timestamp_ms: start_ms, + end_timestamp_ms: Some(end_ms), + block_number: Some(1), + gas_used: Some(21000), + kind: None, + error: None, + flashblock_latency_ms: None, + flashblock_index: None, + } + } + + #[test] + fn ms_buckets() { + // 50ms and 80ms fall in bucket 0 (0-100ms), 150ms in bucket 1 + let txs = vec![make_tx(0, 50), make_tx(0, 80), make_tx(0, 150)]; + let data = TimeToInclusionChart::new(&txs, 100).echart_data(); + + assert_eq!(data.buckets, vec!["0 - 100 ms", "100 - 200 ms"]); + assert_eq!(data.counts, vec![2, 1]); + assert_eq!(data.max_count, 2); + } + + #[test] + fn second_buckets() { + // bucket size is a multiple of 1000ms, so labels should be in seconds + let txs = vec![make_tx(0, 500), make_tx(0, 1500)]; + let data = TimeToInclusionChart::new(&txs, 1000).echart_data(); + + assert_eq!(data.buckets, vec!["0 - 1 s", "1 - 2 s"]); + assert_eq!(data.counts, vec![1, 1]); + } + + #[test] + fn empty_input() { + let data = TimeToInclusionChart::new(&[], 1000).echart_data(); + + assert!(data.buckets.is_empty()); + assert!(data.counts.is_empty()); + assert_eq!(data.max_count, 0); + } +} diff --git a/crates/report/src/command.rs b/crates/report/src/command.rs index 459f76ce..db186a9a 100644 --- a/crates/report/src/command.rs +++ b/crates/report/src/command.rs @@ -47,6 +47,7 @@ pub async fn report( data_dir: &Path, use_json: bool, skip_tx_traces: bool, + time_to_inclusion_bucket: u64, ) 
-> Result<()> { let num_runs = db.num_runs().map_err(|e| e.into())?; @@ -308,7 +309,7 @@ pub async fn report( let heatmap = HeatMapChart::new(&cache_data.traces)?; let gas_per_block = GasPerBlockChart::new(&cache_data.blocks); - let tti = TimeToInclusionChart::new(&all_txs); + let tti = TimeToInclusionChart::new(&all_txs, time_to_inclusion_bucket); let gas_used = TxGasUsedChart::new(&cache_data.traces, 4000); let pending_txs = PendingTxsChart::new(&all_txs); let flashblock_tti = FlashblockTimeToInclusionChart::new(&all_txs); @@ -443,6 +444,7 @@ pub async fn report_campaign( db: &(impl DbOps + Clone + Send + Sync + 'static), data_dir: &Path, skip_tx_traces: bool, + time_to_inclusion_bucket: u64, ) -> Result<()> { let runs = db.get_runs_by_campaign(campaign_id).map_err(|e| e.into())?; if runs.is_empty() { @@ -468,7 +470,16 @@ pub async fn report_campaign( let run_generation_result: Result<()> = async { for run in &runs { // generate per-run report (single run) - always use HTML for campaign runs - report(Some(run.id), 0, db, data_dir, false, skip_tx_traces).await?; + report( + Some(run.id), + 0, + db, + data_dir, + false, + skip_tx_traces, + time_to_inclusion_bucket, + ) + .await?; let run_txs = db.get_run_txs(run.id).map_err(|e| e.into())?; let (run_tx_count_from_logs, run_error_count_from_logs) = tx_and_error_counts(&run_txs, run.tx_count); From bbbb7d813fd3c45cfeba7df3a21628b4fd713d03 Mon Sep 17 00:00:00 2001 From: zeroXbrock <2791467+zeroXbrock@users.noreply.github.com> Date: Thu, 26 Mar 2026 14:41:06 -0700 Subject: [PATCH 2/2] consolidate report params --- crates/cli/src/commands/campaign.rs | 17 +++--- crates/cli/src/commands/spam.rs | 13 ++--- crates/cli/src/main.rs | 32 +++++++----- crates/report/src/command.rs | 81 ++++++++++++++++++++++------- 4 files changed, 89 insertions(+), 54 deletions(-) diff --git a/crates/cli/src/commands/campaign.rs b/crates/cli/src/commands/campaign.rs index db41bfb4..f47c9853 100644 --- a/crates/cli/src/commands/campaign.rs +++ 
b/crates/cli/src/commands/campaign.rs @@ -13,6 +13,7 @@ use crate::BuiltinScenarioCli; use alloy::primitives::{keccak256, U256}; use clap::Args; use contender_core::error::RuntimeParamErrorKind; +use contender_report::command::ReportParams; use contender_testfile::{CampaignConfig, CampaignMode, ResolvedMixEntry, ResolvedStage}; use std::path::Path; use std::time::Duration; @@ -270,16 +271,12 @@ pub async fn run_campaign( run_ids.sort_unstable(); let first_run = *run_ids.first().expect("run IDs exist"); let last_run = *run_ids.last().expect("run IDs exist"); - contender_report::command::report( - Some(last_run), - last_run - first_run, - db, - data_dir, - false, // use HTML format by default for campaign reports - args.skip_tx_traces, - args.time_to_inclusion_bucket, - ) - .await?; + let report_params = ReportParams::new() + .with_skip_tx_traces(args.skip_tx_traces) + .with_time_to_inclusion_bucket(args.time_to_inclusion_bucket) + .with_last_run_id(last_run) + .with_preceding_runs(last_run - first_run); + contender_report::command::report(db, data_dir, report_params).await?; } } diff --git a/crates/cli/src/commands/spam.rs b/crates/cli/src/commands/spam.rs index ef3c28d7..1864b68d 100644 --- a/crates/cli/src/commands/spam.rs +++ b/crates/cli/src/commands/spam.rs @@ -38,6 +38,7 @@ use contender_core::{ use contender_engine_provider::{ reth_node_api::EngineApiMessageVersion, AuthProvider, ControlChain, }; +use contender_report::command::ReportParams; use contender_testfile::TestConfig; use op_alloy_network::{Ethereum, Optimism}; use serde::Serialize; @@ -948,16 +949,8 @@ pub async fn spam( let run_id = spam_inner(db, &mut test_scenario, args, run_context).await?; if args.spam_args.gen_report { if let Some(run_id) = &run_id { - contender_report::command::report( - Some(*run_id), - 0, - db, - &resolve_data_dir(None)?, - false, // TODO: support JSON reports, maybe add a CLI flag for it - false, - 1000, - ) - .await?; + let report_params = 
ReportParams::new().with_last_run_id(*run_id); + contender_report::command::report(db, &resolve_data_dir(None)?, report_params).await?; } else { warn!("Cannot generate report: no run ID found."); } diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index dc185dfc..43df1f4c 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -18,6 +18,7 @@ use commands::{ SpamScenario, }; use contender_core::{db::DbOps, util::TracingOptions}; +use contender_report::command::ReportParams; use contender_sqlite::{SqliteDb, DB_VERSION}; use default_scenarios::{fill_block::FillBlockCliArgs, BuiltinScenarioCli}; use error::CliError; @@ -134,6 +135,7 @@ async fn run() -> Result<(), CliError> { skip_tx_traces, time_to_inclusion_bucket, } => { + let use_json = matches!(format, ReportFormat::Json); if let Some(campaign_id) = campaign_id { let resolved_campaign_id = if campaign_id == "__LATEST_CAMPAIGN__" { db.latest_campaign_id() @@ -149,28 +151,30 @@ async fn run() -> Result<(), CliError> { if preceding_runs > 0 { warn!("--preceding-runs is ignored when --campaign is provided"); } + let report_params = ReportParams::new() + .with_skip_tx_traces(skip_tx_traces) + .with_time_to_inclusion_bucket(time_to_inclusion_bucket) + .with_use_json(use_json); contender_report::command::report_campaign( &resolved_campaign_id, &db, &data_dir, - skip_tx_traces, - time_to_inclusion_bucket, + report_params, ) .await .map_err(CliError::Report)?; } else { - let use_json = matches!(format, ReportFormat::Json); - contender_report::command::report( - last_run_id, - preceding_runs, - &db, - &data_dir, - use_json, - skip_tx_traces, - time_to_inclusion_bucket, - ) - .await - .map_err(CliError::Report)?; + let mut report_params = ReportParams::new() + .with_preceding_runs(preceding_runs) + .with_skip_tx_traces(skip_tx_traces) + .with_time_to_inclusion_bucket(time_to_inclusion_bucket) + .with_use_json(use_json); + if let Some(last_run_id) = last_run_id { + report_params = 
report_params.with_last_run_id(last_run_id); + } + contender_report::command::report(&db, &data_dir, report_params) + .await + .map_err(CliError::Report)?; } } diff --git a/crates/report/src/command.rs b/crates/report/src/command.rs index db186a9a..9a2e307e 100644 --- a/crates/report/src/command.rs +++ b/crates/report/src/command.rs @@ -40,14 +40,61 @@ pub struct GasQuantiles { pub p99: u128, } -pub async fn report( +pub struct ReportParams { last_run_id: Option, preceding_runs: u64, - db: &(impl DbOps + Clone + Send + Sync + 'static), - data_dir: &Path, use_json: bool, skip_tx_traces: bool, time_to_inclusion_bucket: u64, +} + +impl Default for ReportParams { + fn default() -> Self { + ReportParams { + last_run_id: None, + preceding_runs: 0, + use_json: false, + skip_tx_traces: false, + time_to_inclusion_bucket: 1000, + } + } +} + +impl ReportParams { + pub fn new() -> Self { + Self::default() + } + + pub fn with_last_run_id(mut self, run_id: u64) -> Self { + self.last_run_id = Some(run_id); + self + } + + pub fn with_preceding_runs(mut self, preceding_runs: u64) -> Self { + self.preceding_runs = preceding_runs; + self + } + + pub fn with_use_json(mut self, use_json: bool) -> Self { + self.use_json = use_json; + self + } + + pub fn with_skip_tx_traces(mut self, skip_tx_traces: bool) -> Self { + self.skip_tx_traces = skip_tx_traces; + self + } + + pub fn with_time_to_inclusion_bucket(mut self, time_to_inclusion_bucket: u64) -> Self { + self.time_to_inclusion_bucket = time_to_inclusion_bucket; + self + } +} + +pub async fn report( + db: &(impl DbOps + Clone + Send + Sync + 'static), + data_dir: &Path, + params: ReportParams, ) -> Result<()> { let num_runs = db.num_runs().map_err(|e| e.into())?; @@ -62,7 +109,7 @@ pub async fn report( } // if id is provided, check if it's valid - let end_run_id = if let Some(id) = last_run_id { + let end_run_id = if let Some(id) = params.last_run_id { if id == 0 || id > num_runs { return Err(Error::InvalidRunId(id)); } @@ -80,7 +127,7 
@@ pub async fn report( .rpc_url; // collect CSV report for each run_id - let start_run_id = end_run_id - preceding_runs; + let start_run_id = end_run_id - params.preceding_runs; let mut all_txs = vec![]; let reports_dir = data_dir.join("reports"); for id in start_run_id..=end_run_id { @@ -154,7 +201,7 @@ pub async fn report( } else { get_block_data(&all_txs, &rpc_client).await? }; - let trace_data = if skip_tx_traces { + let trace_data = if params.skip_tx_traces { info!("Skipping per-transaction traces (--skip-tx-traces)"); vec![] } else { @@ -309,7 +356,7 @@ pub async fn report( let heatmap = HeatMapChart::new(&cache_data.traces)?; let gas_per_block = GasPerBlockChart::new(&cache_data.blocks); - let tti = TimeToInclusionChart::new(&all_txs, time_to_inclusion_bucket); + let tti = TimeToInclusionChart::new(&all_txs, params.time_to_inclusion_bucket); let gas_used = TxGasUsedChart::new(&cache_data.traces, 4000); let pending_txs = PendingTxsChart::new(&all_txs); let flashblock_tti = FlashblockTimeToInclusionChart::new(&all_txs); @@ -363,7 +410,7 @@ pub async fn report( let reports_dir = data_dir.join("reports"); - if use_json { + if params.use_json { // JSON output - no browser opening let report_path = build_json_report(&report_metadata, &reports_dir)?; info!("saved JSON report to {report_path:?}"); @@ -443,8 +490,7 @@ pub async fn report_campaign( campaign_id: &str, db: &(impl DbOps + Clone + Send + Sync + 'static), data_dir: &Path, - skip_tx_traces: bool, - time_to_inclusion_bucket: u64, + params: ReportParams, ) -> Result<()> { let runs = db.get_runs_by_campaign(campaign_id).map_err(|e| e.into())?; if runs.is_empty() { @@ -470,16 +516,11 @@ pub async fn report_campaign( let run_generation_result: Result<()> = async { for run in &runs { // generate per-run report (single run) - always use HTML for campaign runs - report( - Some(run.id), - 0, - db, - data_dir, - false, - skip_tx_traces, - time_to_inclusion_bucket, - ) - .await?; + let params = ReportParams::new() + 
.with_skip_tx_traces(params.skip_tx_traces) + .with_time_to_inclusion_bucket(params.time_to_inclusion_bucket) + .with_last_run_id(run.id); + report(db, data_dir, params).await?; let run_txs = db.get_run_txs(run.id).map_err(|e| e.into())?; let (run_tx_count_from_logs, run_error_count_from_logs) = tx_and_error_counts(&run_txs, run.tx_count);