diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index c3bd58a6..1625b9b5 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -23,7 +23,8 @@ jobs: with: cache-on-failure: true - name: Install deps - run: sudo apt-get install -y fontconfig libfontconfig1-dev libfontconfig + run: | + sudo apt-get update && sudo apt-get install -y libsqlite3-dev fontconfig libfontconfig1-dev libfontconfig uuid-dev - run: cargo clippy --workspace --lib --examples --tests --benches --all-features --locked env: RUSTFLAGS: -D warnings @@ -51,6 +52,7 @@ jobs: with: cache-on-failure: true - name: Install deps - run: sudo apt-get install -y fontconfig libfontconfig1-dev libfontconfig + run: | + sudo apt-get update && sudo apt-get install -y libsqlite3-dev fontconfig libfontconfig1-dev libfontconfig uuid-dev - uses: taiki-e/install-action@cargo-udeps - run: cargo udeps --workspace --lib --examples --tests --benches --all-features --locked diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 6f0a8bd8..b2e50687 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -26,7 +26,7 @@ jobs: with: cache-on-failure: true - name: Install deps - run: sudo apt-get update && sudo apt-get install -y libsqlite3-dev fontconfig libfontconfig1-dev libfontconfig + run: sudo apt-get update && sudo apt-get install -y libsqlite3-dev fontconfig libfontconfig1-dev libfontconfig uuid-dev - name: Build run: cargo build --verbose --workspace @@ -50,6 +50,6 @@ jobs: with: cache-on-failure: true - name: Install deps - run: sudo apt-get update && sudo apt-get install -y libsqlite3-dev fontconfig libfontconfig1-dev libfontconfig + run: sudo apt-get update && sudo apt-get install -y libsqlite3-dev fontconfig libfontconfig1-dev libfontconfig uuid-dev - name: Run tests run: cargo test --verbose --workspace diff --git a/CHANGELOG.md b/CHANGELOG.md index edcec58c..3c0aa209 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,7 +16,10 @@ 
Features: - scenarios: add groth16Verify scenario to test onchain proof verification ([#379](https://github.com/flashbots/contender/pull/379)) - spammer: support batching json-rpc eth_sendRawTransaction reqs ([#381](https://github.com/flashbots/contender/pull/381)) - minor UX improvements ([#382](https://github.com/flashbots/contender/pull/382)) +- campaign meta-scenarios: new `contender campaign` command and campaign TOML schema for staged parallel mixes Internal changes: -- revamp error handling ([#378](https://github.com/flashbots/contender/pull/378)) \ No newline at end of file +- revamp error handling ([#378](https://github.com/flashbots/contender/pull/378)) +- DB schema bumped to `user_version = 6` to record campaign/stage metadata in runs. + - If you see a DB version mismatch, export/reset your DB: `contender db export` (optional backup) then `contender db reset` (or `drop`) to recreate with the new schema. \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index fecc27b7..6a69af32 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2171,6 +2171,7 @@ dependencies = [ "tracing", "tracing-subscriber 0.3.20", "url", + "uuid", "webbrowser", ] @@ -10665,9 +10666,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.18.1" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" +checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a" dependencies = [ "getrandom 0.3.4", "js-sys", diff --git a/Cargo.toml b/Cargo.toml index 13220f13..96238cc8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,6 +47,7 @@ clap = { version = "4.5.16" } csv = "1.3.0" miette = { version = "7.6.0" } url = "2.5.7" +uuid = "1.19.0" ## core futures = "0.3.30" diff --git a/Dockerfile b/Dockerfile index f23acd47..8af387f8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,8 @@ FROM rust:slim AS 
builder # Install build dependencies RUN apt-get update && \ - apt-get install -y make curl git libsqlite3-dev fontconfig libfontconfig1-dev libfontconfig libssl-dev libclang-dev + apt-get install -y make curl git libsqlite3-dev fontconfig libfontconfig1-dev libfontconfig libssl-dev libclang-dev uuid-dev && \ + rm -rf /var/lib/apt/lists/* # Copy in project files COPY . /app diff --git a/README.md b/README.md index e971a45d..5c102573 100644 --- a/README.md +++ b/README.md @@ -80,6 +80,7 @@ It supports both **per-second** (TPS) and **per-block** (TPB) timing, seeded fuz - [Engine API Spamming](docs/engine-api.md) - [Reports, Database, and Admin Tools](docs/reports-db-admin.md) - [Using Contender as a Library](docs/library-usage.md) +- [Composite Campaigns](docs/campaigns.md) ### 5. Internals - [Architecture](docs/architecture.md) diff --git a/campaigns/composite.toml b/campaigns/composite.toml new file mode 100644 index 00000000..386e5364 --- /dev/null +++ b/campaigns/composite.toml @@ -0,0 +1,19 @@ +name = "devnet-steady" +description = "Steady devnet load with a mix of erc20, uni_v2 and stress" + +[spam] +mode = "tps" +rate = 100 +duration = 60 + +[[spam.mix]] +scenario = "builtin:erc20" +share_pct = 40.0 + +[[spam.mix]] +scenario = "builtin:uni_v2" +share_pct = 40.0 + +[[spam.mix]] +scenario = "builtin:stress" +share_pct = 20.0 diff --git a/campaigns/staged-example.toml b/campaigns/staged-example.toml new file mode 100644 index 00000000..328e8354 --- /dev/null +++ b/campaigns/staged-example.toml @@ -0,0 +1,41 @@ +name = "staged-example" +description = "Warmup then steady-state blend across two stages" + +[spam] +mode = "tps" +rate = 150 # default rate if a stage omits one +duration = 60 # default duration per stage (seconds) +seed = 1337 + +[[spam.stage]] +name = "warmup" +rate = 100 +# duration = 60 + [[spam.stage.mix]] + scenario = "builtin:erc20" + share_pct = 90.0 + [[spam.stage.mix]] + scenario = "scenarios/groth16Verify.toml" + share_pct = 10.0 + 
+[[spam.stage]] +name = "steady" +rate = 500 +duration = 120 + [[spam.stage.mix]] + scenario = "builtin:erc20" + share_pct = 10.0 + [[spam.stage.mix]] + scenario = "scenarios/groth16Verify.toml" + share_pct = 90.0 + +[[spam.stage]] +name = "cooldown" +rate = 20 +# duration = 60 + [[spam.stage.mix]] + scenario = "builtin:erc20" + share_pct = 90.0 + [[spam.stage.mix]] + scenario = "scenarios/groth16Verify.toml" + share_pct = 10.0 \ No newline at end of file diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 8732633f..c828ae64 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -47,6 +47,7 @@ regex = { workspace = true } thiserror = { workspace = true } miette = { workspace = true, features = ["fancy"] } url = { workspace = true } +uuid = { workspace = true } [dev-dependencies] tempfile = "3.15.0" diff --git a/crates/cli/src/commands/campaign.rs b/crates/cli/src/commands/campaign.rs new file mode 100644 index 00000000..f097a015 --- /dev/null +++ b/crates/cli/src/commands/campaign.rs @@ -0,0 +1,549 @@ +use super::{setup::SetupCommandArgs, spam::SpamCommandArgs, SpamScenario}; +use crate::commands::spam::SpamCampaignContext; +use crate::error::CliError; +use crate::util::load_testconfig; +use crate::util::{data_dir, load_seedfile, parse_duration}; +use crate::BuiltinScenarioCli; +use crate::{ + commands::{ + self, + common::{ScenarioSendTxsCliArgs, SendTxsCliArgsInner}, + SpamCliArgs, + }, + util::bold, +}; +use alloy::primitives::{keccak256, U256}; +use clap::Args; +use contender_core::db::DbOps; +use contender_core::error::RuntimeParamErrorKind; +use contender_testfile::{CampaignConfig, CampaignMode, ResolvedStage}; +use std::time::Duration; +use tracing::{debug, info, warn}; +use uuid::Uuid; + +#[derive(Clone, Debug, Args)] +pub struct CampaignCliArgs { + /// Path to campaign config TOML + #[arg(help = "Path to campaign config TOML")] + pub campaign: String, + + #[command(flatten)] + pub eth_json_rpc_args: SendTxsCliArgsInner, + + /// HTTP 
JSON-RPC URL to use for bundle spamming (must support `eth_sendBundle`). + #[arg( + env = "BUILDER_RPC_URL", + short, + long, + long_help = "HTTP JSON-RPC URL to use for bundle spamming (must support `eth_sendBundle`)", + visible_aliases = ["builder", "builder-rpc-url", "builder-rpc"] + )] + pub builder_url: Option, + + /// The time to wait for pending transactions to land, in blocks. + #[arg( + short = 'w', + long, + default_value_t = 12, + long_help = "The number of blocks to wait for pending transactions to land. If transactions land within the timeout, it resets.", + visible_aliases = ["wait"] + )] + pub pending_timeout: u64, + + /// The number of accounts to generate for each agent (`from_pool` in scenario files) + #[arg( + short, + long, + visible_aliases = ["na", "accounts"], + default_value_t = 10 + )] + pub accounts_per_agent: u64, + + /// Max number of txs to send in a single json-rpc batch request. + #[arg( + long = "rpc-batch-size", + value_name = "N", + default_value_t = 0, + long_help = "Max number of eth_sendRawTransaction calls to send in a single JSON-RPC batch request. 0 (default) disables batching and sends one eth_sendRawTransaction per tx." + )] + pub rpc_batch_size: u64, + + /// Ignore receipts (fire-and-forget). + #[arg( + long, + help = "Ignore transaction receipts.", + long_help = "Keep sending transactions without waiting for receipts.", + visible_aliases = ["ir", "no-receipts"] + )] + pub ignore_receipts: bool, + + /// Disable nonce synchronization between batches. + #[arg( + long, + help = "Disable nonce synchronization between batches.", + visible_aliases = ["disable-nonce-sync", "fast-nonces"] + )] + pub optimistic_nonces: bool, + + /// Generate report after campaign finishes. + #[arg( + long, + long_help = "Generate a report for the spam run(s) after the campaign completes.", + visible_aliases = ["report"] + )] + pub gen_report: bool, + + /// Re-deploy contracts in builtin scenarios. 
+ #[arg( + long, + global = true, + long_help = "If set, re-deploy contracts that have already been deployed. Only builtin scenarios are affected." + )] + pub redeploy: bool, + + /// Skip setup steps when running builtin scenarios. + #[arg( + long, + global = true, + long_help = "If set, skip contract deployment & setup transactions when running builtin scenarios. Does nothing when running a scenario file." + )] + pub skip_setup: bool, + + /// The time to wait for spammer to recover from failure before stopping contender. + #[arg( + long = "timeout", + long_help = "The time to wait for spammer to recover from failure before stopping contender.", + value_parser = parse_duration, + default_value = "5min" + )] + pub spam_timeout: Duration, +} + +fn bump_seed(base_seed: &str, stage_name: &str) -> String { + let compound_hash = keccak256(base_seed).bit_or(keccak256(stage_name)); + U256::from_be_bytes(compound_hash.0).to_string() +} + +pub async fn run_campaign( + db: &(impl DbOps + Clone + Send + Sync + 'static), + args: CampaignCliArgs, +) -> Result<(), CliError> { + let campaign = CampaignConfig::from_file(&args.campaign)?; + let stages = campaign.resolve()?; + validate_stage_rates(&stages, &args).await?; + let campaign_id = Uuid::new_v4().to_string(); + + if args.redeploy && args.skip_setup { + return Err(RuntimeParamErrorKind::InvalidArgs(format!( + "{} and {} cannot be passed together", + bold("--redeploy"), + bold("--skip-setup") + )) + .into()); + } + + let base_seed = args + .eth_json_rpc_args + .seed + .clone() + .or_else(|| campaign.spam.seed.map(|s| s.to_string())) + .unwrap_or(load_seedfile()?); + + // Setup phase. Skip builtin scenarios since they do their own setup at spam time. 
+ let provider = args.eth_json_rpc_args.new_rpc_provider()?; + if !args.skip_setup { + for scenario_label in campaign.setup_scenarios() { + let scenario = match parse_builtin_reference(&scenario_label) { + Some(builtin) => SpamScenario::Builtin( + builtin + .to_builtin_scenario( + &provider, + &create_spam_cli_args(None, &args, CampaignMode::Tps, 1, 1), + /* TODO: KLUDGE: + - I don't think a `BuiltinScenarioCli` *needs* `rate` or `duration` -- that's for the spammer. + - we should use a different interface for `to_builtin_scenario` (replace `SpamCliArgs`) + */ + ) + .await?, + ), + None => SpamScenario::Testfile(scenario_label.to_owned()), + }; + let mut setup_args = args.eth_json_rpc_args.clone(); + setup_args.seed = Some(base_seed.clone()); + let setup_cmd = SetupCommandArgs::new(scenario, setup_args)?; + commands::setup(db, setup_cmd).await?; + } + } + + let mut run_ids = vec![]; + + for (stage_idx, stage) in stages.iter().enumerate() { + info!( + campaign_id = %campaign_id, + campaign_name = %campaign.name, + "Starting campaign stage {}: {} ({}={})", + stage_idx + 1, + stage.name, + match campaign.spam.mode { + CampaignMode::Tps => "tps", + CampaignMode::Tpb => "tpb", + }, + stage.rate + ); + + // Avoid nonce conflicts: override_senders would share a single EOA across mixes. 
+ if args.eth_json_rpc_args.override_senders && stage.mix.len() > 1 { + return Err(RuntimeParamErrorKind::InvalidArgs( + "override-senders cannot be used when a stage has multiple mix entries; it would share one sender across mixes and cause nonce conflicts".into(), + ) + .into()); + } + + let stage_seed = bump_seed(&base_seed, &stage.name); + + // Execute stage with optional timeout + let stage_run_ids = if let Some(timeout_secs) = stage.stage_timeout { + let timeout_duration = std::time::Duration::from_secs(timeout_secs); + match tokio::time::timeout( + timeout_duration, + execute_stage(db, &campaign, stage, &args, &campaign_id, &stage_seed), + ) + .await + { + Ok(result) => result?, + Err(_) => { + return Err(RuntimeParamErrorKind::InvalidArgs(format!( + "Stage '{}' exceeded timeout of {} seconds", + stage.name, timeout_secs + )) + .into()); + } + } + } else { + execute_stage(db, &campaign, stage, &args, &campaign_id, &stage_seed).await? + }; + + run_ids.extend(stage_run_ids); + } + + if args.gen_report { + if run_ids.is_empty() { + warn!("No runs found for campaign, skipping report."); + } else { + run_ids.sort_unstable(); + let first_run = *run_ids.first().expect("run IDs exist"); + let last_run = *run_ids.last().expect("run IDs exist"); + contender_report::command::report( + Some(last_run), + last_run - first_run, + db, + &data_dir()?, + ) + .await?; + } + } + + Ok(()) +} + +async fn validate_stage_rates( + stages: &[ResolvedStage], + _args: &CampaignCliArgs, +) -> Result<(), CliError> { + for stage in stages { + for mix in &stage.mix { + if mix.rate == 0 { + continue; + } + if parse_builtin_reference(&mix.scenario).is_some() { + continue; + } + let cfg = load_testconfig(&mix.scenario).await?; + let spam_len = cfg.spam.as_ref().map(|s| s.len()).unwrap_or(0) as u64; + if spam_len == 0 { + return Err(RuntimeParamErrorKind::InvalidArgs(format!( + "Stage '{}' scenario '{}' has no spam entries defined.", + stage.name, mix.scenario + )) + .into()); + } + // Check 
if rate * duration is sufficient to cover all spam entries + let total_txs = mix.rate * stage.duration; + if total_txs < spam_len { + return Err(RuntimeParamErrorKind::InvalidArgs(format!( + "Stage '{}' scenario '{}': insufficient transactions (rate {} * duration {} = {}) to cover {} spam entries. Minimum rate needed: {}", + stage.name, mix.scenario, mix.rate, stage.duration, total_txs, spam_len, + spam_len.div_ceil(stage.duration) // ceiling division + )) + .into()); + } + } + } + Ok(()) +} + +fn create_spam_cli_args( + testfile: Option, + args: &CampaignCliArgs, + spam_mode: CampaignMode, + spam_rate: u64, + spam_duration: u64, +) -> SpamCliArgs { + SpamCliArgs { + eth_json_rpc_args: ScenarioSendTxsCliArgs { + testfile, + rpc_args: args.eth_json_rpc_args.clone(), + }, + spam_args: crate::commands::common::SendSpamCliArgs { + builder_url: args.builder_url.clone(), + txs_per_second: if matches!(spam_mode, CampaignMode::Tps) { + Some(spam_rate) + } else { + None + }, + txs_per_block: if matches!(spam_mode, CampaignMode::Tpb) { + Some(spam_rate) + } else { + None + }, + duration: spam_duration, + pending_timeout: args.pending_timeout, + loops: Some(Some(1)), + accounts_per_agent: args.accounts_per_agent, + }, + ignore_receipts: args.ignore_receipts, + optimistic_nonces: args.optimistic_nonces, + gen_report: false, + spam_timeout: args.spam_timeout, + redeploy: args.redeploy, + skip_setup: true, + rpc_batch_size: args.rpc_batch_size, + } +} + +async fn execute_stage( + db: &(impl DbOps + Clone + Send + Sync + 'static), + campaign: &CampaignConfig, + stage: &ResolvedStage, + args: &CampaignCliArgs, + campaign_id: &str, + stage_seed: &str, +) -> Result, CliError> { + let mut handles = vec![]; + let mut run_ids = vec![]; + + // Validate that at least one scenario has non-zero rate + if stage.mix.iter().all(|mix| mix.rate == 0) { + return Err(RuntimeParamErrorKind::InvalidArgs(format!( + "Stage '{}' has no scenarios with non-zero rate after resolution", + stage.name + )) 
+ .into()); + } + + // Create a barrier to synchronize parallel task starts + let active_scenario_count = stage.mix.iter().filter(|mix| mix.rate > 0).count(); + let barrier = std::sync::Arc::new(tokio::sync::Barrier::new(active_scenario_count)); + + for (mix_idx, mix) in stage.mix.iter().enumerate() { + if mix.rate == 0 { + continue; + } + let mix = mix.clone(); + let scenario_seed = bump_seed(stage_seed, &mix_idx.to_string()); + let mut args = args.to_owned(); + args.eth_json_rpc_args.seed = Some(scenario_seed.clone()); + debug!("mix {mix_idx} seed: {}", scenario_seed); + + let spam_cli_args = create_spam_cli_args( + Some(mix.scenario.clone()), + &args, + campaign.spam.mode, + mix.rate, + stage.duration, + ); + + let spam_scenario = if let Some(builtin_cli) = parse_builtin_reference(&mix.scenario) { + let provider = args.eth_json_rpc_args.new_rpc_provider()?; + let builtin = builtin_cli + .to_builtin_scenario(&provider, &spam_cli_args) + .await?; + SpamScenario::Builtin(builtin) + } else { + SpamScenario::Testfile(mix.scenario.clone()) + }; + + let spam_args = SpamCommandArgs::new(spam_scenario, spam_cli_args)?; + let scenario = spam_args.init_scenario(db).await?; + let duration = stage.duration; + let db = db.clone(); + let campaign_id_owned = campaign_id.to_owned(); + let campaign_name = campaign.name.clone(); + let stage_name = stage.name.clone(); + let scenario_label = mix.scenario.clone(); + let ctx = SpamCampaignContext { + campaign_id: Some(campaign_id_owned.clone()), + campaign_name: Some(campaign_name.clone()), + stage_name: Some(stage.name.clone()), + scenario_name: Some(mix.scenario.clone()), + }; + let rate = mix.rate; + let barrier_clone = barrier.clone(); + info!( + campaign_id = %campaign_id_owned, + campaign_name = %campaign_name, + stage = %stage_name, + scenario = %scenario_label, + mode = ?campaign.spam.mode, + rate, + duration, + "Starting campaign scenario spammer", + ); + let handle = tokio::spawn(async move { + // Wait for all parallel 
scenarios to be ready before starting + barrier_clone.wait().await; + + let mut scenario = scenario; + let run_res = commands::spam(&db, &spam_args, &mut scenario, ctx).await; + match run_res { + Ok(Some(run_id)) => { + info!( + campaign_id = %campaign_id_owned, + campaign_name = %campaign_name, + stage = %stage_name, + scenario = %scenario_label, + run_id, + "Finished campaign scenario spammer" + ); + Ok(Some(run_id)) + } + Ok(None) => { + warn!( + campaign_id = %campaign_id_owned, + campaign_name = %campaign_name, + stage = %stage_name, + scenario = %scenario_label, + "Campaign scenario finished without recording a run_id" + ); + Ok(None) + } + Err(e) => Err(e), + } + }); + handles.push(handle); + } + + for handle in handles { + if let Some(run_id) = handle.await?? { + run_ids.push(run_id); + } + } + + Ok(run_ids) +} + +fn strip_builtin_name(name: impl AsRef) -> String { + name.as_ref() + .trim() + .trim_start_matches("builtin:") + .to_owned() +} + +fn parse_builtin_reference(name: &str) -> Option { + let norm = strip_builtin_name(name).to_lowercase(); + match norm.as_str() { + "erc20" => Some(BuiltinScenarioCli::Erc20(Default::default())), + "revert" | "reverts" => Some(BuiltinScenarioCli::Revert(Default::default())), + "stress" => Some(BuiltinScenarioCli::Stress(Default::default())), + "uni_v2" | "univ2" | "uni-v2" => Some(BuiltinScenarioCli::UniV2(Default::default())), + _ => None, + } +} + +#[cfg(test)] +mod tests { + use contender_testfile::{ResolvedMixEntry, ResolvedStage}; + use std::sync::Arc; + use tokio::sync::{Barrier, Mutex}; + use tokio::time::{sleep, Duration}; + + fn test_stage(name: &str) -> ResolvedStage { + ResolvedStage { + name: name.to_string(), + rate: 1, + duration: 1, + stage_timeout: None, + mix: vec![ + ResolvedMixEntry { + scenario: "s1".to_string(), + share_pct: 50.0, + rate: 1, + }, + ResolvedMixEntry { + scenario: "s2".to_string(), + share_pct: 50.0, + rate: 1, + }, + ], + } + } + + #[tokio::test] + async fn stages_run_sequentially() 
{ + let stages = vec![test_stage("first"), test_stage("second")]; + let events = Arc::new(Mutex::new(Vec::new())); + + for s in &stages { + { + let mut ev = events.lock().await; + ev.push(format!("start-{}", s.name)); + } + // simulate work + sleep(Duration::from_millis(5)).await; + { + let mut ev = events.lock().await; + ev.push(format!("end-{}", s.name)); + } + } + + let ev = events.lock().await; + assert_eq!( + ev.as_slice(), + &["start-first", "end-first", "start-second", "end-second"] + ); + } + + #[tokio::test] + async fn stage_mixes_run_in_parallel() { + let s = test_stage("parallel"); + let barrier = Arc::new(Barrier::new(s.mix.len() + 1)); + let starts = Arc::new(Mutex::new(Vec::new())); + let mut handles = Vec::new(); + + for mix in s.mix.clone() { + let b = barrier.clone(); + let starts = starts.clone(); + handles.push(tokio::spawn(async move { + { + let mut st = starts.lock().await; + st.push(mix.scenario.clone()); + } + // wait for all tasks to reach this point + b.wait().await; + Ok::<(), ()>(()) + })); + } + + // release all spawned tasks once they have all started + barrier.wait().await; + for h in handles { + h.await.unwrap().unwrap(); + } + + let st = starts.lock().await; + // all mixes started; order not important, but count must match + assert_eq!(st.len(), 2); + assert!(st.contains(&"s1".to_string())); + assert!(st.contains(&"s2".to_string())); + } +} diff --git a/crates/cli/src/commands/common.rs b/crates/cli/src/commands/common.rs index 576a2d87..1dcfe788 100644 --- a/crates/cli/src/commands/common.rs +++ b/crates/cli/src/commands/common.rs @@ -28,6 +28,12 @@ pub struct ScenarioSendTxsCliArgs { /// Example: `scenario:simple.toml` or `scenario:precompiles/modexp.toml` pub testfile: Option, + #[command(flatten)] + pub rpc_args: SendTxsCliArgsInner, +} + +#[derive(Clone, Debug, clap::Args)] +pub struct SendTxsCliArgsInner { /// RPC URL to send requests. 
#[arg( env = "RPC_URL", @@ -116,7 +122,7 @@ Requires --auth-rpc-url and --jwt-secret to be set.", pub override_senders: bool, } -impl ScenarioSendTxsCliArgs { +impl SendTxsCliArgsInner { pub fn rpc_url(&self) -> Result { Ok(Url::parse(self.rpc_url.as_ref())?) } diff --git a/crates/cli/src/commands/contender_subcommand.rs b/crates/cli/src/commands/contender_subcommand.rs index 2a12348e..83cf8f8b 100644 --- a/crates/cli/src/commands/contender_subcommand.rs +++ b/crates/cli/src/commands/contender_subcommand.rs @@ -1,6 +1,7 @@ use clap::Subcommand; use std::path::PathBuf; +use crate::commands::campaign::CampaignCliArgs; use crate::commands::common::ScenarioSendTxsCliArgs; use crate::commands::replay::ReplayCliArgs; use crate::default_scenarios::BuiltinScenarioCli; @@ -71,6 +72,27 @@ pub enum ContenderSubcommand { default_value = "0" )] preceding_runs: u64, + + /// Generate a campaign summary by campaign_id. + #[arg( + long, + help = "Generate reports for all runs associated with the given campaign ID.", + visible_alias = "campaign", + conflicts_with = "last_run_id", + value_name = "CAMPAIGN_ID", + num_args = 0..=1, + default_missing_value = "__LATEST_CAMPAIGN__" + )] + campaign_id: Option, + }, + + #[command( + name = "campaign", + long_about = "Run a composite/meta scenario described by a campaign file." 
+ )] + Campaign { + #[command(flatten)] + args: Box, }, } diff --git a/crates/cli/src/commands/error.rs b/crates/cli/src/commands/error.rs index 79c827a8..af6c00f4 100644 --- a/crates/cli/src/commands/error.rs +++ b/crates/cli/src/commands/error.rs @@ -59,11 +59,11 @@ pub enum ArgsError { SpamRateNotFound, #[error( - "Not enough transactions per duration to cover all spam transactions.\nSet {} or {} to at least {min_tpd}", + "Not enough transactions per duration to cover all spam transactions (got {tpd}).\nSet {} or {} to at least {min_tpd}", bold("--txs-per-block (--tpb)"), bold("--txs-per-second (--tps)") )] - TransactionsPerDurationInsufficient { min_tpd: u64 }, + TransactionsPerDurationInsufficient { min_tpd: u64, tpd: u64 }, #[error( "invalid tx type for blob transactions (using '{current_type}'). must set tx type {}", diff --git a/crates/cli/src/commands/mod.rs b/crates/cli/src/commands/mod.rs index 3371e901..58dd131f 100644 --- a/crates/cli/src/commands/mod.rs +++ b/crates/cli/src/commands/mod.rs @@ -1,4 +1,5 @@ pub mod admin; +pub mod campaign; pub mod common; mod contender_subcommand; pub mod db; @@ -9,10 +10,9 @@ mod spam; mod spamd; use clap::Parser; - pub use contender_subcommand::{ContenderSubcommand, DbCommand}; pub use setup::{setup, SetupCommandArgs}; -pub use spam::{spam, EngineArgs, SpamCliArgs, SpamCommandArgs, SpamScenario}; +pub use spam::{spam, EngineArgs, SpamCampaignContext, SpamCliArgs, SpamCommandArgs, SpamScenario}; pub use spamd::spamd; use crate::error::CliError; diff --git a/crates/cli/src/commands/setup.rs b/crates/cli/src/commands/setup.rs index e16b6884..34f36d7e 100644 --- a/crates/cli/src/commands/setup.rs +++ b/crates/cli/src/commands/setup.rs @@ -1,7 +1,6 @@ -use super::common::ScenarioSendTxsCliArgs; use crate::{ commands::{ - common::EngineParams, + common::{EngineParams, SendTxsCliArgsInner}, error::{ArgsError, SetupError}, SpamScenario, }, @@ -31,7 +30,7 @@ pub async fn setup( db: &(impl contender_core::db::DbOps + Clone + 
Send + Sync + 'static), args: SetupCommandArgs, ) -> Result<(), CliError> { - let ScenarioSendTxsCliArgs { + let SendTxsCliArgsInner { min_balance, tx_type, env, @@ -222,12 +221,12 @@ pub async fn setup( pub struct SetupCommandArgs { pub scenario: SpamScenario, - pub eth_json_rpc_args: ScenarioSendTxsCliArgs, + pub eth_json_rpc_args: SendTxsCliArgsInner, pub seed: RandSeed, } impl SetupCommandArgs { - pub fn new(scenario: SpamScenario, cli_args: ScenarioSendTxsCliArgs) -> Result { + pub fn new(scenario: SpamScenario, cli_args: SendTxsCliArgsInner) -> Result { let seed = RandSeed::seed_from_str(&cli_args.seed.to_owned().unwrap_or(load_seedfile()?)); Ok(Self { scenario, diff --git a/crates/cli/src/commands/spam.rs b/crates/cli/src/commands/spam.rs index 8160730a..adbf341b 100644 --- a/crates/cli/src/commands/spam.rs +++ b/crates/cli/src/commands/spam.rs @@ -1,7 +1,7 @@ use super::common::{ScenarioSendTxsCliArgs, SendSpamCliArgs}; use crate::{ commands::{ - common::{EngineParams, TxTypeCli}, + common::{EngineParams, SendTxsCliArgsInner, TxTypeCli}, error::ArgsError, Result, }, @@ -161,22 +161,35 @@ pub struct SpamCommandArgs { pub seed: RandSeed, } +#[derive(Clone, Debug, Default)] +pub struct SpamCampaignContext { + pub campaign_id: Option, + pub campaign_name: Option, + pub stage_name: Option, + pub scenario_name: Option, +} + impl SpamCommandArgs { pub fn new(scenario: SpamScenario, cli_args: SpamCliArgs) -> Result { Ok(Self { scenario, spam_args: cli_args.clone(), seed: RandSeed::seed_from_str( - &cli_args.eth_json_rpc_args.seed.unwrap_or(load_seedfile()?), + &cli_args + .eth_json_rpc_args + .rpc_args + .seed + .unwrap_or(load_seedfile()?), ), }) } - async fn engine_params(&self) -> Result { + pub async fn engine_params(&self) -> Result { self.spam_args .eth_json_rpc_args + .rpc_args .auth_args - .engine_params(self.spam_args.eth_json_rpc_args.call_forkchoice) + .engine_params(self.spam_args.eth_json_rpc_args.rpc_args.call_forkchoice) .await } @@ -195,14 +208,14 
@@ impl SpamCommandArgs { loops, accounts_per_agent, } = self.spam_args.spam_args.clone(); - let ScenarioSendTxsCliArgs { + let SendTxsCliArgsInner { min_balance, tx_type, bundle_type, env, override_senders, .. - } = self.spam_args.eth_json_rpc_args.clone(); + } = self.spam_args.eth_json_rpc_args.rpc_args.clone(); let mut testconfig = self.testconfig().await?; let spam_len = testconfig.spam.as_ref().map(|s| s.len()).unwrap_or(0); @@ -239,9 +252,10 @@ impl SpamCommandArgs { } // check if txs_per_duration is enough to cover the spam requests - if txs_per_duration < spam_len as u64 { + if (txs_per_duration * duration) < spam_len as u64 { return Err(ArgsError::TransactionsPerDurationInsufficient { min_tpd: spam_len as u64, + tpd: txs_per_duration, } .into()); } @@ -305,6 +319,7 @@ impl SpamCommandArgs { let user_signers = self .spam_args .eth_json_rpc_args + .rpc_args .user_signers_with_defaults(); // distill all from_pool arguments from the spam requests @@ -338,7 +353,11 @@ impl SpamCommandArgs { _ => tx_type.into(), }; - let rpc_client = self.spam_args.eth_json_rpc_args.new_rpc_provider()?; + let rpc_client = self + .spam_args + .eth_json_rpc_args + .rpc_args + .new_rpc_provider()?; let block_time = get_block_time(&rpc_client).await?; check_private_keys(&testconfig, &user_signers); @@ -356,7 +375,7 @@ impl SpamCommandArgs { let all_signer_addrs = agents.all_signer_addresses(); let params = TestScenarioParams { - rpc_url: self.spam_args.eth_json_rpc_args.rpc_url()?, + rpc_url: self.spam_args.eth_json_rpc_args.rpc_args.rpc_url()?, builder_rpc_url: builder_url .to_owned() .map(|url| Url::parse(&url).expect("Invalid builder URL")), @@ -484,6 +503,7 @@ impl SpamCommandArgs { pub async fn testconfig(&self) -> Result { self.spam_args .eth_json_rpc_args + .rpc_args .testconfig(&self.scenario) .await } @@ -552,6 +572,7 @@ pub async fn spam< db: &D, args: &SpamCommandArgs, test_scenario: &mut TestScenario, + run_context: SpamCampaignContext, ) -> Result> { let SpamCommandArgs 
{ scenario, @@ -571,18 +592,25 @@ pub async fn spam< pending_timeout, .. } = spam_args; - let ScenarioSendTxsCliArgs { + let SendTxsCliArgsInner { auth_args, call_forkchoice, .. - } = eth_json_rpc_args; + } = eth_json_rpc_args.rpc_args; let engine_params = auth_args.engine_params(call_forkchoice).await?; let mut run_id = None; - let scenario_name = match scenario { + let base_scenario_name = match scenario { SpamScenario::Testfile(testfile) => testfile.to_owned(), SpamScenario::Builtin(scenario) => scenario.title(), }; + let scenario_name = run_context + .scenario_name + .clone() + .unwrap_or(base_scenario_name); + let campaign_id = run_context.campaign_id.clone(); + let campaign_name = run_context.campaign_name.clone(); + let stage_name = run_context.stage_name.clone(); let rpc_client = test_scenario.rpc_client.clone(); let auth_client = test_scenario.auth_provider.to_owned(); @@ -604,17 +632,19 @@ pub async fn spam< _ => err, }; - let (spammer, txs_per_batch) = if let Some(txs_per_block) = txs_per_block { + let (spammer, txs_per_batch, spam_duration) = if let Some(txs_per_block) = txs_per_block { info!("Blockwise spammer starting. Sending {txs_per_block} txs per block."); ( TypedSpammer::Blockwise(BlockwiseSpammer::new()), txs_per_block, + SpamDuration::Blocks(duration), ) } else if let Some(txs_per_second) = txs_per_second { info!("Timed spammer starting. 
Sending {txs_per_second} txs per second."); ( TypedSpammer::Timed(TimedSpammer::new(std::time::Duration::from_secs(1))), txs_per_second, + SpamDuration::Seconds(duration), ) } else { return Err(ArgsError::SpamRateNotFound.into()); @@ -628,7 +658,7 @@ pub async fn spam< test_scenario.ctx.cancel_token.clone(), ); - if callback.is_log() { + if callback.is_log() || run_context.campaign_id.is_some() { let timestamp = std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH) .expect("Time went backwards") @@ -637,15 +667,27 @@ pub async fn spam< timestamp: timestamp as usize, tx_count: (txs_per_batch * duration) as usize, scenario_name, + campaign_id: campaign_id.clone(), + campaign_name: campaign_name.clone(), + stage_name: stage_name.clone(), rpc_url: test_scenario.rpc_url.to_string(), txs_per_duration: txs_per_batch, - duration: SpamDuration::Blocks(duration), + duration: spam_duration, pending_timeout: Duration::from_secs(block_time * pending_timeout), }; run_id = Some( db.insert_run(&run) .map_err(|e| contender_core::Error::Db(e.into()))?, // TODO: revise this, we shouldn't need to use core errors here ); + if let Some(id) = run_id { + info!( + run_id = id, + campaign_id = campaign_id.as_deref().unwrap_or(""), + campaign_name = campaign_name.as_deref().unwrap_or(""), + stage = stage_name.as_deref().unwrap_or(""), + "Created spam run" + ); + } } spammer diff --git a/crates/cli/src/commands/spamd.rs b/crates/cli/src/commands/spamd.rs index ec6a202e..6c44136b 100644 --- a/crates/cli/src/commands/spamd.rs +++ b/crates/cli/src/commands/spamd.rs @@ -1,4 +1,4 @@ -use super::SpamCommandArgs; +use super::{SpamCampaignContext, SpamCommandArgs}; use crate::CliError; use crate::{ commands::{self}, @@ -68,7 +68,8 @@ pub async fn spamd( } let db = db.clone(); - let spam_res = commands::spam(&db, &args, &mut scenario).await; + let spam_res = + commands::spam(&db, &args, &mut scenario, SpamCampaignContext::default()).await; let wait_time = Duration::from_secs(3); if let 
Err(e) = spam_res { diff --git a/crates/cli/src/default_scenarios/builtin.rs b/crates/cli/src/default_scenarios/builtin.rs index 5a7b3fbb..df3bfced 100644 --- a/crates/cli/src/default_scenarios/builtin.rs +++ b/crates/cli/src/default_scenarios/builtin.rs @@ -90,6 +90,7 @@ impl BuiltinScenarioCli { BuiltinScenarioCli::Erc20(args) => { let seed = spam_args .eth_json_rpc_args + .rpc_args .seed .to_owned() .unwrap_or(load_seedfile()?); diff --git a/crates/cli/src/default_scenarios/contracts/TestToken.hex b/crates/cli/src/default_scenarios/contracts/TestToken.hex index ab3f7024..0326e97d 100644 --- a/crates/cli/src/default_scenarios/contracts/TestToken.hex +++ b/crates/cli/src/default_scenarios/contracts/TestToken.hex @@ -1 +1 @@ -0x60806040523480156200001157600080fd5b5060405162000b0938038062000b09833981016040819052620000349162000223565b604051806040016040528060098152602001682a32b9ba2a37b5b2b760b91b815250604051806040016040528060038152602001622a25a760e91b8152508160039081620000839190620002e4565b506004620000928282620002e4565b505050620000a73382620000ae60201b60201c565b50620003d8565b6001600160a01b038216620000de5760405163ec442f0560e01b8152600060048201526024015b60405180910390fd5b620000ec60008383620000f0565b5050565b6001600160a01b0383166200011f578060026000828254620001139190620003b0565b90915550620001939050565b6001600160a01b03831660009081526020819052604090205481811015620001745760405163391434e360e21b81526001600160a01b03851660048201526024810182905260448101839052606401620000d5565b6001600160a01b03841660009081526020819052604090209082900390555b6001600160a01b038216620001b157600280548290039055620001d0565b6001600160a01b03821660009081526020819052604090208054820190555b816001600160a01b0316836001600160a01b03167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef836040516200021691815260200190565b60405180910390a3505050565b6000602082840312156200023657600080fd5b5051919050565b634e487b7160e01b600052604160045260246000fd5b600181811c908216806200026857607f821691505b6020821081036200028957634e48
7b7160e01b600052602260045260246000fd5b50919050565b601f821115620002df576000816000526020600020601f850160051c81016020861015620002ba5750805b601f850160051c820191505b81811015620002db57828155600101620002c6565b5050505b505050565b81516001600160401b038111156200030057620003006200023d565b620003188162000311845462000253565b846200028f565b602080601f831160018114620003505760008415620003375750858301515b600019600386901b1c1916600185901b178555620002db565b600085815260208120601f198616915b82811015620003815788860151825594840194600190910190840162000360565b5085821015620003a05787850151600019600388901b60f8161c191681555b5050505050600190811b01905550565b80820180821115620003d257634e487b7160e01b600052601160045260246000fd5b92915050565b61072180620003e86000396000f3fe608060405234801561001057600080fd5b50600436106100935760003560e01c8063313ce56711610066578063313ce567146100fe57806370a082311461010d57806395d89b4114610136578063a9059cbb1461013e578063dd62ed3e1461015157600080fd5b806306fdde0314610098578063095ea7b3146100b657806318160ddd146100d957806323b872dd146100eb575b600080fd5b6100a061018a565b6040516100ad919061056a565b60405180910390f35b6100c96100c43660046105d5565b61021c565b60405190151581526020016100ad565b6002545b6040519081526020016100ad565b6100c96100f93660046105ff565b610236565b604051601281526020016100ad565b6100dd61011b36600461063b565b6001600160a01b031660009081526020819052604090205490565b6100a061025a565b6100c961014c3660046105d5565b610269565b6100dd61015f36600461065d565b6001600160a01b03918216600090815260016020908152604080832093909416825291909152205490565b60606003805461019990610690565b80601f01602080910402602001604051908101604052809291908181526020018280546101c590610690565b80156102125780601f106101e757610100808354040283529160200191610212565b820191906000526020600020905b8154815290600101906020018083116101f557829003601f168201915b5050505050905090565b60003361022a818585610277565b60019150505b92915050565b600033610244858285610289565b61024f85858561030c565b506001949350505050565b60606004805461019990610690565b60003361022a81858561030c56
5b610284838383600161036b565b505050565b6001600160a01b03838116600090815260016020908152604080832093861683529290522054600019811461030657818110156102f757604051637dc7a0d960e11b81526001600160a01b038416600482015260248101829052604481018390526064015b60405180910390fd5b6103068484848403600061036b565b50505050565b6001600160a01b03831661033657604051634b637e8f60e11b8152600060048201526024016102ee565b6001600160a01b0382166103605760405163ec442f0560e01b8152600060048201526024016102ee565b610284838383610440565b6001600160a01b0384166103955760405163e602df0560e01b8152600060048201526024016102ee565b6001600160a01b0383166103bf57604051634a1406b160e11b8152600060048201526024016102ee565b6001600160a01b038085166000908152600160209081526040808320938716835292905220829055801561030657826001600160a01b0316846001600160a01b03167f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9258460405161043291815260200190565b60405180910390a350505050565b6001600160a01b03831661046b57806002600082825461046091906106ca565b909155506104dd9050565b6001600160a01b038316600090815260208190526040902054818110156104be5760405163391434e360e21b81526001600160a01b038516600482015260248101829052604481018390526064016102ee565b6001600160a01b03841660009081526020819052604090209082900390555b6001600160a01b0382166104f957600280548290039055610518565b6001600160a01b03821660009081526020819052604090208054820190555b816001600160a01b0316836001600160a01b03167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef8360405161055d91815260200190565b60405180910390a3505050565b60006020808352835180602085015260005b818110156105985785810183015185820160400152820161057c565b506000604082860101526040601f19601f8301168501019250505092915050565b80356001600160a01b03811681146105d057600080fd5b919050565b600080604083850312156105e857600080fd5b6105f1836105b9565b946020939093013593505050565b60008060006060848603121561061457600080fd5b61061d846105b9565b925061062b602085016105b9565b9150604084013590509250925092565b60006020828403121561064d57600080fd5b610656826105b9565b9392505050565b60
00806040838503121561067057600080fd5b610679836105b9565b9150610687602084016105b9565b90509250929050565b600181811c908216806106a457607f821691505b6020821081036106c457634e487b7160e01b600052602260045260246000fd5b50919050565b8082018082111561023057634e487b7160e01b600052601160045260246000fdfea2646970667358221220a39541e410f65911c43f31309b31f8820573a9b5537c0bb5e14096efb9bff7ba64736f6c63430008170033 \ No newline at end of file +0x608060405234801561000f575f5ffd5b506040516115fb3803806115fb833981810160405281019061003191906103aa565b6040518060400160405280600981526020017f54657374546f6b656e00000000000000000000000000000000000000000000008152506040518060400160405280600381526020017f544b4e000000000000000000000000000000000000000000000000000000000081525081600390816100ac9190610609565b5080600490816100bc9190610609565b5050506100cf33826100d560201b60201c565b506107ed565b5f73ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1603610145575f6040517fec442f0500000000000000000000000000000000000000000000000000000000815260040161013c9190610717565b60405180910390fd5b6101565f838361015a60201b60201c565b5050565b5f73ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff16036101aa578060025f82825461019e919061075d565b92505081905550610278565b5f5f5f8573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f2054905081811015610233578381836040517fe450d38c00000000000000000000000000000000000000000000000000000000815260040161022a9392919061079f565b60405180910390fd5b8181035f5f8673ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f2081905550505b5f73ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff16036102bf578060025f8282540392505081905550610309565b805f5f8473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f82825401925050819055505b8173ffffffffffffffffffff
ffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef8360405161036691906107d4565b60405180910390a3505050565b5f5ffd5b5f819050919050565b61038981610377565b8114610393575f5ffd5b50565b5f815190506103a481610380565b92915050565b5f602082840312156103bf576103be610373565b5b5f6103cc84828501610396565b91505092915050565b5f81519050919050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52604160045260245ffd5b7f4e487b71000000000000000000000000000000000000000000000000000000005f52602260045260245ffd5b5f600282049050600182168061045057607f821691505b6020821081036104635761046261040c565b5b50919050565b5f819050815f5260205f209050919050565b5f6020601f8301049050919050565b5f82821b905092915050565b5f600883026104c57fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8261048a565b6104cf868361048a565b95508019841693508086168417925050509392505050565b5f819050919050565b5f61050a61050561050084610377565b6104e7565b610377565b9050919050565b5f819050919050565b610523836104f0565b61053761052f82610511565b848454610496565b825550505050565b5f5f905090565b61054e61053f565b61055981848461051a565b505050565b5b8181101561057c576105715f82610546565b60018101905061055f565b5050565b601f8211156105c15761059281610469565b61059b8461047b565b810160208510156105aa578190505b6105be6105b68561047b565b83018261055e565b50505b505050565b5f82821c905092915050565b5f6105e15f19846008026105c6565b1980831691505092915050565b5f6105f983836105d2565b9150826002028217905092915050565b610612826103d5565b67ffffffffffffffff81111561062b5761062a6103df565b5b6106358254610439565b610640828285610580565b5f60209050601f831160018114610671575f841561065f578287015190505b61066985826105ee565b8655506106d0565b601f19841661067f86610469565b5f5b828110156106a657848901518255600182019150602085019450602081019050610681565b868310156106c357848901516106bf601f8916826105d2565b8355505b6001600288020188555050505b505050505050565b5f73ffffffffffffffffffffffffffffffffffffffff82169050919050565b5f6107018261
06d8565b9050919050565b610711816106f7565b82525050565b5f60208201905061072a5f830184610708565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f61076782610377565b915061077283610377565b925082820190508082111561078a57610789610730565b5b92915050565b61079981610377565b82525050565b5f6060820190506107b25f830186610708565b6107bf6020830185610790565b6107cc6040830184610790565b949350505050565b5f6020820190506107e75f830184610790565b92915050565b610e01806107fa5f395ff3fe608060405234801561000f575f5ffd5b5060043610610091575f3560e01c8063313ce56711610064578063313ce5671461013157806370a082311461014f57806395d89b411461017f578063a9059cbb1461019d578063dd62ed3e146101cd57610091565b806306fdde0314610095578063095ea7b3146100b357806318160ddd146100e357806323b872dd14610101575b5f5ffd5b61009d6101fd565b6040516100aa9190610a7a565b60405180910390f35b6100cd60048036038101906100c89190610b2b565b61028d565b6040516100da9190610b83565b60405180910390f35b6100eb6102af565b6040516100f89190610bab565b60405180910390f35b61011b60048036038101906101169190610bc4565b6102b8565b6040516101289190610b83565b60405180910390f35b6101396102d8565b6040516101469190610c2f565b60405180910390f35b61016960048036038101906101649190610c48565b6102e0565b6040516101769190610bab565b60405180910390f35b610187610325565b6040516101949190610a7a565b60405180910390f35b6101b760048036038101906101b29190610b2b565b6103b5565b6040516101c49190610b83565b60405180910390f35b6101e760048036038101906101e29190610c73565b6103d7565b6040516101f49190610bab565b60405180910390f35b60606003805461020c90610cde565b80601f016020809104026020016040519081016040528092919081815260200182805461023890610cde565b80156102835780601f1061025a57610100808354040283529160200191610283565b820191905f5260205f20905b81548152906001019060200180831161026657829003601f168201915b5050505050905090565b5f5f610297610459565b90506102a4818585610460565b600191505092915050565b5f600254905090565b5f6102c4843384610460565b6102cf848484610472565b90509392505050565b5f6012905090565b5f5f5f8373ffffffffffff
ffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f20549050919050565b60606004805461033490610cde565b80601f016020809104026020016040519081016040528092919081815260200182805461036090610cde565b80156103ab5780601f10610382576101008083540402835291602001916103ab565b820191905f5260205f20905b81548152906001019060200180831161038e57829003601f168201915b5050505050905090565b5f5f6103bf610459565b90506103cc8185856104a0565b600191505092915050565b5f60015f8473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f8373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f2054905092915050565b5f33905090565b61046d8383836001610590565b505050565b5f5f61047c610459565b905061048985828561075f565b6104948585856104a0565b60019150509392505050565b5f73ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff1603610510575f6040517f96c6fd1e0000000000000000000000000000000000000000000000000000000081526004016105079190610d1d565b60405180910390fd5b5f73ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1603610580575f6040517fec442f050000000000000000000000000000000000000000000000000000000081526004016105779190610d1d565b60405180910390fd5b61058b8383836107f1565b505050565b5f73ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff1603610600575f6040517fe602df050000000000000000000000000000000000000000000000000000000081526004016105f79190610d1d565b60405180910390fd5b5f73ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff1603610670575f6040517f94280d620000000000000000000000000000000000000000000000000000000081526004016106679190610d1d565b60405180910390fd5b8160015f8673ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f8573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffff
ffffffffffffffff1681526020019081526020015f20819055508015610759578273ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff167f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925846040516107509190610bab565b60405180910390a35b50505050565b5f61076a84846103d7565b90507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff81146107eb57818110156107dc578281836040517ffb8f41b20000000000000000000000000000000000000000000000000000000081526004016107d393929190610d36565b60405180910390fd5b6107ea84848484035f610590565b5b50505050565b5f73ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff1603610841578060025f8282546108359190610d98565b9250508190555061090f565b5f5f5f8573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f20549050818110156108ca578381836040517fe450d38c0000000000000000000000000000000000000000000000000000000081526004016108c193929190610d36565b60405180910390fd5b8181035f5f8673ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f2081905550505b5f73ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1603610956578060025f82825403925050819055506109a0565b805f5f8473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020015f205f82825401925050819055505b8173ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef836040516109fd9190610bab565b60405180910390a3505050565b5f81519050919050565b5f82825260208201905092915050565b8281835e5f83830152505050565b5f601f19601f8301169050919050565b5f610a4c82610a0a565b610a568185610a14565b9350610a66818560208601610a24565b610a6f81610a32565b840191505092915050565b5f6020820190508181035f830152610a928184610a42565b905092915050565b5f5ffd5b5f73ffffffffffffffffffffffffffffffffffffffff82169050919050
565b5f610ac782610a9e565b9050919050565b610ad781610abd565b8114610ae1575f5ffd5b50565b5f81359050610af281610ace565b92915050565b5f819050919050565b610b0a81610af8565b8114610b14575f5ffd5b50565b5f81359050610b2581610b01565b92915050565b5f5f60408385031215610b4157610b40610a9a565b5b5f610b4e85828601610ae4565b9250506020610b5f85828601610b17565b9150509250929050565b5f8115159050919050565b610b7d81610b69565b82525050565b5f602082019050610b965f830184610b74565b92915050565b610ba581610af8565b82525050565b5f602082019050610bbe5f830184610b9c565b92915050565b5f5f5f60608486031215610bdb57610bda610a9a565b5b5f610be886828701610ae4565b9350506020610bf986828701610ae4565b9250506040610c0a86828701610b17565b9150509250925092565b5f60ff82169050919050565b610c2981610c14565b82525050565b5f602082019050610c425f830184610c20565b92915050565b5f60208284031215610c5d57610c5c610a9a565b5b5f610c6a84828501610ae4565b91505092915050565b5f5f60408385031215610c8957610c88610a9a565b5b5f610c9685828601610ae4565b9250506020610ca785828601610ae4565b9150509250929050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52602260045260245ffd5b5f6002820490506001821680610cf557607f821691505b602082108103610d0857610d07610cb1565b5b50919050565b610d1781610abd565b82525050565b5f602082019050610d305f830184610d0e565b92915050565b5f606082019050610d495f830186610d0e565b610d566020830185610b9c565b610d636040830184610b9c565b949350505050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffd5b5f610da282610af8565b9150610dad83610af8565b9250828201905080821115610dc557610dc4610d6b565b5b9291505056fea26469706673582212207aa55ef63d6cdc9f6d84993b356269877b6406d3fd27b749d0f4eea41d4378f164736f6c634300081b0033 \ No newline at end of file diff --git a/crates/cli/src/default_scenarios/erc20.rs b/crates/cli/src/default_scenarios/erc20.rs index 843e60fd..5e1caf37 100644 --- a/crates/cli/src/default_scenarios/erc20.rs +++ b/crates/cli/src/default_scenarios/erc20.rs @@ -10,7 +10,7 @@ use crate::{ 
default_scenarios::{builtin::ToTestConfig, contracts::test_token}, }; -#[derive(Clone, Debug, clap::Parser)] +#[derive(Clone, Default, Debug, clap::Parser)] pub struct Erc20CliArgs { #[arg( short, @@ -33,7 +33,7 @@ pub struct Erc20CliArgs { #[arg( short = 'r', long = "recipient", - long_help = "The address to receive tokens sent by spam txs. By default, the sender receives their own tokens." + long_help = "The address to receive tokens sent by spam txs. By default, address(0) receives the tokens." )] pub token_recipient: Option
, } @@ -107,7 +107,7 @@ impl ToTestConfig for Erc20Args { value: None, min: Some(U256::from(1)), max: Some( - U256::from_str("0x00ffffffffffffffffffffffffffffffffffffffff").unwrap(), + U256::from_str("0x0000000000ffffffffffffffffffffffffffffffff").unwrap(), ), }]); } diff --git a/crates/cli/src/default_scenarios/revert.rs b/crates/cli/src/default_scenarios/revert.rs index 8eaac297..13e1e00f 100644 --- a/crates/cli/src/default_scenarios/revert.rs +++ b/crates/cli/src/default_scenarios/revert.rs @@ -15,6 +15,12 @@ pub struct RevertCliArgs { pub gas_use: u64, } +impl Default for RevertCliArgs { + fn default() -> Self { + Self { gas_use: 30_000 } + } +} + impl ToTestConfig for RevertCliArgs { fn to_testconfig(&self) -> contender_testfile::TestConfig { TestConfig { diff --git a/crates/cli/src/default_scenarios/storage.rs b/crates/cli/src/default_scenarios/storage.rs index 0c32d5dc..3d3f5f69 100644 --- a/crates/cli/src/default_scenarios/storage.rs +++ b/crates/cli/src/default_scenarios/storage.rs @@ -21,6 +21,15 @@ pub struct StorageStressCliArgs { pub num_iterations: u64, } +impl Default for StorageStressCliArgs { + fn default() -> Self { + Self { + num_slots: 500, + num_iterations: 1, + } + } +} + #[derive(Clone, Debug)] pub struct StorageStressArgs { pub num_slots: u64, diff --git a/crates/cli/src/default_scenarios/stress.rs b/crates/cli/src/default_scenarios/stress.rs index 9124aa46..726e02af 100644 --- a/crates/cli/src/default_scenarios/stress.rs +++ b/crates/cli/src/default_scenarios/stress.rs @@ -89,6 +89,23 @@ pub struct StressCliArgs { pub with_fails: bool, } +impl Default for StressCliArgs { + fn default() -> Self { + Self { + disable_storage: false, + disable_transfers: false, + disable_opcodes: None, + disable_precompiles: None, + disable_all_precompiles: false, + disable_all_opcodes: false, + storage: StorageStressCliArgs::default(), + transfers: TransferStressCliArgs::default(), + opcode_iterations: 10, + with_fails: false, + } + } +} + impl ToTestConfig 
for StressCliArgs { fn to_testconfig(&self) -> TestConfig { let mut configs = vec![]; diff --git a/crates/cli/src/default_scenarios/transfers.rs b/crates/cli/src/default_scenarios/transfers.rs index bb21b936..e86de6a5 100644 --- a/crates/cli/src/default_scenarios/transfers.rs +++ b/crates/cli/src/default_scenarios/transfers.rs @@ -24,6 +24,15 @@ pub struct TransferStressCliArgs { pub recipient: Option
, } +impl Default for TransferStressCliArgs { + fn default() -> Self { + Self { + amount: parse_amount("0.001 eth").expect("valid default amount"), + recipient: None, + } + } +} + #[derive(Clone, Debug)] pub struct TransferStressArgs { pub amount: U256, diff --git a/crates/cli/src/default_scenarios/uni_v2.rs b/crates/cli/src/default_scenarios/uni_v2.rs index 03a75bb4..70d97a8d 100644 --- a/crates/cli/src/default_scenarios/uni_v2.rs +++ b/crates/cli/src/default_scenarios/uni_v2.rs @@ -66,6 +66,18 @@ pub struct UniV2CliArgs { pub token_trade_amount: Option, } +impl Default for UniV2CliArgs { + fn default() -> Self { + Self { + num_tokens: 2, + weth_per_token: parse_amount("1 eth").expect("valid default amount"), + initial_token_supply: parse_amount("5000000 eth").expect("valid default amount"), + weth_trade_amount: None, + token_trade_amount: None, + } + } +} + #[derive(Debug, Clone)] pub struct UniV2Args { /// The number of tokens to create in the scenario. Each token will be paired with WETH and each other token. 
@@ -100,7 +112,7 @@ impl From for UniV2Args { .unwrap_or(args.weth_per_token / U256::from(10_000)), // default to 0.01% of the pool's initial WETH token_trade_amount: args .token_trade_amount - .unwrap_or(args.initial_token_supply / U256::from(10_000)), // default to 0.01% of the initial supply + .unwrap_or(args.initial_token_supply / U256::from(100_000)), // default to 0.001% of the initial supply } } } @@ -171,6 +183,7 @@ impl ToTestConfig for UniV2Args { token_a.name, token_b.name )) .with_args(&[token_a.template_name(), token_b.template_name()]) + .with_gas_limit(3_000_000) }; let add_liquidity = |token_a: &CompiledContract, token_b: &CompiledContract, @@ -200,12 +213,14 @@ impl ToTestConfig for UniV2Args { "{_sender}".to_owned(), deadline.to_string(), ]) + .with_gas_limit(300_000) }; let transfer = |token: &CompiledContract, to: &CompiledContract, amount: U256| { FunctionCallDefinition::new(token.template_name()) .with_signature("transfer(address,uint256)") .with_kind(format!("{}_transfer_to_{}", token.name, to.name)) .with_args(&[to.template_name(), amount.to_string()]) + .with_gas_limit(200_000) }; let approve_max = |token: &CompiledContract, spender: &CompiledContract| { FunctionCallDefinition::new(token.template_name()) diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index a9985cef..b33f7b9d 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -68,7 +68,7 @@ async fn run() -> Result<(), CliError> { "scenario:simple.toml" }; let scenario = SpamScenario::Testfile(testfile.to_owned()); - let args = SetupCommandArgs::new(scenario, *args)?; + let args = SetupCommandArgs::new(scenario, args.rpc_args)?; commands::setup(&db, args).await? } @@ -87,7 +87,7 @@ async fn run() -> Result<(), CliError> { let SpamCliArgs { eth_json_rpc_args: ScenarioSendTxsCliArgs { - testfile, rpc_url, .. + testfile, rpc_args, .. }, spam_args, gen_report, @@ -97,7 +97,7 @@ async fn run() -> Result<(), CliError> { let SendSpamCliArgs { loops, .. 
} = spam_args.to_owned(); let client = ClientBuilder::default() - .http(Url::from_str(&rpc_url).map_err(ArgsError::UrlParse)?); + .http(Url::from_str(&rpc_args.rpc_url).map_err(ArgsError::UrlParse)?); let provider = DynProvider::new( ProviderBuilder::new() .network::() @@ -138,20 +138,49 @@ async fn run() -> Result<(), CliError> { ContenderSubcommand::Report { last_run_id, preceding_runs, + campaign_id, } => { - contender_report::command::report( - last_run_id, - preceding_runs, - &db, - &data_dir().expect("invalid data dir"), - ) - .await - .map_err(CliError::Report)?; + if let Some(campaign_id) = campaign_id { + let resolved_campaign_id = if campaign_id == "__LATEST_CAMPAIGN__" { + db.latest_campaign_id() + .map_err(CliError::Db)? + .ok_or_else(|| { + CliError::Report(contender_report::Error::CampaignNotFound( + "latest".to_string(), + )) + })? + } else { + campaign_id + }; + if preceding_runs > 0 { + warn!("--preceding-runs is ignored when --campaign is provided"); + } + contender_report::command::report_campaign( + &resolved_campaign_id, + &db, + &data_dir().expect("invalid data dir"), + ) + .await + .map_err(CliError::Report)?; + } else { + contender_report::command::report( + last_run_id, + preceding_runs, + &db, + &data_dir().expect("invalid data dir"), + ) + .await + .map_err(CliError::Report)?; + } } ContenderSubcommand::Admin { command } => { handle_admin_command(command, db).await?; } + + ContenderSubcommand::Campaign { args } => { + commands::campaign::run_campaign(&db, *args).await?; + } }; Ok(()) diff --git a/crates/core/src/db/mock.rs b/crates/core/src/db/mock.rs index f8e2057c..622738e7 100644 --- a/crates/core/src/db/mock.rs +++ b/crates/core/src/db/mock.rs @@ -80,6 +80,14 @@ impl DbOps for MockDb { Ok(None) } + fn latest_campaign_id(&self) -> Result, Self::Error> { + Ok(None) + } + + fn get_runs_by_campaign(&self, _campaign_id: &str) -> Result, Self::Error> { + Ok(vec![]) + } + fn num_runs(&self) -> Result { Ok(0) } diff --git 
a/crates/core/src/db/runs.rs b/crates/core/src/db/runs.rs index 7c98cfbc..fe34ccfa 100644 --- a/crates/core/src/db/runs.rs +++ b/crates/core/src/db/runs.rs @@ -21,6 +21,9 @@ pub struct SpamRun { pub timestamp: usize, pub tx_count: usize, pub scenario_name: String, + pub campaign_id: Option, + pub campaign_name: Option, + pub stage_name: Option, pub rpc_url: String, pub txs_per_duration: u64, pub duration: SpamDuration, @@ -31,6 +34,9 @@ pub struct SpamRunRequest { pub timestamp: usize, pub tx_count: usize, pub scenario_name: String, + pub campaign_id: Option, + pub campaign_name: Option, + pub stage_name: Option, pub rpc_url: String, pub txs_per_duration: u64, pub duration: SpamDuration, diff --git a/crates/core/src/db/trait.rs b/crates/core/src/db/trait.rs index 3a16252c..42fc2647 100644 --- a/crates/core/src/db/trait.rs +++ b/crates/core/src/db/trait.rs @@ -25,8 +25,13 @@ pub trait DbOps { fn get_run(&self, run_id: u64) -> Result, Self::Error>; + fn get_runs_by_campaign(&self, campaign_id: &str) -> Result, Self::Error>; + fn get_run_txs(&self, run_id: u64) -> Result, Self::Error>; + /// Get latest non-null campaign_id (by run id desc). + fn latest_campaign_id(&self) -> Result, Self::Error>; + /// Insert a new named tx into the database. Used for named contracts. 
fn insert_named_txs( &self, diff --git a/crates/core/src/orchestrator.rs b/crates/core/src/orchestrator.rs index 4ce8c666..ef4c5191 100644 --- a/crates/core/src/orchestrator.rs +++ b/crates/core/src/orchestrator.rs @@ -385,6 +385,9 @@ impl RunOpts { .as_millis() as usize, tx_count: (self.periods * self.txs_per_period) as usize, scenario_name: self.name.to_owned(), + campaign_id: None, + campaign_name: None, + stage_name: None, rpc_url: rpc_url.as_ref().to_owned(), txs_per_duration: self.txs_per_period, duration: spam_duration, diff --git a/crates/core/src/test_scenario.rs b/crates/core/src/test_scenario.rs index 03b60802..093094d7 100644 --- a/crates/core/src/test_scenario.rs +++ b/crates/core/src/test_scenario.rs @@ -16,7 +16,6 @@ use crate::{ util::{get_blob_fee_maybe, get_block_time, ExtraTxParams}, Result, }; -use alloy::transports::http::reqwest; use alloy::{ consensus::constants::{ETH_TO_WEI, GWEI_TO_WEI}, consensus::{Transaction, TxType}, @@ -33,6 +32,7 @@ use alloy::{ serde::WithOtherFields, signers::local::{LocalSigner, PrivateKeySigner}, }; +use alloy::{network::ReceiptResponse, transports::http::reqwest}; use contender_bundle_provider::{ bundle::BundleType, bundle_provider::new_basic_bundle, revert_bundle::RevertProtectBundleRequest, BundleClient, @@ -138,6 +138,15 @@ pub struct TestScenarioParams { pub rpc_batch_size: u64, } +pub struct SpamRunContext<'a, F: SpamCallback + 'static> { + pub gas_sender: &'a tokio::sync::mpsc::Sender, + pub nonce_sender: &'a tokio::sync::mpsc::Sender<(Address, i32)>, + pub success_sender: &'a tokio::sync::mpsc::Sender<()>, + pub callback_handler: &'a F, + pub tx_handlers: &'a HashMap>, + pub cancel_token: &'a CancellationToken, +} + #[derive(Clone, Debug)] pub struct ExecutionContext { /// Adds this amount of wei per gas to the gas price given to each transaction. May be negative to subtract gas. 
@@ -157,6 +166,10 @@ impl ExecutionContext { pub fn add_to_gas_price(&mut self, amount: i128) { self.gas_price_adder += amount; } + + pub fn cancel_run(&self) { + self.cancel_token.cancel(); + } } struct DeployContractParams<'a, D: DbOps> { @@ -675,6 +688,16 @@ where // get receipt using provider (not wallet) to allow any receipt type (support non-eth chains) let receipt = res.get_receipt().await?; + debug!( + "got receipt for {:?}: ({}) {}", + tx_req.kind, + if receipt.status() { + "LANDED" + } else { + "REVERTED" + }, + receipt.transaction_hash + ); if let Some(name) = tx_req.name { db.insert_named_txs( @@ -880,6 +903,8 @@ where // takes gas to add to the gas price for the next batch (if needed) let gas_sender = Arc::new(context_handler.add_gas); + // shifts nonce of address by given amount + let nonce_sender = Arc::new(context_handler.shift_nonce); // counts number of txs that were sent successfully let success_sender = Arc::new(context_handler.success_send_tx); let bundle_type = self.bundle_type; @@ -905,6 +930,7 @@ where let callback_handler = callback_handler.clone(); let gas_sender = gas_sender.clone(); let success_sender = success_sender.clone(); + let nonce_sender = nonce_sender.clone(); let cancel_token = self.ctx.cancel_token.clone(); let error_sender = error_sender.clone(); @@ -918,6 +944,14 @@ where let res = rpc_client .send_tx_envelope(AnyTxEnvelope::Ethereum(*signed_tx)) .await; + let ctx = SpamRunContext { + nonce_sender: &nonce_sender, + success_sender: &success_sender, + gas_sender: &gas_sender, + callback_handler: callback_handler.as_ref(), + tx_handlers: &tx_handlers, + cancel_token: &cancel_token, + }; match res { Ok(_) => { @@ -927,11 +961,7 @@ where &req, extra, None, - &gas_sender, - &success_sender, - callback_handler.as_ref(), - &tx_handlers, - &cancel_token, + &ctx ) .await; Vec::>>::new() @@ -946,11 +976,7 @@ where &req, extra, Some(msg_string.as_str()), - &gas_sender, - &success_sender, - callback_handler.as_ref(), - &tx_handlers, - 
&cancel_token, + &ctx ) .await; Vec::>>::new() @@ -1055,6 +1081,7 @@ where let tx_handlers = self.msg_handles.clone(); let callback_handler = callback_handler.clone(); let gas_sender = gas_sender.clone(); + let nonce_sender = nonce_sender.clone(); let success_sender = success_sender.clone(); let cancel_token = self.ctx.cancel_token.clone(); let http_client = http_client.clone(); @@ -1114,18 +1141,15 @@ where .and_then(|e| e.get("message")) .and_then(|m| m.as_str()); - handle_tx_outcome( - tx_hash, - &req, - extra, - error_msg, - &gas_sender, - &success_sender, - callback_handler.as_ref(), - &tx_handlers, - &cancel_token, - ) - .await; + let ctx = SpamRunContext { + nonce_sender: &nonce_sender, + success_sender: &success_sender, + gas_sender: &gas_sender, + callback_handler: callback_handler.as_ref(), + tx_handlers: &tx_handlers, + cancel_token: &cancel_token, + }; + handle_tx_outcome(tx_hash, &req, extra, error_msg, &ctx).await; } })); } @@ -1152,9 +1176,12 @@ where // initialize async context handlers let (success_sender, mut success_receiver) = tokio::sync::mpsc::channel(num_payloads); let (add_gas_sender, mut add_gas_receiver) = tokio::sync::mpsc::channel(num_payloads); + let (shift_nonce_sender, mut shift_nonce_receiver) = + tokio::sync::mpsc::channel(tx_req_chunks[0].len()); let context = SpamContextHandler { success_send_tx: success_sender, add_gas: add_gas_sender, + shift_nonce: shift_nonce_sender, }; // send this batch of spam txs @@ -1199,6 +1226,30 @@ where self.ctx.add_to_gas_price(gas as i128); } + // shift nonces if needed + // Accumulate all nonce adjustments per address to avoid race conditions + // where multiple updates for the same address read stale nonce values + shift_nonce_receiver.close(); + let mut nonce_adjustments: HashMap = HashMap::new(); + while let Some((addr, shift)) = shift_nonce_receiver.recv().await { + *nonce_adjustments.entry(addr).or_insert(0) += shift; + } + + // Apply accumulated adjustments + for (addr, total_shift) in 
nonce_adjustments { + let current_nonce = self.nonces.get(&addr).copied().unwrap_or_default(); + let new_nonce = if total_shift < 0 { + current_nonce.saturating_sub(total_shift.unsigned_abs() as u64) + } else { + current_nonce.saturating_add(total_shift as u64) + }; + debug!( + "nonce for {} adjusted by {} (from {} to {})", + addr, total_shift, current_nonce, new_nonce + ); + self.nonces.insert(addr, new_nonce); + } + // decrease gas price if all txs were sent successfully success_receiver.close(); let mut success_count = 0; @@ -1503,46 +1554,72 @@ where } } -#[allow(clippy::too_many_arguments)] -async fn handle_tx_outcome( +async fn handle_tx_outcome<'a, F: SpamCallback + 'static>( tx_hash: alloy::primitives::TxHash, req: &NamedTxRequest, mut extra: RuntimeTxInfo, error_msg: Option<&str>, - gas_sender: &tokio::sync::mpsc::Sender, - success_sender: &tokio::sync::mpsc::Sender<()>, - callback_handler: &F, - tx_handlers: &HashMap>, - cancel_token: &CancellationToken, + ctx: &SpamRunContext<'a, F>, ) { // gas bump if needed if let Some(msg) = error_msg { - let lower = msg.to_lowercase(); - if lower.contains("replacement transaction underpriced") { + let message = msg.to_lowercase(); + if message.contains("replacement transaction underpriced") { let bump = req .tx .max_fee_per_gas .unwrap_or(req.tx.gas_price.unwrap_or(1_000_000_000)) / 10; - let _ = gas_sender.send(bump).await; + if let Err(e) = ctx.gas_sender.send(bump).await { + warn!("failed to send gas bump for tx {}: {:?}", tx_hash, e); + } + } else if message.contains("nonce too low") { + if let Some(from) = req.tx.from { + debug!("incrementing nonce for {}", from); + if let Err(e) = ctx.nonce_sender.send((from, 1)).await { + warn!("failed to send nonce increment for {}: {:?}", from, e); + } + } else { + warn!( + "nonce too low error but tx.from is missing for tx {}", + tx_hash + ); + } + } else if message.contains("nonce too high") { + if let Some(from) = req.tx.from { + debug!("decrementing nonce for {}", from); 
+ if let Err(e) = ctx.nonce_sender.send((from, -1)).await { + warn!("failed to send nonce decrement for {}: {:?}", from, e); + } + } else { + warn!( + "nonce too high error but tx.from is missing for tx {}", + tx_hash + ); + } } warn!("error from tx {tx_hash}: {msg}"); extra = extra.with_error(msg.to_string()); } else { // success path - let _ = success_sender.send(()).await; + if let Err(e) = ctx.success_sender.send(()).await { + warn!( + "failed to send success notification for tx {}: {:?}", + tx_hash, e + ); + } } - let maybe_handle = callback_handler.on_tx_sent( + let maybe_handle = ctx.callback_handler.on_tx_sent( PendingTransactionConfig::new(tx_hash), req, extra, - Some(tx_handlers.clone()), + Some(ctx.tx_handlers.clone()), ); if let Some(handle) = maybe_handle { tokio::select! { - _ = cancel_token.cancelled() => { + _ = ctx.cancel_token.cancelled() => { debug!("cancelled spammer task"); } _ = handle => { /* wait for callback */ } @@ -1641,6 +1718,7 @@ where struct SpamContextHandler { add_gas: tokio::sync::mpsc::Sender, success_send_tx: tokio::sync::mpsc::Sender<()>, + shift_nonce: tokio::sync::mpsc::Sender<(Address, i32)>, } trait TxKey { diff --git a/crates/report/src/command.rs b/crates/report/src/command.rs index ab4994f8..540e74f1 100644 --- a/crates/report/src/command.rs +++ b/crates/report/src/command.rs @@ -1,4 +1,4 @@ -use super::gen_html::{build_html_report, ReportMetadata}; +use super::gen_html::{build_html_report, CampaignMetadata, ReportMetadata}; use super::util::std_deviation; use crate::block_trace::{estimate_block_data, get_block_data, get_block_traces}; use crate::cache::CacheFile; @@ -14,9 +14,11 @@ use alloy::network::AnyNetwork; use alloy::providers::DynProvider; use alloy::{providers::ProviderBuilder, transports::http::reqwest::Url}; use contender_core::buckets::{Bucket, BucketsExt}; +use contender_core::db::SpamRun; use contender_core::db::{DbOps, RunTx}; use csv::WriterBuilder; use serde::{Deserialize, Serialize}; +use serde_json; 
use std::collections::BTreeMap; use std::env; use std::fs; @@ -87,6 +89,16 @@ pub async fn report( run_data.push(run); } } + let campaign_context = run_data.iter().rev().find_map(|run| { + run.campaign_id + .as_ref() + .map(|campaign_id| CampaignMetadata { + id: Some(campaign_id.to_owned()), + name: run.campaign_name.clone(), + stage: run.stage_name.clone(), + scenario: Some(run.scenario_name.clone()), + }) + }); // collect all unique scenario_name values from run_data let scenario_names: Vec = run_data .iter() @@ -261,6 +273,7 @@ pub async fn report( pending_txs: pending_txs.echart_data(), latency_data_sendrawtransaction: latency_chart_sendrawtx.echart_data(), }, + campaign: campaign_context, }, &format!("{data_dir}/reports"), )?; @@ -287,6 +300,328 @@ fn save_csv_report(id: u64, txs: &[RunTx], reports_dir: &str) -> Result<()> { Ok(()) } +#[derive(Clone, Debug, Serialize)] +struct CampaignRunSummary { + run_id: u64, + scenario_name: String, + stage_name: Option, + tx_count: usize, + duration: String, + report_path: String, +} + +#[derive(Clone, Debug, Serialize)] +struct CampaignReportSummary { + campaign_id: String, + campaign_name: Option, + runs: Vec, + totals_by_stage: BTreeMap>, + overall: Option, + stage_scenario: Vec, + logs_incomplete: bool, +} + +#[derive(Clone, Debug, Serialize)] +struct CampaignOverall { + total_tx_count: u64, + total_error_count: u64, + error_rate: f64, + campaign_start_time: Option, + campaign_end_time: Option, + campaign_duration_secs: u64, + avg_tps: f64, +} + +#[derive(Clone, Debug, Serialize)] +struct StageScenarioSummary { + stage_name: String, + scenario_name: String, + total_tx_count: u64, + total_error_count: u64, + error_rate: f64, + duration_secs: u64, + avg_tps: f64, +} + +pub async fn report_campaign( + campaign_id: &str, + db: &(impl DbOps + Clone + Send + Sync + 'static), + data_dir: &str, +) -> Result<()> { + let runs = db.get_runs_by_campaign(campaign_id).map_err(|e| e.into())?; + if runs.is_empty() { + return 
Err(Error::CampaignNotFound(campaign_id.to_owned())); + } + + let data_path = Path::new(data_dir).join("reports"); + if !data_path.exists() { + fs::create_dir_all(&data_path)?; + } + + let mut summaries = Vec::new(); + let mut totals: BTreeMap> = BTreeMap::new(); + let campaign_name = runs.first().and_then(|r| r.campaign_name.clone()); + let mut overall_acc = OverallAccumulator::default(); + let mut stage_acc: BTreeMap<(String, String), StageAccumulator> = BTreeMap::new(); + let mut logs_incomplete = false; + + let previous_browser = env::var("BROWSER").ok(); + // Avoid opening a browser for every per-run report when generating a campaign summary. + env::set_var("BROWSER", "none"); + + let run_generation_result: Result<()> = async { + for run in &runs { + // generate per-run report (single run) + report(Some(run.id), 0, db, data_dir).await?; + let run_txs = db.get_run_txs(run.id).map_err(|e| e.into())?; + let (run_tx_count_from_logs, run_error_count_from_logs) = + tx_and_error_counts(&run_txs, run.tx_count); + let logs_complete = + !run_txs.is_empty() && (run_tx_count_from_logs as usize) >= run.tx_count; + if !logs_complete { + logs_incomplete = true; + } + + let run_tx_count: u64 = if logs_complete { + run_tx_count_from_logs + } else { + run.tx_count as u64 + }; + let run_error_count: u64 = if logs_complete { + run_error_count_from_logs + } else { + 0 + }; + + let (start_ms, end_ms) = if logs_complete { + run_time_bounds(run, &run_txs) + } else { + run_time_bounds(run, &[]) + }; + + overall_acc.add_run(run_tx_count, run_error_count, start_ms, end_ms); + + let stage_key = run + .stage_name + .clone() + .unwrap_or_else(|| "unspecified-stage".to_string()); + let scenario_key = run.scenario_name.clone(); + + stage_acc + .entry((stage_key.clone(), scenario_key.clone())) + .or_default() + .add_run(run_tx_count, run_error_count, start_ms, end_ms); + + let stage_key = run + .stage_name + .clone() + .unwrap_or_else(|| "unspecified-stage".to_string()); + let scenario_key = 
run.scenario_name.clone(); + totals + .entry(stage_key.clone()) + .or_default() + .entry(scenario_key.clone()) + .and_modify(|count| *count += run_tx_count as usize) + .or_insert(run_tx_count as usize); + + let report_file = format!("report-{}-{}.html", run.id, run.id); + summaries.push(CampaignRunSummary { + run_id: run.id, + scenario_name: run.scenario_name.clone(), + stage_name: run.stage_name.clone(), + tx_count: run_tx_count as usize, + duration: run.duration.to_string(), + report_path: report_file, + }); + } + Ok(()) + } + .await; + if let Some(prev) = previous_browser { + env::set_var("BROWSER", prev); + } else { + env::remove_var("BROWSER"); + } + run_generation_result?; + + let summary = CampaignReportSummary { + campaign_id: campaign_id.to_owned(), + campaign_name, + runs: summaries, + totals_by_stage: totals, + overall: Some(overall_acc.into_overall()), + stage_scenario: stage_acc + .into_iter() + .map(|((stage, scenario), acc)| acc.into_summary(stage, scenario)) + .collect(), + logs_incomplete, + }; + + let index_path = data_path.join(format!("campaign-{campaign_id}.html")); + let html = render_campaign_html(&summary)?; + fs::write(&index_path, html)?; + + let summary_path = data_path.join(format!("campaign-{campaign_id}.json")); + fs::write(&summary_path, serde_json::to_string_pretty(&summary)?)?; + + info!( + campaign_id = %campaign_id, + html = %index_path.display(), + json = %summary_path.display(), + "Generated campaign report" + ); + + Ok(()) +} + +fn render_campaign_html(summary: &CampaignReportSummary) -> Result { + let template = include_str!("template_campaign.html.handlebars"); + let html = handlebars::Handlebars::new() + .render_template(template, &serde_json::json!({ "campaign": summary }))?; + Ok(html) +} + +#[derive(Default)] +struct OverallAccumulator { + total_tx: u64, + total_errors: u64, + start_ms: Option, + end_ms: Option, +} + +impl OverallAccumulator { + fn add_run(&mut self, tx: u64, errors: u64, start_ms: Option, end_ms: Option) 
{ + self.total_tx = self.total_tx.saturating_add(tx); + self.total_errors = self.total_errors.saturating_add(errors); + if let Some(s) = start_ms { + self.start_ms = Some(self.start_ms.map_or(s, |curr| curr.min(s))); + } + if let Some(e) = end_ms { + self.end_ms = Some(self.end_ms.map_or(e, |curr| curr.max(e))); + } + } + + fn into_overall(self) -> CampaignOverall { + let duration_secs = match (self.start_ms, self.end_ms) { + (Some(s), Some(e)) if e > s => ((e - s) / 1000) as u64, + _ => 0, + }; + let avg_tps = if duration_secs > 0 { + let raw = self.total_tx as f64 / duration_secs as f64; + (raw * 100.0).round() / 100.0 // Round to 2 decimal places + } else { + 0.0 + }; + let error_rate = if self.total_tx > 0 { + let raw = self.total_errors as f64 / self.total_tx as f64; + (raw * 100.0).round() / 100.0 // Round to 2 decimal places + } else { + 0.0 + }; + CampaignOverall { + total_tx_count: self.total_tx, + total_error_count: self.total_errors, + error_rate, + campaign_start_time: self.start_ms.map(|v| millis_to_rfc3339(v as i64)), + campaign_end_time: self.end_ms.map(|v| millis_to_rfc3339(v as i64)), + campaign_duration_secs: duration_secs, + avg_tps, + } + } +} + +#[derive(Default)] +struct StageAccumulator { + total_tx: u64, + total_errors: u64, + start_ms: Option, + end_ms: Option, +} + +impl StageAccumulator { + fn add_run(&mut self, tx: u64, errors: u64, start_ms: Option, end_ms: Option) { + self.total_tx = self.total_tx.saturating_add(tx); + self.total_errors = self.total_errors.saturating_add(errors); + if let Some(s) = start_ms { + self.start_ms = Some(self.start_ms.map_or(s, |curr| curr.min(s))); + } + if let Some(e) = end_ms { + self.end_ms = Some(self.end_ms.map_or(e, |curr| curr.max(e))); + } + } + + fn into_summary(self, stage: String, scenario: String) -> StageScenarioSummary { + let duration_secs = match (self.start_ms, self.end_ms) { + (Some(s), Some(e)) if e > s => ((e - s) / 1000) as u64, + _ => 0, + }; + let avg_tps = if duration_secs > 0 { + 
let raw = self.total_tx as f64 / duration_secs as f64; + (raw * 100.0).round() / 100.0 // Round to 2 decimal places + } else { + 0.0 + }; + let error_rate = if self.total_tx > 0 { + let raw = self.total_errors as f64 / self.total_tx as f64; + (raw * 100.0).round() / 100.0 // Round to 2 decimal places + } else { + 0.0 + }; + StageScenarioSummary { + stage_name: stage, + scenario_name: scenario, + total_tx_count: self.total_tx, + total_error_count: self.total_errors, + error_rate, + duration_secs, + avg_tps, + } + } +} + +fn millis_to_rfc3339(ms: i64) -> String { + use chrono::Utc; + let dt = chrono::DateTime::from_timestamp_millis(ms) + .unwrap_or_else(|| chrono::DateTime::::from_timestamp_millis(0).unwrap()); + dt.to_rfc3339() +} + +fn tx_and_error_counts(run_txs: &[RunTx], fallback_tx_count: usize) -> (u64, u64) { + if run_txs.is_empty() { + return (fallback_tx_count as u64, 0); + } + let tx_count = run_txs.len() as u64; + let error_count = run_txs.iter().filter(|tx| tx.error.is_some()).count() as u64; + (tx_count, error_count) +} + +fn run_time_bounds(run: &SpamRun, run_txs: &[RunTx]) -> (Option, Option) { + if !run_txs.is_empty() { + let start = run_txs + .iter() + .map(|t| t.start_timestamp_secs as u128 * 1000) + .min(); + let end = run_txs + .iter() + .map(|t| { + t.end_timestamp_secs + .map(|e| e as u128 * 1000) + .unwrap_or(t.start_timestamp_secs as u128 * 1000) + }) + .max(); + return (start, end); + } + let start_ms = run.timestamp as u128; + let duration_ms = if run.duration.is_seconds() { + run.duration.value().saturating_mul(1000) as u128 + } else { + // fallback: treat blocks as seconds for rough duration if not seconds + run.duration.value().saturating_mul(1000) as u128 + }; + let end_ms = start_ms.saturating_add(duration_ms); + (Some(start_ms), Some(end_ms)) +} + #[derive(Clone, Debug, Deserialize, Serialize)] /// For display purposes only. Values are in milliseconds. 
pub struct RpcLatencyQuantiles { diff --git a/crates/report/src/error.rs b/crates/report/src/error.rs index 57118307..65cd55fd 100644 --- a/crates/report/src/error.rs +++ b/crates/report/src/error.rs @@ -48,6 +48,9 @@ pub enum Error { #[error("run (id={0}) does not exist")] RunDoesNotExist(u64), + #[error("no runs found for campaign id {0}")] + CampaignNotFound(String), + #[error("serde_json error")] SerdeJson(#[from] serde_json::Error), } diff --git a/crates/report/src/gen_html.rs b/crates/report/src/gen_html.rs index 787d02ce..e04bb731 100644 --- a/crates/report/src/gen_html.rs +++ b/crates/report/src/gen_html.rs @@ -18,6 +18,15 @@ pub struct ReportMetadata { pub rpc_url: String, pub metrics: SpamRunMetrics, pub chart_data: ChartData, + pub campaign: Option, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct CampaignMetadata { + pub id: Option, + pub name: Option, + pub stage: Option, + pub scenario: Option, } #[derive(Clone, Debug, Deserialize, Serialize)] @@ -39,6 +48,7 @@ struct TemplateData { end_block: String, metrics: SpamRunMetrics, chart_data: ChartData, + campaign: Option, } impl TemplateData { @@ -51,6 +61,7 @@ impl TemplateData { end_block: meta.end_block.to_string(), metrics: meta.metrics.to_owned(), chart_data: meta.chart_data.to_owned(), + campaign: meta.campaign.to_owned(), } } } diff --git a/crates/report/src/template.html.handlebars b/crates/report/src/template.html.handlebars index 0731a5cf..014d51f1 100644 --- a/crates/report/src/template.html.handlebars +++ b/crates/report/src/template.html.handlebars @@ -172,6 +172,13 @@

{{data.date}}


Scenario: {{data.scenario_name}}

+ {{#if data.campaign}} +

Campaign: {{data.campaign.name}} (id: {{data.campaign.id}})

+

Stage: {{data.campaign.stage}}

+ {{#if data.campaign.scenario}} +

Campaign Scenario: {{data.campaign.scenario}}

+ {{/if}} + {{/if}}

Block Range: {{data.start_block}} - {{data.end_block}}

Target Chain

diff --git a/crates/report/src/template_campaign.html.handlebars b/crates/report/src/template_campaign.html.handlebars new file mode 100644 index 00000000..07384740 --- /dev/null +++ b/crates/report/src/template_campaign.html.handlebars @@ -0,0 +1,141 @@ + + + + + Campaign Report + + + +

Campaign Report

+

+ Campaign: + {{#if campaign.campaign_name}}{{campaign.campaign_name}}{{else}}unnamed{{/if}} +
+ Campaign ID: {{campaign.campaign_id}} +

+ + {{#if campaign.logs_incomplete}} +

+ Note: Transaction logs were incomplete for one or more runs. Totals and durations use stored run metadata; error counts may be under-reported. +

+ {{/if}} + + {{#if campaign.overall}} +

Campaign Summary

+ + + + + + + + + + + + + + + + + + + + + + + +
Total TXTotal ErrorsError RateStartEndDuration (s)Avg TPS
{{campaign.overall.total_tx_count}}{{campaign.overall.total_error_count}}{{campaign.overall.error_rate}}{{#if campaign.overall.campaign_start_time}}{{campaign.overall.campaign_start_time}}{{else}}-{{/if}}{{#if campaign.overall.campaign_end_time}}{{campaign.overall.campaign_end_time}}{{else}}-{{/if}}{{campaign.overall.campaign_duration_secs}}{{campaign.overall.avg_tps}}
+ {{/if}} + + {{#if campaign.stage_scenario}} +

Per Stage & Scenario

+ + + + + + + + + + + + + + {{#each campaign.stage_scenario}} + + + + + + + + + + {{/each}} + +
StageScenarioTotal TXTotal ErrorsError RateDuration (s)Avg TPS
{{stage_name}}{{scenario_name}}{{total_tx_count}}{{total_error_count}}{{error_rate}}{{duration_secs}}{{avg_tps}}
+ {{/if}} + + {{#if campaign.totals_by_stage}} +

Totals by Stage

+ + + + + + + + + + {{#each campaign.totals_by_stage}} + {{#each this}} + + + + + + {{/each}} + {{/each}} + +
StageScenarioTXs
{{@../key}}{{@key}}{{this}}
+ {{/if}} + + {{#if campaign.runs}} +

Runs

+ + + + + + + + + + + + + {{#each campaign.runs}} + + + + + + + + + {{/each}} + +
Run IDStageScenarioTX CountDurationReport
{{run_id}}{{#if stage_name}}{{stage_name}}{{else}}unspecified-stage{{/if}}{{scenario_name}}{{tx_count}}{{duration}}View report
+ {{/if}} + + + diff --git a/crates/sqlite_db/src/db.rs b/crates/sqlite_db/src/db.rs index a759141c..35d243bc 100644 --- a/crates/sqlite_db/src/db.rs +++ b/crates/sqlite_db/src/db.rs @@ -148,6 +148,9 @@ struct SpamRunRow { pub timestamp: String, pub tx_count: usize, pub scenario_name: String, + pub campaign_id: Option, + pub campaign_name: Option, + pub stage_name: Option, pub rpc_url: String, pub txs_per_duration: u64, pub duration: String, @@ -161,6 +164,9 @@ impl From for SpamRun { timestamp: row.timestamp.parse::().expect("invalid timestamp"), tx_count: row.tx_count, scenario_name: row.scenario_name, + campaign_id: row.campaign_id, + campaign_name: row.campaign_name, + stage_name: row.stage_name, rpc_url: row.rpc_url, txs_per_duration: row.txs_per_duration, duration: row.duration.into(), @@ -224,6 +230,9 @@ impl DbOps for SqliteDb { timestamp TEXT NOT NULL, tx_count INTEGER NOT NULL, scenario_name TEXT NOT NULL DEFAULT '', + campaign_id TEXT, + campaign_name TEXT, + stage_name TEXT, rpc_url TEXT NOT NULL DEFAULT '', txs_per_duration INTEGER NOT NULL, duration TEXT NOT NULL, @@ -286,14 +295,17 @@ impl DbOps for SqliteDb { timestamp, tx_count, scenario_name, + campaign_id, + campaign_name, + stage_name, rpc_url, txs_per_duration, duration, pending_timeout, } = run; self.execute( - "INSERT INTO runs (timestamp, tx_count, scenario_name, rpc_url, txs_per_duration, duration, timeout) VALUES (?, ?, ?, ?, ?, ?, ?)", - params![timestamp, tx_count, scenario_name, rpc_url, txs_per_duration, &duration.to_string(), pending_timeout.as_secs()], + "INSERT INTO runs (timestamp, tx_count, scenario_name, campaign_id, campaign_name, stage_name, rpc_url, txs_per_duration, duration, timeout) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + params![timestamp, tx_count, scenario_name, campaign_id, campaign_name, stage_name, rpc_url, txs_per_duration, &duration.to_string(), pending_timeout.as_secs()], )?; // get ID from newly inserted row let id: u64 = self.query_row("SELECT 
last_insert_rowid()", params![], |row| row.get(0))?; @@ -321,7 +333,7 @@ impl DbOps for SqliteDb { let pool = self.get_pool()?; let mut stmt = pool .prepare( - "SELECT id, timestamp, tx_count, scenario_name, rpc_url, txs_per_duration, duration, timeout FROM runs WHERE id = ?1", + "SELECT id, timestamp, tx_count, scenario_name, campaign_id, campaign_name, stage_name, rpc_url, txs_per_duration, duration, timeout FROM runs WHERE id = ?1", )?; let row = stmt.query_map(params![run_id], |row| { @@ -330,42 +342,83 @@ impl DbOps for SqliteDb { timestamp: row.get(1)?, tx_count: row.get(2)?, scenario_name: row.get(3)?, - rpc_url: row.get(4)?, - txs_per_duration: row.get(5)?, - duration: row.get(6)?, - timeout: row.get(7)?, + campaign_id: row.get(4)?, + campaign_name: row.get(5)?, + stage_name: row.get(6)?, + rpc_url: row.get(7)?, + txs_per_duration: row.get(8)?, + duration: row.get(9)?, + timeout: row.get(10)?, }) })?; let res = row.last().transpose()?; Ok(res.map(|r| r.into())) } + fn get_runs_by_campaign(&self, campaign_id: &str) -> Result> { + let pool = self.get_pool()?; + let mut stmt = pool.prepare( + "SELECT id, timestamp, tx_count, scenario_name, campaign_id, campaign_name, stage_name, rpc_url, txs_per_duration, duration, timeout FROM runs WHERE campaign_id = ?1 ORDER BY id ASC", + )?; + let rows = stmt.query_map(params![campaign_id], |row| { + Ok(SpamRunRow { + id: row.get(0)?, + timestamp: row.get(1)?, + tx_count: row.get(2)?, + scenario_name: row.get(3)?, + campaign_id: row.get(4)?, + campaign_name: row.get(5)?, + stage_name: row.get(6)?, + rpc_url: row.get(7)?, + txs_per_duration: row.get(8)?, + duration: row.get(9)?, + timeout: row.get(10)?, + }) + })?; + let res = rows + .map(|r| r.map(|r| r.into())) + .map(|r| r.map_err(|e| e.into())) + .collect::>>()?; + Ok(res) + } + + fn latest_campaign_id(&self) -> Result> { + let pool = self.get_pool()?; + let mut stmt = pool.prepare( + "SELECT campaign_id FROM runs WHERE campaign_id IS NOT NULL ORDER BY id DESC LIMIT 1", 
+ )?; + let row = stmt + .query_map(params![], |row| row.get(0))? + .last() + .transpose()?; + Ok(row) + } + fn insert_named_txs( &self, named_txs: &[NamedTx], rpc_url: &str, genesis_hash: FixedBytes<32>, ) -> Result<()> { - let pool = self.get_pool()?; let rpc_url_id = self.get_rpc_url_id(rpc_url, genesis_hash)?; - let stmts = named_txs.iter().map(|tx| { - format!( - "INSERT INTO named_txs (name, tx_hash, contract_address, rpc_url_id) VALUES ('{}', '{}', '{}', {});", - tx.name, - tx.tx_hash.encode_hex(), - tx.address.map(|a| a.encode_hex()).unwrap_or_default(), - rpc_url_id, - ) - }); - pool.execute_batch(&format!( - "BEGIN; - {} - COMMIT;", - stmts - .reduce(|ac, c| format!("{ac}\n{c}")) - .unwrap_or_default(), - ))?; + // Use a transaction for batch inserts with parameterized queries + let mut conn = self.get_pool()?; + let tx = conn.transaction()?; + + for named_tx in named_txs { + tx.execute( + "INSERT INTO named_txs (name, tx_hash, contract_address, rpc_url_id) VALUES (?1, ?2, ?3, ?4)", + params![ + &named_tx.name, + named_tx.tx_hash.encode_hex(), + named_tx.address.map(|a| a.encode_hex()).unwrap_or_default(), + rpc_url_id, + ], + )?; + } + + tx.commit()?; Ok(()) } @@ -555,6 +608,9 @@ mod tests { timestamp: 100, tx_count: 20, scenario_name: "test".to_string(), + campaign_id: None, + campaign_name: None, + stage_name: None, rpc_url: "http://test:8545".to_string(), txs_per_duration: 10, duration: SpamDuration::Seconds(10), @@ -569,6 +625,45 @@ mod tests { assert_eq!(db.num_runs().unwrap(), 3); } + #[test] + fn groups_runs_by_campaign_id() { + let db = SqliteDb::new_memory(); + db.create_tables().unwrap(); + let pending_timeout = Duration::from_secs(12); + let mk_run = |scenario: &str| SpamRunRequest { + timestamp: 100, + tx_count: 10, + scenario_name: scenario.to_string(), + campaign_id: Some("cmp-test".to_string()), + campaign_name: Some("cmp".to_string()), + stage_name: Some("stage-a".to_string()), + rpc_url: "http://test:8545".to_string(), + 
txs_per_duration: 5, + duration: SpamDuration::Seconds(2), + pending_timeout, + }; + + let first = db.insert_run(&mk_run("scenario:a")).unwrap(); + let second = db.insert_run(&mk_run("scenario:b")).unwrap(); + assert_ne!(first, second); + + let runs = db.get_runs_by_campaign("cmp-test").unwrap(); + assert_eq!(runs.len(), 2); + assert_ne!(runs[0].id, runs[1].id); + assert!(runs + .iter() + .all(|r| r.campaign_id.as_deref() == Some("cmp-test"))); + assert!(runs + .iter() + .all(|r| r.campaign_name.as_deref() == Some("cmp"))); + assert!(runs + .iter() + .all(|r| r.stage_name.as_deref() == Some("stage-a"))); + let scenario_names: Vec<_> = runs.iter().map(|r| r.scenario_name.as_str()).collect(); + assert!(scenario_names.contains(&"scenario:a")); + assert!(scenario_names.contains(&"scenario:b")); + } + #[test] fn inserts_and_gets_named_txs() { let db = SqliteDb::new_memory(); @@ -617,6 +712,9 @@ mod tests { timestamp: 100, tx_count: 20, scenario_name: "test".to_string(), + campaign_id: None, + campaign_name: None, + stage_name: None, rpc_url: "http://test:8545".to_string(), txs_per_duration: 10, duration: SpamDuration::Seconds(10), diff --git a/crates/sqlite_db/src/lib.rs b/crates/sqlite_db/src/lib.rs index 39afe2b4..864d2feb 100644 --- a/crates/sqlite_db/src/lib.rs +++ b/crates/sqlite_db/src/lib.rs @@ -3,7 +3,7 @@ mod db; pub mod error; /// Increment this whenever making changes to the DB schema. -pub static DB_VERSION: u64 = 5; +pub static DB_VERSION: u64 = 6; pub use ctx::*; pub use db::*; diff --git a/crates/testfile/src/campaign.rs b/crates/testfile/src/campaign.rs new file mode 100644 index 00000000..0e7658b5 --- /dev/null +++ b/crates/testfile/src/campaign.rs @@ -0,0 +1,464 @@ +use crate::{error::CampaignError, Result}; +use serde::{Deserialize, Serialize}; + +/// Defines the traffic pacing mode for a campaign stage. 
+#[derive(Clone, Copy, Debug, Deserialize, Serialize, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +#[derive(Default)] +pub enum CampaignMode { + #[default] + Tps, + Tpb, +} + +/// Scenario weight for a stage. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct CampaignMixEntry { + pub scenario: String, + pub share_pct: f64, +} + +/// A single spam stage within a campaign. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct CampaignStage { + pub name: String, + pub duration: Option, + pub rate: Option, + #[serde(default)] + pub mix: Vec, +} + +/// Spam configuration shared across stages. +#[derive(Clone, Debug, Deserialize, Serialize, Default)] +pub struct CampaignSpam { + #[serde(default)] + pub mode: CampaignMode, + pub rate: Option, + pub duration: Option, + #[serde(default)] + pub seed: Option, + /// Maximum time in seconds for a stage to complete (separate from spam duration). + /// If a stage exceeds this timeout, it will be terminated. + #[serde(default)] + pub stage_timeout: Option, + #[serde(default)] + pub stage: Vec, + /// Shorthand for a single steady stage when no explicit `stage` entries are provided. + #[serde(default)] + pub mix: Option>, +} + +/// Composite / meta-scenario description. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct CampaignConfig { + pub name: String, + #[serde(default)] + pub description: Option, + pub spam: CampaignSpam, +} + +impl CampaignConfig { + /// Returns all scenario labels mentioned in spam declarations. 
+    pub fn setup_scenarios(&self) -> Vec<String> {
+        let mut all_scenarios = vec![];
+        if let Some(mix) = &self.spam.mix {
+            let mut v = mix.iter().map(|m| m.scenario.clone()).collect::<Vec<_>>();
+            all_scenarios.append(&mut v);
+        }
+
+        for stage in &self.spam.stage {
+            let mut v = stage
+                .mix
+                .iter()
+                .map(|m| m.scenario.clone())
+                .collect::<Vec<_>>();
+            all_scenarios.append(&mut v);
+        }
+
+        all_scenarios.dedup();
+        all_scenarios
+    }
+}
+
+/// Resolved runtime parameters per stage after validation/defaulting.
+#[derive(Clone, Debug)]
+pub struct ResolvedStage {
+    pub name: String,
+    pub rate: u64,
+    pub duration: u64,
+    pub stage_timeout: Option<u64>,
+    pub mix: Vec<ResolvedMixEntry>,
+}
+
+#[derive(Clone, Debug)]
+pub struct ResolvedMixEntry {
+    pub scenario: String,
+    pub share_pct: f64,
+    pub rate: u64,
+}
+
+impl ResolvedMixEntry {
+    pub fn scenario_label(&self) -> String {
+        self.scenario.trim_start_matches("builtin:").to_owned()
+    }
+}
+
+impl CampaignConfig {
+    /// Parse a campaign from TOML file.
+    pub fn from_file(path: &str) -> Result<Self> {
+        let contents = std::fs::read_to_string(path)?;
+        Self::from_toml_str(&contents)
+    }
+
+    /// Parse a campaign from raw TOML.
+    pub fn from_toml_str(toml: &str) -> Result<Self> {
+        let cfg: CampaignConfig = toml::from_str(toml)?;
+        cfg.validate()?;
+        Ok(cfg)
+    }
+
+    /// Validate top-level and stage-level invariants.
+    pub fn validate(&self) -> Result<()> {
+        if self.name.trim().is_empty() {
+            return Err(CampaignError::NameEmpty.into());
+        }
+        // Normalize stages first so validation covers both explicit and shorthand forms.
+        let normalized_stages = self.spam.normalized_stages()?;
+
+        for (idx, stage) in normalized_stages.iter().enumerate() {
+            if stage.mix.is_empty() {
+                return Err(CampaignError::StageMixEmpty {
+                    index: idx,
+                    name: stage.name.clone(),
+                }
+                .into());
+            }
+            if stage.duration.is_none() && self.spam.duration.is_none() {
+                return Err(CampaignError::SpamDurationMissing {
+                    index: idx,
+                    name: stage.name.clone(),
+                }
+                .into());
+            }
+        }
+        Ok(())
+    }
+
+    /// Normalize defaults and compute per-stage rates for execution.
+    pub fn resolve(&self) -> Result<Vec<ResolvedStage>> {
+        let normalized_stages = self.spam.normalized_stages()?;
+
+        let mut resolved_stages = Vec::new();
+        for (idx, stage) in normalized_stages.iter().enumerate() {
+            let duration = stage.duration.unwrap_or(self.spam.duration.ok_or(
+                CampaignError::SpamDurationMissing {
+                    index: idx,
+                    name: stage.name.clone(),
+                },
+            )?);
+
+            let mix_sum: f64 = stage.mix.iter().map(|m| m.share_pct).sum();
+            if mix_sum <= f64::EPSILON {
+                return Err(CampaignError::MixSharesSumInvalid {
+                    name: stage.name.clone(),
+                }
+                .into());
+            }
+            let rate = stage
+                .rate
+                .unwrap_or(self.spam.rate.ok_or(CampaignError::SpamRateMissing)?);
+
+            // Normalize shares and compute integer rates; last entry absorbs rounding drift.
+            // Ensure mixes with share_pct > 0 get at least rate 1 to avoid unexpected 0 rates.
+            let mut resolved_mix = Vec::new();
+            let mut assigned = 0u64;
+
+            // First pass: calculate rates with rounding
+            let mut rates: Vec<u64> = stage
+                .mix
+                .iter()
+                .map(|mix| {
+                    let normalized_share = mix.share_pct / mix_sum;
+                    (rate as f64 * normalized_share).round() as u64
+                })
+                .collect();
+
+            // Second pass: ensure non-zero share_pct mixes get at least rate 1
+            let non_zero_mixes = stage.mix.iter().filter(|m| m.share_pct > 0.0).count();
+            if non_zero_mixes > 0 && rate >= non_zero_mixes as u64 {
+                // Find mixes that got 0 but have share_pct > 0
+                let mut zero_indices = Vec::new();
+                for (idx, mix) in stage.mix.iter().enumerate() {
+                    if mix.share_pct > 0.0 && rates[idx] == 0 {
+                        zero_indices.push(idx);
+                    }
+                }
+
+                // Give each zero-rated mix a rate of 1, taking from the largest allocations
+                for zero_idx in zero_indices {
+                    // Find the mix with the highest rate (excluding last to avoid complications)
+                    if let Some((max_idx, _)) = rates
+                        .iter()
+                        .enumerate()
+                        .take(rates.len().saturating_sub(1))
+                        .max_by_key(|(_, &r)| r)
+                    {
+                        if rates[max_idx] > 1 {
+                            rates[max_idx] -= 1;
+                            rates[zero_idx] = 1;
+                        }
+                    }
+                }
+            }
+
+            // Third pass: create ResolvedMixEntry with adjusted rates
+            for (idx, mix) in stage.mix.iter().enumerate() {
+                let mut scenario_rate = rates[idx];
+                if idx == stage.mix.len() - 1 {
+                    // Last entry gets exactly what's left to ensure total equals rate
+                    let remaining = rate.saturating_sub(assigned);
+
+                    // Validate that rounding didn't cause excessive drift
+                    if assigned > rate {
+                        return Err(CampaignError::RateDistributionExceedsLimit {
+                            name: stage.name.clone(),
+                            assigned_rate: assigned,
+                            total_rate: rate,
+                        }
+                        .into());
+                    }
+
+                    // Warn if the adjustment is significant
+                    let expected = scenario_rate;
+                    if expected > 0 && remaining > 0 {
+                        let drift_pct = if remaining > expected {
+                            ((remaining - expected) as f64 / expected as f64) * 100.0
+                        } else {
+                            ((expected - remaining) as f64 / expected as f64) * 100.0
+                        };
+                        if drift_pct > 10.0 {
+                            
eprintln!(
+                                "Warning: stage {} scenario {} rate adjusted from {} to {} ({:.1}% drift) due to rounding",
+                                stage.name, mix.scenario, expected, remaining, drift_pct
+                            );
+                        }
+                    }
+
+                    scenario_rate = remaining;
+                } else {
+                    assigned = assigned.saturating_add(scenario_rate);
+                }
+                resolved_mix.push(ResolvedMixEntry {
+                    scenario: mix.scenario.clone(),
+                    share_pct: mix.share_pct,
+                    rate: scenario_rate,
+                });
+            }
+
+            resolved_stages.push(ResolvedStage {
+                name: stage.name.clone(),
+                rate,
+                duration,
+                stage_timeout: self.spam.stage_timeout,
+                mix: resolved_mix,
+            });
+        }
+
+        Ok(resolved_stages)
+    }
+}
+
+impl CampaignSpam {
+    /// Normalize spam configuration into explicit stages, supporting shorthand `[spam] + [[spam.mix]]`.
+    pub fn normalized_stages(&self) -> Result<Vec<CampaignStage>> {
+        if !self.stage.is_empty() {
+            if self.mix.is_some() {
+                return Err(CampaignError::ConflictingMixAndStage.into());
+            }
+            return Ok(self.stage.clone());
+        }
+
+        if let Some(mix) = &self.mix {
+            if mix.is_empty() {
+                return Err(CampaignError::SpamMixEmpty.into());
+            }
+            let duration = self
+                .duration
+                .ok_or(CampaignError::ShorthandRequiresSpamDuration)?;
+
+            let stage = CampaignStage {
+                name: "steady".to_string(),
+                duration: Some(duration),
+                rate: self.rate,
+                mix: mix.clone(),
+            };
+            return Ok(vec![stage]);
+        }
+
+        Err(CampaignError::SpamStageOrMixUndefined.into())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn normalized_prefers_stages_when_present() {
+        let spam = CampaignSpam {
+            rate: Some(10),
+            duration: Some(100),
+            stage: vec![CampaignStage {
+                name: "explicit".into(),
+                duration: Some(50),
+                rate: Some(10),
+                mix: vec![CampaignMixEntry {
+                    scenario: "s1".into(),
+                    share_pct: 100.0,
+                }],
+            }],
+            ..Default::default()
+        };
+
+        let stages = spam.normalized_stages().unwrap();
+        assert_eq!(stages.len(), 1);
+        assert_eq!(stages[0].name, "explicit");
+    }
+
+    #[test]
+    fn normalized_shorthand_builds_stage() {
+        let spam = CampaignSpam {
+            mode: CampaignMode::Tps,
+            rate: 
Some(20), + duration: Some(600), + mix: Some(vec![ + CampaignMixEntry { + scenario: "s1".into(), + share_pct: 60.0, + }, + CampaignMixEntry { + scenario: "s2".into(), + share_pct: 40.0, + }, + ]), + ..Default::default() + }; + + let stages = spam.normalized_stages().unwrap(); + assert_eq!(stages.len(), 1); + let s = &stages[0]; + assert_eq!(s.name, "steady"); + assert_eq!(s.duration, Some(600)); + assert_eq!(s.rate, Some(20)); + assert_eq!(s.mix.len(), 2); + } + + #[test] + fn normalized_errors_when_both_stage_and_mix() { + let spam = CampaignSpam { + stage: vec![CampaignStage { + name: "explicit".into(), + duration: Some(10), + rate: Some(5), + mix: vec![CampaignMixEntry { + scenario: "s1".into(), + share_pct: 100.0, + }], + }], + mix: Some(vec![CampaignMixEntry { + scenario: "s2".into(), + share_pct: 100.0, + }]), + ..Default::default() + }; + + let err = spam.normalized_stages().unwrap_err(); + assert!(format!("{err}").contains("cannot define both")); + } + + #[test] + fn normalized_errors_when_missing_both() { + let spam = CampaignSpam::default(); + let err = spam.normalized_stages().unwrap_err(); + assert!(format!("{err}").contains("must define either")); + } + + #[test] + fn resolve_shorthand_matches_explicit_single_stage() { + let mix = vec![ + CampaignMixEntry { + scenario: "s1".into(), + share_pct: 60.0, + }, + CampaignMixEntry { + scenario: "s2".into(), + share_pct: 40.0, + }, + ]; + + let explicit = CampaignConfig { + name: "cmp".into(), + description: None, + spam: CampaignSpam { + mode: CampaignMode::Tps, + rate: Some(20), + duration: Some(600), + stage: vec![CampaignStage { + name: "steady".into(), + duration: Some(600), + rate: Some(20), + mix: mix.clone(), + }], + ..Default::default() + }, + }; + + let shorthand = CampaignConfig { + name: "cmp".into(), + description: None, + spam: CampaignSpam { + mode: CampaignMode::Tps, + rate: Some(20), + duration: Some(600), + mix: Some(mix.clone()), + ..Default::default() + }, + }; + + let explicit_resolved = 
explicit.resolve().unwrap(); + let shorthand_resolved = shorthand.resolve().unwrap(); + assert_eq!(explicit_resolved.len(), 1); + assert_eq!(shorthand_resolved.len(), 1); + let e = &explicit_resolved[0]; + let s = &shorthand_resolved[0]; + assert_eq!(e.name, s.name); + assert_eq!(e.rate, s.rate); + assert_eq!(e.duration, s.duration); + assert_eq!(e.mix.len(), s.mix.len()); + // scenario order and computed rates should match + for (em, sm) in e.mix.iter().zip(s.mix.iter()) { + assert_eq!(em.scenario, sm.scenario); + assert_eq!(em.rate, sm.rate); + } + } + + #[test] + fn validate_shorthand_passes() { + let cfg = CampaignConfig { + name: "cmp".into(), + description: None, + spam: CampaignSpam { + mode: CampaignMode::Tps, + rate: Some(5), + duration: Some(30), + mix: Some(vec![CampaignMixEntry { + scenario: "s1".into(), + share_pct: 100.0, + }]), + ..Default::default() + }, + }; + + cfg.validate().unwrap(); + } +} diff --git a/crates/testfile/src/error.rs b/crates/testfile/src/error.rs index 04ef0895..6c6e2649 100644 --- a/crates/testfile/src/error.rs +++ b/crates/testfile/src/error.rs @@ -17,4 +17,49 @@ pub enum Error { #[error("toml serialization error")] TomlSer(#[from] toml::ser::Error), + + #[error("campaign validation error: {0}")] + Campaign(#[from] CampaignError), +} + +#[derive(Debug, Error)] +pub enum CampaignError { + #[error("campaign name must not be empty")] + NameEmpty, + + #[error("'rate' must be specified for [spam] or [[spam.stage]]")] + SpamRateMissing, + + #[error("stage {index} ({name}) missing duration and no default spam duration provided")] + SpamDurationMissing { index: usize, name: String }, + + #[error("stage {name} mix shares must sum to a positive number")] + MixSharesSumInvalid { name: String }, + + #[error("stage {index} ({name}) must include at least one mix entry")] + StageMixEmpty { index: usize, name: String }, + + #[error("campaign spam: spam.mix must include at least one entry")] + SpamMixEmpty, + + #[error("campaign spam: must 
define either spam.stage or spam.mix + spam.duration")]
+    SpamStageOrMixUndefined,
+
+    #[error("campaign spam: shorthand requires spam.duration")]
+    ShorthandRequiresSpamDuration,
+
+    #[error("campaign spam: cannot define both spam.stage and spam.mix")]
+    ConflictingMixAndStage,
+
+    #[error(
+        "stage {}: rate distribution error - assigned {} exceeds total rate {}",
+        name,
+        assigned_rate,
+        total_rate
+    )]
+    RateDistributionExceedsLimit {
+        name: String,
+        assigned_rate: u64,
+        total_rate: u64,
+    },
 }
diff --git a/crates/testfile/src/lib.rs b/crates/testfile/src/lib.rs
index 66bf10be..b36dcac5 100644
--- a/crates/testfile/src/lib.rs
+++ b/crates/testfile/src/lib.rs
@@ -1,6 +1,11 @@
+mod campaign;
 pub mod error;
 mod test_config;
+pub use campaign::{
+    CampaignConfig, CampaignMixEntry, CampaignMode, CampaignSpam, CampaignStage, ResolvedMixEntry,
+    ResolvedStage,
+};
 pub use error::Error;
 pub use test_config::TestConfig;
@@ -547,3 +552,50 @@ mod more_tests {
         Ok(())
     }
 }
+
+#[cfg(test)]
+mod campaign_tests {
+    use super::{CampaignConfig, CampaignMode};
+
+    #[test]
+    fn parses_campaign_and_resolves_stages() {
+        let toml = r#"
+name = "composite"
+description = "traffic mix of erc20 and groth16_verify"
+
+[spam]
+mode = "tps"
+rate = 20
+duration = 600
+seed = 42
+
+[[spam.stage]]
+name = "steady"
+duration = 600
+  [[spam.stage.mix]]
+  scenario = "scenario:other_contract_call.toml"
+  share_pct = 95.0
+  [[spam.stage.mix]]
+  scenario = "scenario:eth_transfer.toml"
+  share_pct = 4.8
+  [[spam.stage.mix]]
+  scenario = "scenario:erc20_transfer.toml"
+  share_pct = 0.2
+"#;
+
+        let cfg = CampaignConfig::from_toml_str(toml).expect("campaign parses");
+        let stages = cfg.resolve().expect("campaign resolves");
+        assert_eq!(stages.len(), 1);
+        let stage = &stages[0];
+        assert_eq!(cfg.spam.mode, CampaignMode::Tps);
+        assert_eq!(stage.rate, 20);
+        assert_eq!(stage.duration, 600);
+        assert_eq!(stage.mix.len(), 3);
+        let total: u64 = stage.mix.iter().map(|m| m.rate).sum();
+        
assert_eq!(total, 20);
+        assert!(stage
+            .mix
+            .iter()
+            .any(|m| m.scenario.contains("erc20_transfer")));
+    }
+}
diff --git a/docs/campaigns.md b/docs/campaigns.md
new file mode 100644
index 00000000..23622e7c
--- /dev/null
+++ b/docs/campaigns.md
@@ -0,0 +1,116 @@
+# Campaigns (Composite / Meta-Scenarios)
+
+Campaigns let you run multiple existing scenarios in parallel, optionally in sequential stages, to mimic real network mixes or replay traffic profiles.
+
+## File format
+
+```toml
+name = "composite-example"
+description = "Simple, Stress, and Reverts traffic mix"
+
+[setup]
+scenarios = [
+    "scenario:simple.toml",
+    "scenario:stress.toml",
+    "scenario:reverts.toml",
+]
+
+[spam]
+mode = "tps" # or "tpb"
+rate = 20 # default rate if a stage omits one (TPS or TPB via mode)
+duration = 600 # default duration (seconds if tps, blocks if tpb)
+seed = 42 # optional; falls back to CLI --seed or seed file
+
+[[spam.stage]]
+name = "steady"
+duration = 600
+  [[spam.stage.mix]]
+  scenario = "scenario:simple.toml"
+  share_pct = 95.0
+  [[spam.stage.mix]]
+  scenario = "scenario:stress.toml"
+  share_pct = 4.8
+  [[spam.stage.mix]]
+  scenario = "scenario:reverts.toml"
+  share_pct = 0.2
+```
+
+- `mode`: `tps` (per-second) or `tpb` (per-block). Stages can override rate/duration; otherwise they inherit from `[spam]`.
+- `rate`: rate per mode (TPS if `mode="tps"`, TPB if `mode="tpb"`). Set once at `[spam]` or per stage.
+- `duration` at `[spam]` is a **default per-stage** duration, not a total campaign time. Each stage runs for its own duration (seconds if `tps`, blocks if `tpb`), then the next stage starts.
+- `share_pct`: scenario weight inside a stage; shares are normalized and rounded, and the last entry absorbs rounding drift to preserve the target rate.
+- `[setup].scenarios`: run once, in order, before spamming. Uses the standard `setup` logic for each referenced scenario file.
+
+### Stage basics
+- Stages run **sequentially**. 
Each stage inherits `mode`/`rate`/`duration` from `[spam]` unless the stage overrides them. +- Each stage performs its own setup/init (funding, deploy/config for builtins, scenario init), then starts its spammers at the resolved rate/mix. +- Within a stage, we spin up one spammer per `mix` entry at the computed per-scenario rate; they share a DB handle and run id. +- The next stage starts only after the previous one completes its **stage duration** (seconds for `tps`, blocks for `tpb`). Campaign duration is the sum of stage durations. +- Rates and shares are recomputed per stage, so you can ramp traffic up/down or change blends across time slices. + +### Validation +- You must provide either `[[spam.stage]]` entries **or** a shorthand `[spam]` + `[[spam.mix]]` with `spam.duration`. +- If `spam.stage` is present, `spam.mix` is rejected (prefer explicit stages). +- Each stage needs a duration (seconds for `tps`, blocks for `tpb`); if omitted, the `[spam].duration` default is used. +- Mix entries must be non-empty and share percentages must sum to a positive number (they are normalized automatically). + +### Shorthand single-stage form +If you omit `[[spam.stage]]` and instead set `spam.duration` plus `[[spam.mix]]`, Contender builds a single implicit stage named `steady`: +```toml +[spam] +mode = "tps" +rate = 20 +duration = 600 +seed = 42 + +[[spam.mix]] +scenario = "scenario:simple.toml" +share_pct = 95.0 +[[spam.mix]] +scenario = "scenario:stress.toml" +share_pct = 4.8 +[[spam.mix]] +scenario = "scenario:reverts.toml" +share_pct = 0.2 +``` +This is equivalent to writing a single explicit `[[spam.stage]]` named `steady` with the same rate/duration and mix. + +### Multi-stage example +See `campaigns/staged-example.toml` for a two-stage campaign that warms up at a lower TPS, then ramps to a steady-state mix. + +## CLI usage + +Preferred: new subcommand. 
+```bash +contender campaign ./campaigns/composite.toml \ + -r $RPC_URL -p $PKEY --pending-timeout 12 --rpc-batch-size 0 +``` + +Flags mirror `spam` where they make sense: +- Connection/auth: `--rpc-url`, `--priv-key/-p`, `--builder-url`, JWT/auth flags via `ScenarioSendTxs` options. +- Funding/runtime: `--pending-timeout`, `--accounts-per-agent`, `--rpc-batch-size`, `--ignore-receipts`, `--optimistic-nonces`, `--timeout`, `--report`. +- Setup controls: `--redeploy`, `--skip-setup` (mutually exclusive). + +## Reporting + +- Per-run: `contender report [-i --preceding-runs N]` +- Campaign summary: `contender report --campaign []` (alias: `--campaign-id`) + - If `` is omitted, the latest campaign is used. + - Generates per-run HTML for all runs in the campaign. + - Writes `campaign-.html` and `campaign-.json` under `~/.contender/reports/` with links, aggregate metrics, and per-stage/per-scenario rollups. + - If you pass `--report` to `contender campaign ...`, contender will also generate a report for the run-id range at the end of the campaign. + - If transaction logs are incomplete for any run (e.g., tracing/storage gaps), the campaign report will use stored run metadata for totals/durations and will display a notice; error counts may be under-reported in that case. +- When a stage has multiple `[[spam.stage.mix]]` entries, do not combine it with `--override-senders`; using a single sender across mixes is rejected because it would cause nonce conflicts. + +## Execution semantics + +1) **Setup**: load each scenario in `[setup].scenarios` and run its setup once (reuse existing setup command). +2) **Stages**: for each `[[spam.stage]]` + - Resolve stage mode/rate/duration from stage or `[spam]` defaults. + - Compute per-scenario rates: `scenario_rate = round(total_rate * share_pct/100)`, last entry fixed to hit the total. + - Spawn one spammer per scenario in the stage, sharing a common `run_id` and database handle. + - Stage ends after `duration` seconds/blocks. 
+3) **Reporting**: if `--report` is set, generate a report for all campaign runs after the final stage.
+
+Run metadata now records `campaign_name` and `stage_name` alongside the scenario label (`campaign:<campaign_name>:<stage_name>:<scenario_name>`), so reports and DB exports can distinguish composite runs.
+
diff --git a/scenarios/uniV2.toml b/scenarios/uniV2.toml index edb237f0..15ce065d 100644 --- a/scenarios/uniV2.toml +++ b/scenarios/uniV2.toml @@ -53,6 +53,7 @@ kind = "univ2_create_pair_token1-token2" to = "{uniV2Factory}" signature = "function createPair(address tokenA, address tokenB) external returns (address pair)" args = ["{testToken}", "{testToken2}"] +gas_limit = 3000000 ## transfer funds to Unicheat ################################################## @@ -104,6 +105,7 @@ args = [ "{_sender}", "10000000000000", ] +gas_limit = 300000 ### SPAM