
forge/cmd/test/
mod.rs

use super::{install, test::filter::ProjectPathsAwareFilter, watch::WatchArgs};
use crate::{
    MultiContractRunner, MultiContractRunnerBuilder, TestFilter,
    decode::decode_console_logs,
    gas_report::GasReport,
    multi_runner::matches_contract,
    result::{SuiteResult, TestOutcome, TestStatus},
    traces::{
        CallTraceDecoderBuilder, InternalTraceMode, TraceKind,
        debug::{ContractSources, DebugTraceIdentifier},
        decode_trace_arena, folded_stack_trace,
        identifier::SignaturesIdentifier,
    },
};
use alloy_primitives::U256;
use chrono::Utc;
use clap::{Parser, ValueHint};
use eyre::{Context, OptionExt, Result, bail};
use foundry_block_explorers::EtherscanApiVersion;
use foundry_cli::{
    opts::{BuildOpts, GlobalArgs},
    utils::{self, LoadConfig},
};
use foundry_common::{TestFunctionExt, compile::ProjectCompiler, evm::EvmArgs, fs, shell};
use foundry_compilers::{
    ProjectCompileOutput,
    artifacts::output_selection::{ContractOutputSelection, OutputSelection},
    compilers::{
        Language,
        multi::{MultiCompiler, MultiCompilerLanguage},
        resolc::dual_compiled_contracts::DualCompiledContracts,
    },
    utils::source_files_iter,
};
use foundry_config::{
    Config, figment,
    figment::{
        Metadata, Profile, Provider,
        value::{Dict, Map},
    },
    filter::GlobMatcher,
    revive::{self, PolkadotMode},
};
use foundry_debugger::Debugger;
use foundry_evm::traces::identifier::TraceIdentifiers;
use regex::Regex;
use std::{
    collections::{BTreeMap, BTreeSet},
    fmt::Write,
    path::PathBuf,
    sync::{Arc, mpsc::channel},
    time::{Duration, Instant},
};
use yansi::Paint;

mod filter;
mod summary;
use crate::{result::TestKind, traces::render_trace_arena_inner};
pub use filter::FilterArgs;
use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};
use summary::{TestSummaryReport, format_invariant_metrics_table};

// Loads the project's figment and merges the build CLI arguments into it.
foundry_config::merge_impl_figment_convert!(TestArgs, build, evm);
65
66/// CLI arguments for `forge test`.
67#[derive(Clone, Debug, Parser)]
68#[command(next_help_heading = "Test options")]
69pub struct TestArgs {
70    // Include global options for users of this struct.
71    #[command(flatten)]
72    pub global: GlobalArgs,
73
    /// The contract file you want to test; it's a shortcut for `--match-path`.
    #[arg(value_hint = ValueHint::FilePath)]
    pub path: Option<GlobMatcher>,

    /// Run a single test in the debugger.
    ///
    /// The matching test will be opened in the debugger regardless of the outcome of the test.
    ///
    /// If the matching test is a fuzz test, then it will open the debugger on the first failure
    /// case. If the fuzz test does not fail, it will open the debugger on the last fuzz case.
    #[arg(long, conflicts_with_all = ["flamegraph", "flamechart", "decode_internal", "rerun"])]
    debug: bool,

    /// Generate a flamegraph for a single test. Implies `--decode-internal`.
    ///
    /// A flame graph is used to visualize which functions or operations within the smart contract
    /// are consuming the most gas overall in a sorted manner.
    #[arg(long)]
    flamegraph: bool,

    /// Generate a flamechart for a single test. Implies `--decode-internal`.
    ///
    /// A flame chart shows the gas usage over time, illustrating when each function is
    /// called (execution order) and how much gas it consumes at each point in the timeline.
    #[arg(long, conflicts_with = "flamegraph")]
    flamechart: bool,

    /// Identify internal functions in traces.
    ///
    /// This will trace internal functions and decode stack parameters.
    ///
    /// Parameters stored in memory (such as bytes or arrays) are currently decoded only when a
    /// single function is matched, similarly to `--debug`, for performance reasons.
    #[arg(long)]
    decode_internal: bool,

    /// Dumps all debugger steps to file.
    #[arg(
        long,
        requires = "debug",
        value_hint = ValueHint::FilePath,
        value_name = "PATH"
    )]
    dump: Option<PathBuf>,

    /// Print a gas report.
    #[arg(long, env = "FORGE_GAS_REPORT")]
    gas_report: bool,

    /// Check gas snapshots against previous runs.
    #[arg(long, env = "FORGE_SNAPSHOT_CHECK")]
    gas_snapshot_check: Option<bool>,

    /// Enable/disable recording of gas snapshot results.
    #[arg(long, env = "FORGE_SNAPSHOT_EMIT")]
    gas_snapshot_emit: Option<bool>,

    /// Exit with code 0 even if a test fails.
    #[arg(long, env = "FORGE_ALLOW_FAILURE")]
    allow_failure: bool,

    /// Suppress successful test traces and show only traces for failures.
    #[arg(long, short, env = "FORGE_SUPPRESS_SUCCESSFUL_TRACES", help_heading = "Display options")]
    suppress_successful_traces: bool,

    /// Output test results as JUnit XML report.
    #[arg(long, conflicts_with_all = ["quiet", "json", "gas_report", "summary", "list", "show_progress"], help_heading = "Display options")]
    pub junit: bool,

    /// Stop running tests after the first failure.
    #[arg(long)]
    pub fail_fast: bool,

    /// The Etherscan (or equivalent) API key.
    #[arg(long, env = "ETHERSCAN_API_KEY", value_name = "KEY")]
    etherscan_api_key: Option<String>,

    /// The Etherscan API version.
    #[arg(long, env = "ETHERSCAN_API_VERSION", value_name = "VERSION")]
    etherscan_api_version: Option<EtherscanApiVersion>,

    /// List tests instead of running them.
    #[arg(long, short, conflicts_with_all = ["show_progress", "decode_internal", "summary"], help_heading = "Display options")]
    list: bool,

    /// Use pallet-revive runtime backend (evm or pvm mode).
    ///
    /// Controls which runtime backend to use during test execution:
    /// - No flag or --polkadot=evm: Use EVM backend (single compilation, fast)
    /// - --polkadot=pvm: Use PVM backend (dual compilation required)
    #[arg(
        long = "polkadot",
        value_name = "MODE",
        num_args = 0..=1,
        default_missing_value = "evm",
        require_equals = true
    )]
    polkadot: Option<PolkadotMode>,
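    // Illustrative invocations (inferred from the attribute above, not part of the original
    // source): `forge test --polkadot` is equivalent to `--polkadot=evm`, while
    // `forge test --polkadot=pvm` selects the PVM backend and forces dual compilation.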

    /// Set seed used to generate randomness during your fuzz runs.
    #[arg(long)]
    pub fuzz_seed: Option<U256>,

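    // Note: this flag has no doc comment in the original; based on its name and the
    // `FOUNDRY_FUZZ_RUNS` env var, it sets the number of fuzz runs per fuzz test.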
    #[arg(long, env = "FOUNDRY_FUZZ_RUNS", value_name = "RUNS")]
    pub fuzz_runs: Option<u64>,

    /// Timeout for each fuzz run in seconds.
    #[arg(long, env = "FOUNDRY_FUZZ_TIMEOUT", value_name = "TIMEOUT")]
    pub fuzz_timeout: Option<u64>,

    /// File to rerun fuzz failures from.
    #[arg(long)]
    pub fuzz_input_file: Option<String>,

    /// Maximum integer value for fuzz tests.
    /// Accepts decimal, hex (0x...), or keywords: "u128", "u64".
    #[arg(long, env = "FOUNDRY_FUZZ_INT_MAX", value_name = "VALUE")]
    pub fuzz_int_max: Option<String>,
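    // Illustrative values (assumptions based on the doc comment above and on
    // `parse_fuzz_int_max` below): `--fuzz-int-max 1000000`, `--fuzz-int-max 0xffff`,
    // `--fuzz-int-max u64`.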

    /// Show test execution progress.
    #[arg(long, conflicts_with_all = ["quiet", "json"], help_heading = "Display options")]
    pub show_progress: bool,

    /// Re-run recorded test failures from last run.
    /// If no failure is recorded, a regular test run is performed.
    #[arg(long)]
    pub rerun: bool,

    /// Print test summary table.
    #[arg(long, help_heading = "Display options")]
    pub summary: bool,

    /// Print detailed test summary table.
    #[arg(long, help_heading = "Display options", requires = "summary")]
    pub detailed: bool,

    #[command(flatten)]
    filter: FilterArgs,

    #[command(flatten)]
    evm: EvmArgs,

    #[command(flatten)]
    pub build: BuildOpts,

    #[command(flatten)]
    pub watch: WatchArgs,
}

impl TestArgs {
    pub async fn run(self) -> Result<TestOutcome> {
        trace!(target: "forge::test", "executing test command");
        self.execute_tests().await
    }

    /// Returns sources which include any tests to be executed.
    /// If no filters are provided, sources are filtered by the existence of test/invariant
    /// methods in them; if filters are provided, sources are additionally filtered by them.
    pub fn get_sources_to_compile(
        &self,
        config: &Config,
        filter: &ProjectPathsAwareFilter,
    ) -> Result<BTreeSet<PathBuf>> {
        let mut project = config.create_project(true, true)?;
        project.update_output_selection(|selection| {
            *selection = OutputSelection::common_output_selection(["abi".to_string()]);
        });

        let output = project.compile()?;

        if output.has_compiler_errors() {
            sh_println!("{output}")?;
            eyre::bail!("Compilation failed");
        }

        // ABIs of all sources
        let abis = output
            .into_artifacts()
            .filter_map(|(id, artifact)| artifact.abi.map(|abi| (id, abi)))
            .collect::<BTreeMap<_, _>>();

        // Filter sources by their abis and contract names.
        let mut test_sources = abis
            .iter()
            .filter(|(id, abi)| matches_contract(id, abi, filter))
            .map(|(id, _)| id.source.clone())
            .collect::<BTreeSet<_>>();

        if test_sources.is_empty() {
            if filter.is_empty() {
                sh_println!(
                    "No tests found in project! \
                        Forge looks for functions that start with `test`."
                )?;
            } else {
                sh_println!("No tests match the provided pattern:")?;
                sh_print!("{filter}")?;

                // Try to suggest a test when there's no match
                if let Some(test_pattern) = &filter.args().test_pattern {
                    let test_name = test_pattern.as_str();
                    let candidates = abis
                        .into_iter()
                        .filter(|(id, _)| {
                            filter.matches_path(&id.source) && filter.matches_contract(&id.name)
                        })
                        .flat_map(|(_, abi)| abi.functions.into_keys())
                        .collect::<Vec<_>>();
                    if let Some(suggestion) = utils::did_you_mean(test_name, candidates).pop() {
                        sh_println!("\nDid you mean `{suggestion}`?")?;
                    }
                }
            }

            eyre::bail!("No tests to run");
        }

        // Always recompile all sources to ensure that `getCode` cheatcode can use any artifact.
        test_sources.extend(source_files_iter(
            &project.paths.sources,
            MultiCompilerLanguage::FILE_EXTENSIONS,
        ));

        Ok(test_sources)
    }

    /// Executes all the tests in the project.
    ///
    /// This will trigger the build process first. On success, all test contracts that match the
    /// configured filter will be executed.
    ///
    /// Returns the test results for all matching tests.
    pub async fn execute_tests(mut self) -> Result<TestOutcome> {
        // Merge all configs.
        let (mut config, mut evm_opts) = self.load_config_and_evm_opts()?;

        // Override polkadot mode from CLI flag if provided
        if let Some(polkadot_mode) = self.polkadot {
            config.polkadot.polkadot = Some(polkadot_mode);
            // Auto-enable resolc_compile when using --polkadot=pvm (required for dual compilation)
            if polkadot_mode == PolkadotMode::Pvm {
                tracing::warn!(
                    "Using 'pvm' backend is an experimental feature and may lead to unexpected behavior in tests."
                );
                config.polkadot.resolc_compile = true;
            }
        }

        // Auto-set polkadot=pvm when --resolc is used without explicit --polkadot flag
        if config.polkadot.resolc_compile && config.polkadot.polkadot.is_none() {
            tracing::warn!(
                "Using 'pvm' backend is an experimental feature and may lead to unexpected behavior in tests."
            );
            config.polkadot.polkadot = Some(PolkadotMode::Pvm);
        }

        let mut strategy = utils::get_executor_strategy(&config);

        // Explicitly enable isolation for gas reports for more correct gas accounting.
        if self.gas_report {
            evm_opts.isolate = true;
        } else {
            // Do not collect gas report traces if gas report is not enabled.
            config.fuzz.gas_report_samples = 0;
            config.invariant.gas_report_samples = 0;
        }

        // Install missing dependencies.
        if install::install_missing_dependencies(&mut config) && config.auto_detect_remappings {
            // need to re-configure here to also catch additional remappings
            config = self.load_config()?;
        }
        if config.polkadot.resolc_compile {
            config.extra_output.push(ContractOutputSelection::StorageLayout);
        }
        // Set up the project.
        let project = config.project()?;

        let filter = self.filter(&config)?;
        trace!(target: "forge::test", ?filter, "using filter");

        let sources_to_compile = self.get_sources_to_compile(&config, &filter)?;

        // Handle compilation based on whether dual compilation is enabled
        let (output, dual_compiled_contracts) = if config.polkadot.resolc_compile {
            // Dual compilation mode: compile both solc and resolc

            // Compile with solc to a subdirectory
            let mut solc_config = config.clone();
            solc_config.out = solc_config.out.join(revive::SOLC_ARTIFACTS_SUBDIR);
            solc_config.polkadot = Default::default();
            solc_config.build_info_path = Some(solc_config.out.join("build-info"));
            let solc_project = solc_config.project()?;
            let compiler = ProjectCompiler::new()
                .dynamic_test_linking(config.dynamic_test_linking)
                .quiet(shell::is_json() || self.junit)
                .files(sources_to_compile.clone());

            let solc_output = compiler.compile(&solc_project)?;

            // Compile with resolc to the main output directory
            let resolc_project = config.clone().project()?;

            let resolc_compiler = ProjectCompiler::new()
                .quiet(shell::is_json() || self.junit)
                .files(sources_to_compile)
                .size_limits(revive::CONTRACT_SIZE_LIMIT, revive::CONTRACT_SIZE_LIMIT);

            let resolc_output = resolc_compiler.compile(&resolc_project)?;

            // Create dual compiled contracts
            let dual_compiled_contracts = DualCompiledContracts::new(
                &solc_output,
                &resolc_output,
                &solc_project.paths,
                &resolc_project.paths,
            );

            (solc_output, Some(dual_compiled_contracts))
        } else {
            // Single compilation mode: compile only with solc

            let compiler: ProjectCompiler = ProjectCompiler::new()
                .dynamic_test_linking(config.dynamic_test_linking)
                .quiet(shell::is_json() || self.junit)
                .files(sources_to_compile.clone());

            let solc_output = compiler.compile(&project)?;

            (solc_output, None)
        };

        // Create test options from general project settings and compiler output.
        let project_root = &project.paths.root;

        let should_debug = self.debug;
        let should_draw = self.flamegraph || self.flamechart;

        // Determine print verbosity and executor verbosity.
        let verbosity = evm_opts.verbosity;
        if (self.gas_report && evm_opts.verbosity < 3) || self.flamegraph || self.flamechart {
            evm_opts.verbosity = 3;
        }

        let env = evm_opts.evm_env().await?;

        // Enable internal tracing for more informative flamegraph.
        if should_draw && !self.decode_internal {
            self.decode_internal = true;
        }

        // Choose the internal function tracing mode, if --decode-internal is provided.
        let decode_internal = if self.decode_internal {
            // If more than one function matched, we enable simple tracing.
            // If only one function matched, we enable full tracing. This is done in `run_tests`.
            InternalTraceMode::Simple
        } else {
            InternalTraceMode::None
        };

        // Prepare the test builder.
        let config = Arc::new(config);

        // Set dual compiled contracts on the strategy
        strategy.runner.revive_set_dual_compiled_contracts(
            strategy.context.as_mut(),
            dual_compiled_contracts.unwrap_or_default(),
        );

        let runner = MultiContractRunnerBuilder::new(config.clone())
            .set_debug(should_debug)
            .set_decode_internal(decode_internal)
            .initial_balance(evm_opts.initial_balance)
            .evm_spec(config.evm_spec_id())
            .sender(evm_opts.sender)
            .with_fork(evm_opts.get_fork(&config, env.clone()))
            .enable_isolation(evm_opts.isolate)
            .odyssey(evm_opts.odyssey)
            .build::<MultiCompiler>(strategy, project_root, &output, env, evm_opts)?;

        let libraries = runner.libraries.clone();
        let mut outcome = self.run_tests(runner, config, verbosity, &filter, &output).await?;

        if should_draw {
            let (suite_name, test_name, mut test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;

            let (_, arena) = test_result
                .traces
                .iter_mut()
                .find(|(kind, _)| *kind == TraceKind::Execution)
                .unwrap();

            // Decode traces.
            let decoder = outcome.last_run_decoder.as_ref().unwrap();
            decode_trace_arena(arena, decoder).await;
            let mut fst = folded_stack_trace::build(arena);

            let label = if self.flamegraph { "flamegraph" } else { "flamechart" };
            let contract = suite_name.split(':').next_back().unwrap();
            let test_name = test_name.trim_end_matches("()");
            let file_name = format!("cache/{label}_{contract}_{test_name}.svg");
            let file = std::fs::File::create(&file_name).wrap_err("failed to create file")?;
            let file = std::io::BufWriter::new(file);

            let mut options = inferno::flamegraph::Options::default();
            options.title = format!("{label} {contract}::{test_name}");
            options.count_name = "gas".to_string();
            if self.flamechart {
                options.flame_chart = true;
                fst.reverse();
            }

            // Generate SVG.
            inferno::flamegraph::from_lines(&mut options, fst.iter().map(String::as_str), file)
                .wrap_err("failed to write svg")?;
            sh_println!("Saved to {file_name}")?;

            // Open SVG in default program.
            if let Err(e) = opener::open(&file_name) {
                sh_err!("Failed to open {file_name}; please open it manually: {e}")?;
            }
        }

        if should_debug {
            // Get first non-empty suite result. We will have only one such entry.
            let (_, _, test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;

            let sources =
                ContractSources::from_project_output(&output, project.root(), Some(&libraries))?;

            // Run the debugger.
            let mut builder = Debugger::builder()
                .traces(
                    test_result.traces.iter().filter(|(t, _)| t.is_execution()).cloned().collect(),
                )
                .sources(sources)
                .breakpoints(test_result.breakpoints.clone());

            if let Some(decoder) = &outcome.last_run_decoder {
                builder = builder.decoder(decoder);
            }

            let mut debugger = builder.build();
            if let Some(dump_path) = self.dump {
                debugger.dump_to_file(&dump_path)?;
            } else {
                debugger.try_run_tui()?;
            }
        }

        Ok(outcome)
    }

    /// Runs all tests that match the filter predicate from a test runner.
    pub async fn run_tests(
        &self,
        mut runner: MultiContractRunner,
        config: Arc<Config>,
        verbosity: u8,
        filter: &ProjectPathsAwareFilter,
        output: &ProjectCompileOutput,
    ) -> eyre::Result<TestOutcome> {
        if self.list {
            return list(runner, filter);
        }

        trace!(target: "forge::test", "running all tests");

        // If we need to render to a serialized format, we should not print anything else to stdout.
        let silent = self.gas_report && shell::is_json() || self.summary && shell::is_json();

        let num_filtered = runner.matching_test_functions(filter).count();
        if num_filtered != 1 && (self.debug || self.flamegraph || self.flamechart) {
            let action = if self.flamegraph {
                "generate a flamegraph"
            } else if self.flamechart {
                "generate a flamechart"
            } else {
                "run the debugger"
            };
            let filter = if filter.is_empty() {
                String::new()
            } else {
                format!("\n\nFilter used:\n{filter}")
            };
            eyre::bail!(
                "{num_filtered} tests matched your criteria, but exactly 1 test must match in order to {action}.\n\n\
                 Use --match-contract and --match-path to further limit the search.{filter}",
            );
        }

        // If exactly one test matched, we enable full tracing.
        if num_filtered == 1 && self.decode_internal {
            runner.decode_internal = InternalTraceMode::Full;
        }

        // Run tests in a non-streaming fashion and collect results for serialization.
        if !self.gas_report && !self.summary && shell::is_json() {
            let mut results = runner.test_collect(filter)?;
            results.values_mut().for_each(|suite_result| {
                for test_result in suite_result.test_results.values_mut() {
                    if verbosity >= 2 {
                        // Decode logs at level 2 and above.
                        test_result.decoded_logs = decode_console_logs(&test_result.logs);
                    } else {
                        // Empty logs for non verbose runs.
                        test_result.logs = vec![];
                    }
                }
            });
            sh_println!("{}", serde_json::to_string(&results)?)?;
            return Ok(TestOutcome::new(results, self.allow_failure));
        }

        if self.junit {
            let results = runner.test_collect(filter)?;
            sh_println!("{}", junit_xml_report(&results, verbosity).to_string()?)?;
            return Ok(TestOutcome::new(results, self.allow_failure));
        }

        let remote_chain_id = runner.evm_opts.get_remote_chain_id().await;
        let known_contracts = runner.known_contracts.clone();

        let libraries = runner.libraries.clone();

        // Run tests in a streaming fashion.
        let (tx, rx) = channel::<(String, SuiteResult)>();
        let timer = Instant::now();
        let show_progress = config.show_progress;
        let handle = tokio::task::spawn_blocking({
            let filter = filter.clone();
            move || runner.test(&filter, tx, show_progress)
        });

        // Set up trace identifiers.
        let mut identifier = TraceIdentifiers::new().with_local(&known_contracts);

        // Avoid using etherscan for gas report as we decode more traces and this will be
        // expensive.
        if !self.gas_report {
            identifier = identifier.with_etherscan(&config, remote_chain_id)?;
        }

        // Build the trace decoder.
        let mut builder = CallTraceDecoderBuilder::new()
            .with_known_contracts(&known_contracts)
            .with_verbosity(verbosity);
        // Signatures are of no value for gas reports.
        if !self.gas_report {
            builder =
                builder.with_signature_identifier(SignaturesIdentifier::from_config(&config)?);
        }

        if self.decode_internal {
            let sources =
                ContractSources::from_project_output(output, &config.root, Some(&libraries))?;
            builder = builder.with_debug_identifier(DebugTraceIdentifier::new(sources));
        }
        let mut decoder = builder.build();

        let mut gas_report = self.gas_report.then(|| {
            GasReport::new(
                config.gas_reports.clone(),
                config.gas_reports_ignore.clone(),
                config.gas_reports_include_tests,
            )
        });

        let mut gas_snapshots = BTreeMap::<String, BTreeMap<String, String>>::new();

        let mut outcome = TestOutcome::empty(self.allow_failure);

        let mut any_test_failed = false;
        for (contract_name, suite_result) in rx {
            let tests = &suite_result.test_results;

            // Clear the addresses and labels from previous test.
            decoder.clear_addresses();

            // We identify addresses if we're going to print *any* trace or gas report.
            let identify_addresses = verbosity >= 3
                || self.gas_report
                || self.debug
                || self.flamegraph
                || self.flamechart;

            // Print suite header.
            if !silent {
                sh_println!()?;
                for warning in &suite_result.warnings {
                    sh_warn!("{warning}")?;
                }
                if !tests.is_empty() {
                    let len = tests.len();
                    let tests = if len > 1 { "tests" } else { "test" };
                    sh_println!("Ran {len} {tests} for {contract_name}")?;
                }
            }

            // Process individual test results, printing logs and traces when necessary.
            for (name, result) in tests {
                let show_traces =
                    !self.suppress_successful_traces || result.status == TestStatus::Failure;
                if !silent {
                    sh_println!("{}", result.short_result(name))?;

                    // Display invariant metrics if invariant kind.
                    if let TestKind::Invariant { metrics, .. } = &result.kind
                        && !metrics.is_empty()
                    {
                        let _ = sh_println!("\n{}\n", format_invariant_metrics_table(metrics));
                    }

                    // We only display logs at level 2 and above
                    if verbosity >= 2 && show_traces {
                        // We only decode logs from Hardhat and DS-style console events
                        let console_logs = decode_console_logs(&result.logs);
                        if !console_logs.is_empty() {
                            sh_println!("Logs:")?;
                            for log in console_logs {
                                sh_println!("  {log}")?;
                            }
                            sh_println!()?;
                        }
                    }
                }

                // We shouldn't break out of the outer loop directly here so that we finish
                // processing the remaining tests and print the suite summary.
                any_test_failed |= result.status == TestStatus::Failure;

                // Clear the addresses and labels from previous runs.
                decoder.clear_addresses();
                decoder
                    .labels
                    .extend(result.labeled_addresses.iter().map(|(k, v)| (*k, v.clone())));

                // Identify addresses and decode traces.
                let mut decoded_traces = Vec::with_capacity(result.traces.len());
                for (kind, arena) in &mut result.traces.clone() {
                    if identify_addresses {
                        decoder.identify(arena, &mut identifier);
                    }

                    // verbosity:
                    // - 0..3: nothing
                    // - 3: only display traces for failed tests
                    // - 4: also display the setup trace for failed tests
                    // - 5..: display all traces for all tests, including storage changes
                    let should_include = match kind {
                        TraceKind::Execution => {
                            (verbosity == 3 && result.status.is_failure()) || verbosity >= 4
                        }
                        TraceKind::Setup => {
                            (verbosity == 4 && result.status.is_failure()) || verbosity >= 5
                        }
                        TraceKind::Deployment => false,
                    };

                    if should_include {
                        decode_trace_arena(arena, &decoder).await;
                        decoded_traces.push(render_trace_arena_inner(arena, false, verbosity > 4));
                    }
                }

                if !silent && show_traces && !decoded_traces.is_empty() {
                    sh_println!("Traces:")?;
                    for trace in &decoded_traces {
                        sh_println!("{trace}")?;
                    }
                }

                if let Some(gas_report) = &mut gas_report {
                    gas_report.analyze(result.traces.iter().map(|(_, a)| &a.arena), &decoder).await;

                    for trace in &result.gas_report_traces {
                        decoder.clear_addresses();

                        // Re-execute setup and deployment traces to collect identities created in
                        // setUp and constructor.
                        for (kind, arena) in &result.traces {
                            if !matches!(kind, TraceKind::Execution) {
                                decoder.identify(arena, &mut identifier);
                            }
                        }

                        for arena in trace {
                            decoder.identify(arena, &mut identifier);
                            gas_report.analyze([arena], &decoder).await;
                        }
                    }
                }

                // Collect and merge gas snapshots.
                for (group, new_snapshots) in &result.gas_snapshots {
                    gas_snapshots.entry(group.clone()).or_default().extend(new_snapshots.clone());
                }
            }

            // Write gas snapshots to disk if any were collected.
            if !gas_snapshots.is_empty() {
                // By default `gas_snapshot_check` is set to `false` in the config.
                //
                // The user can either:
                // - Set `FORGE_SNAPSHOT_CHECK=true` in the environment.
                // - Pass `--gas-snapshot-check=true` as a CLI argument.
                // - Set `gas_snapshot_check = true` in the config.
                //
                // If the user passes `--gas-snapshot-check=<bool>` then it will override the config
                // and the environment variable, disabling the check if `false` is passed.
                //
                // Exiting early with code 1 if differences are found.
                if self.gas_snapshot_check.unwrap_or(config.gas_snapshot_check) {
                    let differences_found = gas_snapshots.clone().into_iter().fold(
                        false,
                        |mut found, (group, snapshots)| {
                            // If the snapshot file doesn't exist, we can't compare so we skip.
                            if !&config.snapshots.join(format!("{group}.json")).exists() {
                                return false;
                            }

                            let previous_snapshots: BTreeMap<String, String> =
                                fs::read_json_file(&config.snapshots.join(format!("{group}.json")))
                                    .expect("Failed to read snapshots from disk");

                            let diff: BTreeMap<_, _> = snapshots
                                .iter()
                                .filter_map(|(k, v)| {
                                    previous_snapshots.get(k).and_then(|previous_snapshot| {
                                        if previous_snapshot != v {
                                            Some((
                                                k.clone(),
                                                (previous_snapshot.clone(), v.clone()),
                                            ))
                                        } else {
                                            None
                                        }
                                    })
                                })
                                .collect();

                            if !diff.is_empty() {
                                let _ = sh_eprintln!(
                                    "{}",
                                    format!("\n[{group}] Failed to match snapshots:").red().bold()
                                );

                                for (key, (previous_snapshot, snapshot)) in &diff {
                                    let _ = sh_eprintln!(
                                        "{}",
                                        format!("- [{key}] {previous_snapshot} → {snapshot}").red()
                                    );
                                }

                                found = true;
                            }

                            found
                        },
                    );

                    if differences_found {
                        sh_eprintln!()?;
                        eyre::bail!("Snapshots differ from previous run");
                    }
                }

                // By default `gas_snapshot_emit` is set to `true` in the config.
                //
                // The user can either:
                // - Set `FORGE_SNAPSHOT_EMIT=false` in the environment.
                // - Pass `--gas-snapshot-emit=false` as a CLI argument.
                // - Set `gas_snapshot_emit = false` in the config.
                //
                // If the user passes `--gas-snapshot-emit=<bool>` then it will override the config
                // and the environment variable, enabling the check if `true` is passed.
                if self.gas_snapshot_emit.unwrap_or(config.gas_snapshot_emit) {
                    // Create `snapshots` directory if it doesn't exist.
                    fs::create_dir_all(&config.snapshots)?;

                    // Write gas snapshots to disk per group.
                    gas_snapshots.clone().into_iter().for_each(|(group, snapshots)| {
                        fs::write_pretty_json_file(
                            &config.snapshots.join(format!("{group}.json")),
                            &snapshots,
                        )
                        .expect("Failed to write gas snapshots to disk");
                    });
                }
            }

            // Print suite summary.
            if !silent {
                sh_println!("{}", suite_result.summary())?;
            }

            // Add the suite result to the outcome.
            outcome.results.insert(contract_name, suite_result);

            // Stop processing the remaining suites if any test failed and `fail_fast` is set.
            if self.fail_fast && any_test_failed {
                break;
            }
        }
        outcome.last_run_decoder = Some(decoder);
        let duration = timer.elapsed();

        trace!(target: "forge::test", len=outcome.results.len(), %any_test_failed, "done with results");

        if let Some(gas_report) = gas_report {
            let finalized = gas_report.finalize();
            sh_println!("{}", &finalized)?;
            outcome.gas_report = Some(finalized);
        }

        if !self.summary && !shell::is_json() {
            sh_println!("{}", outcome.summary(duration))?;
        }

        if self.summary && !outcome.results.is_empty() {
            let summary_report = TestSummaryReport::new(self.detailed, outcome.clone());
            sh_println!("{}", &summary_report)?;
        }

        // Reattach the task.
        if let Err(e) = handle.await {
            match e.try_into_panic() {
                Ok(payload) => std::panic::resume_unwind(payload),
                Err(e) => return Err(e.into()),
            }
        }

        // Persist test run failures to enable replaying.
        persist_run_failures(&config, &outcome);

        Ok(outcome)
    }

    /// Returns the flattened [`FilterArgs`] arguments merged with [`Config`].
    /// Loads and applies the filter from file when rerunning only the last test run's failures.
    pub fn filter(&self, config: &Config) -> Result<ProjectPathsAwareFilter> {
        let mut filter = self.filter.clone();
        if self.rerun {
            filter.test_pattern = last_run_failures(config);
        }
        if filter.path_pattern.is_some() {
            if self.path.is_some() {
                bail!("Can not supply both --match-path and |path|");
            }
        } else {
            filter.path_pattern = self.path.clone();
        }
        Ok(filter.merge_with_config(config))
    }

    /// Returns whether `TestArgs` was configured with `--watch`.
    pub fn is_watch(&self) -> bool {
        self.watch.watch.is_some()
    }

    /// Returns the [`watchexec::Config`] necessary to bootstrap a new watch loop.
    pub(crate) fn watchexec_config(&self) -> Result<watchexec::Config> {
        self.watch.watchexec_config(|| {
            let config = self.load_config()?;
            let foundry_toml: PathBuf = config.root.join(Config::FILE_NAME);
            Ok([config.src, config.test, config.script, foundry_toml])
        })
    }
}

impl Provider for TestArgs {
    fn metadata(&self) -> Metadata {
        Metadata::named("Core Build Args Provider")
    }

    fn data(&self) -> Result<Map<Profile, Dict>, figment::Error> {
        let mut dict = Dict::default();

        let mut fuzz_dict = Dict::default();
        if let Some(fuzz_seed) = self.fuzz_seed {
            fuzz_dict.insert("seed".to_string(), fuzz_seed.to_string().into());
        }
        if let Some(fuzz_runs) = self.fuzz_runs {
            fuzz_dict.insert("runs".to_string(), fuzz_runs.into());
        }
        if let Some(fuzz_timeout) = self.fuzz_timeout {
            fuzz_dict.insert("timeout".to_string(), fuzz_timeout.into());
        }
        if let Some(fuzz_input_file) = self.fuzz_input_file.clone() {
            fuzz_dict.insert("failure_persist_file".to_string(), fuzz_input_file.into());
        }
        if let Some(ref fuzz_int_max) = self.fuzz_int_max
            && let Ok(max_val) = parse_fuzz_int_max(fuzz_int_max)
        {
            fuzz_dict.insert("max_fuzz_int".to_string(), max_val.to_string().into());
        }
        dict.insert("fuzz".to_string(), fuzz_dict.into());

        if let Some(etherscan_api_key) =
            self.etherscan_api_key.as_ref().filter(|s| !s.trim().is_empty())
        {
            dict.insert("etherscan_api_key".to_string(), etherscan_api_key.to_string().into());
        }

        if let Some(api_version) = &self.etherscan_api_version {
            dict.insert("etherscan_api_version".to_string(), api_version.to_string().into());
        }

        if self.show_progress {
            dict.insert("show_progress".to_string(), true.into());
        }

        Ok(Map::from([(Config::selected_profile(), dict)]))
    }
}

/// Lists all matching tests
fn list(runner: MultiContractRunner, filter: &ProjectPathsAwareFilter) -> Result<TestOutcome> {
    let results = runner.list(filter);

    if shell::is_json() {
        sh_println!("{}", serde_json::to_string(&results)?)?;
    } else {
        for (file, contracts) in &results {
            sh_println!("{file}")?;
            for (contract, tests) in contracts {
                sh_println!("  {contract}")?;
                sh_println!("    {}\n", tests.join("\n    "))?;
            }
        }
    }
    Ok(TestOutcome::empty(false))
}

/// Load persisted filter (with last test run failures) from file.
fn last_run_failures(config: &Config) -> Option<regex::Regex> {
    match fs::read_to_string(&config.test_failures_file) {
        Ok(filter) => Some(Regex::new(&filter).unwrap()),
        Err(_) => None,
    }
}

/// Persist filter with last test run failures (only if there's any failure).
fn persist_run_failures(config: &Config, outcome: &TestOutcome) {
    if outcome.failed() > 0 && fs::create_file(&config.test_failures_file).is_ok() {
        let mut filter = String::new();
        let mut failures = outcome.failures().peekable();
        while let Some((test_name, _)) = failures.next() {
            if test_name.is_any_test()
                && let Some(test_match) = test_name.split("(").next()
            {
                filter.push_str(test_match);
                if failures.peek().is_some() {
                    filter.push('|');
                }
            }
        }
        let _ = fs::write(&config.test_failures_file, filter);
    }
}
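
// For illustration (an assumed example, not from the original source): if `testFoo()` and
// `testBar(uint256)` fail, the persisted filter string is "testFoo|testBar", which
// `last_run_failures` compiles back into a regex when `--rerun` is passed.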

/// Generates a test report in JUnit XML format.
fn junit_xml_report(results: &BTreeMap<String, SuiteResult>, verbosity: u8) -> Report {
    let mut total_duration = Duration::default();
    let mut junit_report = Report::new("Test run");
    junit_report.set_timestamp(Utc::now());
    for (suite_name, suite_result) in results {
        let mut test_suite = TestSuite::new(suite_name);
        total_duration += suite_result.duration;
        test_suite.set_time(suite_result.duration);
        test_suite.set_system_out(suite_result.summary());
        for (test_name, test_result) in &suite_result.test_results {
            let mut test_status = match test_result.status {
                TestStatus::Success => TestCaseStatus::success(),
                TestStatus::Failure => TestCaseStatus::non_success(NonSuccessKind::Failure),
                TestStatus::Skipped => TestCaseStatus::skipped(),
            };
            if let Some(reason) = &test_result.reason {
                test_status.set_message(reason);
            }

            let mut test_case = TestCase::new(test_name, test_status);
            test_case.set_time(test_result.duration);

            let mut sys_out = String::new();
            let result_report = test_result.kind.report();
            write!(sys_out, "{test_result} {test_name} {result_report}").unwrap();
            if verbosity >= 2 && !test_result.logs.is_empty() {
                write!(sys_out, "\\nLogs:\\n").unwrap();
                let console_logs = decode_console_logs(&test_result.logs);
                for log in console_logs {
                    write!(sys_out, "  {log}\\n").unwrap();
                }
            }

            test_case.set_system_out(sys_out);
            test_suite.add_test_case(test_case);
        }
        junit_report.add_test_suite(test_suite);
    }
    junit_report.set_time(total_duration);
    junit_report
}
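
// Rough shape of the emitted report (an illustrative sketch based on the `quick_junit`
// calls above, not verbatim output):
//
//   <testsuites name="Test run" timestamp="..." time="...">
//     <testsuite name="MyContractTest" time="...">
//       <testcase name="test_something()" time="..."/>
//     </testsuite>
//   </testsuites>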

/// Parses the fuzz-int-max value from string to U256.
/// Supports:
/// - Decimal: "340282366920938463463374607431768211455"
/// - Hex: "0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"
/// - Keywords: "u128", "u128_max", "u64", "u64_max"
fn parse_fuzz_int_max(value: &str) -> Result<U256> {
    let value = value.trim().to_lowercase();
    match value.as_str() {
        "u128" | "u128_max" => Ok(U256::from(u128::MAX)),
        "u64" | "u64_max" => Ok(U256::from(u64::MAX)),
        _ if value.starts_with("0x") => U256::from_str_radix(&value[2..], 16)
            .map_err(|e| eyre::eyre!("Invalid hex value for --fuzz-int-max: {e}")),
        _ => {
            value.parse::<U256>().map_err(|e| eyre::eyre!("Invalid value for --fuzz-int-max: {e}"))
        }
    }
}
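
// A minimal sketch of the expected behavior (illustrative, not part of the original source):
//
//   assert_eq!(parse_fuzz_int_max("u64").unwrap(), U256::from(u64::MAX));
//   assert_eq!(parse_fuzz_int_max("0x10").unwrap(), U256::from(16u64));
//   assert!(parse_fuzz_int_max("not-a-number").is_err());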

#[cfg(test)]
mod tests {
    use super::*;
    use foundry_config::Chain;

    #[test]
    fn watch_parse() {
        let args: TestArgs = TestArgs::parse_from(["foundry-cli", "-vw"]);
        assert!(args.watch.watch.is_some());
    }

    #[test]
    fn fuzz_seed() {
        let args: TestArgs = TestArgs::parse_from(["foundry-cli", "--fuzz-seed", "0x10"]);
        assert!(args.fuzz_seed.is_some());
    }

    // <https://github.com/foundry-rs/foundry/issues/5913>
    #[test]
    fn fuzz_seed_exists() {
        let args: TestArgs =
            TestArgs::parse_from(["foundry-cli", "-vvv", "--gas-report", "--fuzz-seed", "0x10"]);
        assert!(args.fuzz_seed.is_some());
    }

    #[test]
    fn extract_chain() {
        let test = |arg: &str, expected: Chain| {
            let args = TestArgs::parse_from(["foundry-cli", arg]);
            assert_eq!(args.evm.env.chain, Some(expected));
            let (config, evm_opts) = args.load_config_and_evm_opts().unwrap();
            assert_eq!(config.chain, Some(expected));
            assert_eq!(evm_opts.env.chain_id, Some(expected.id()));
        };
        test("--chain-id=1", Chain::mainnet());
        test("--chain-id=42", Chain::from_id(42));
    }
}