diff --git a/crates/evm/coverage/src/analysis.rs b/crates/evm/coverage/src/analysis.rs
index ea69b3113a9d..9f1977dee51d 100644
--- a/crates/evm/coverage/src/analysis.rs
+++ b/crates/evm/coverage/src/analysis.rs
@@ -1,5 +1,6 @@
 use super::{CoverageItem, CoverageItemKind, SourceLocation};
 use alloy_primitives::map::HashMap;
+use core::fmt;
 use foundry_common::TestFunctionExt;
 use foundry_compilers::artifacts::ast::{self, Ast, Node, NodeType};
 use rayon::prelude::*;
@@ -9,7 +10,7 @@ use std::{borrow::Cow, sync::Arc};
 #[derive(Clone, Debug)]
 pub struct ContractVisitor<'a> {
     /// The source ID of the contract.
-    source_id: usize,
+    source_id: SourceIdentifier,
     /// The source code that contains the AST being walked.
     source: &'a str,
 
@@ -26,7 +27,7 @@ pub struct ContractVisitor<'a> {
 }
 
 impl<'a> ContractVisitor<'a> {
-    pub fn new(source_id: usize, source: &'a str, contract_name: &'a Arc<str>) -> Self {
+    pub fn new(source_id: SourceIdentifier, source: &'a str, contract_name: &'a Arc<str>) -> Self {
         Self { source_id, source, contract_name, branch_id: 0, last_line: 0, items: Vec::new() }
     }
 
@@ -473,7 +474,7 @@ impl<'a> ContractVisitor<'a> {
         let loc_start =
             self.source.char_indices().map(|(i, _)| i).nth(loc.start).unwrap_or_default();
         SourceLocation {
-            source_id: self.source_id,
+            source_id: self.source_id.clone(),
             contract_name: self.contract_name.clone(),
             start: loc.start as u32,
             length: loc.length.map(|x| x as u32),
@@ -538,7 +539,7 @@ impl<'a> SourceAnalyzer<'a> {
             .sources
             .sources
             .par_iter()
-            .flat_map_iter(|(&source_id, SourceFile { source, ast })| {
+            .flat_map_iter(|(source_id, SourceFile { source, ast })| {
                 ast.nodes.iter().map(move |node| {
                     if !matches!(node.node_type, NodeType::ContractDefinition) {
                         return Ok(vec![]);
@@ -556,7 +557,7 @@ impl<'a> SourceAnalyzer<'a> {
                         .attribute("name")
                         .ok_or_else(|| eyre::eyre!("Contract has no name"))?;
 
-                    let mut visitor = ContractVisitor::new(source_id, source, &name);
+                    let mut visitor = ContractVisitor::new(source_id.clone(), source, &name);
                     visitor.visit_contract(node)?;
                     let mut items = visitor.items;
 
@@ -583,7 +584,31 @@ impl<'a> SourceAnalyzer<'a> {
 #[derive(Debug, Default)]
 pub struct SourceFiles<'a> {
     /// The versioned sources.
-    pub sources: HashMap<usize, SourceFile<'a>>,
+    /// Keyed by build_id and source_id.
+    pub sources: HashMap<SourceIdentifier, SourceFile<'a>>,
+}
+
+/// Serves as a unique identifier for sources across multiple compiler runs.
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct SourceIdentifier {
+    /// Source ID is unique for each source file per compilation job but may not be across
+    /// different jobs.
+    pub source_id: usize,
+    /// Artifact build id is same for all sources in a single compilation job. But always unique
+    /// across different jobs.
+    pub build_id: String,
+}
+
+impl fmt::Display for SourceIdentifier {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "source_id={} build_id={}", self.source_id, self.build_id,)
+    }
+}
+
+impl SourceIdentifier {
+    pub fn new(source_id: usize, build_id: String) -> Self {
+        Self { source_id, build_id }
+    }
 }
 
 /// The source code and AST of a file.
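Illustrative sketch (not part of the diff): the `PartialEq`/`Eq`/`Hash` derives on `SourceIdentifier` exist so the pair (source_id, build_id) can serve as a map key that stays unique across compilation jobs. A minimal, standalone example of that property, with made-up build ids and file paths:

use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct SourceIdentifier {
    source_id: usize,
    build_id: String,
}

fn main() {
    let mut paths: HashMap<SourceIdentifier, &str> = HashMap::new();
    // Two compilation jobs may reuse the same numeric source_id for different files;
    // pairing it with the per-job build_id keeps both entries distinct.
    paths.insert(SourceIdentifier { source_id: 0, build_id: "build-a".into() }, "src/A.sol");
    paths.insert(SourceIdentifier { source_id: 0, build_id: "build-b".into() }, "src/B.sol");
    // With a plain usize key the second insert would have overwritten the first.
    assert_eq!(paths.len(), 2);
}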
diff --git a/crates/evm/coverage/src/anchors.rs b/crates/evm/coverage/src/anchors.rs
index 6643524d61de..4d5e9e45e8f4 100644
--- a/crates/evm/coverage/src/anchors.rs
+++ b/crates/evm/coverage/src/anchors.rs
@@ -1,3 +1,5 @@
+use crate::analysis::SourceIdentifier;
+
 use super::{CoverageItem, CoverageItemKind, ItemAnchor, SourceLocation};
 use alloy_primitives::map::{DefaultHashBuilder, HashMap, HashSet};
 use eyre::ensure;
@@ -11,12 +13,13 @@ pub fn find_anchors(
     source_map: &SourceMap,
     ic_pc_map: &IcPcMap,
     items: &[CoverageItem],
-    items_by_source_id: &HashMap<usize, Vec<usize>>,
+    items_by_source_id: &HashMap<SourceIdentifier, Vec<usize>>,
+    source_id: &SourceIdentifier,
 ) -> Vec<ItemAnchor> {
     let mut seen = HashSet::with_hasher(DefaultHashBuilder::default());
     source_map
         .iter()
-        .filter_map(|element| items_by_source_id.get(&(element.index()? as usize)))
+        .filter_map(|_element| items_by_source_id.get(source_id))
         .flatten()
         .filter_map(|&item_id| {
             if !seen.insert(item_id) {
@@ -171,7 +174,8 @@ pub fn find_anchor_branch(
 /// Calculates whether `element` is within the range of the target `location`.
 fn is_in_source_range(element: &SourceElement, location: &SourceLocation) -> bool {
     // Source IDs must match.
-    let source_ids_match = element.index().is_some_and(|a| a as usize == location.source_id);
+    let source_ids_match =
+        element.index().is_some_and(|a| a as usize == location.source_id.source_id);
     if !source_ids_match {
         return false;
     }
diff --git a/crates/evm/coverage/src/lib.rs b/crates/evm/coverage/src/lib.rs
index ad4ab53e3cf1..481cc0bb1f96 100644
--- a/crates/evm/coverage/src/lib.rs
+++ b/crates/evm/coverage/src/lib.rs
@@ -12,6 +12,7 @@ extern crate foundry_common;
 extern crate tracing;
 
 use alloy_primitives::{map::HashMap, Bytes, B256};
+use analysis::SourceIdentifier;
 use eyre::{Context, Result};
 use foundry_compilers::artifacts::sourcemap::SourceMap;
 use semver::Version;
@@ -36,9 +37,9 @@ pub use inspector::CoverageCollector;
 #[derive(Clone, Debug, Default)]
 pub struct CoverageReport {
     /// A map of source IDs to the source path.
-    pub source_paths: HashMap<(Version, usize), PathBuf>,
+    pub source_paths: HashMap<(Version, SourceIdentifier), PathBuf>,
     /// A map of source paths to source IDs.
-    pub source_paths_to_ids: HashMap<(Version, PathBuf), usize>,
+    pub source_paths_to_ids: HashMap<(Version, PathBuf), SourceIdentifier>,
     /// All coverage items for the codebase, keyed by the compiler version.
     pub items: HashMap<Version, Vec<CoverageItem>>,
     /// All item anchors for the codebase, keyed by their contract ID.
@@ -51,14 +52,14 @@ pub struct CoverageReport {
 
 impl CoverageReport {
     /// Add a source file path.
-    pub fn add_source(&mut self, version: Version, source_id: usize, path: PathBuf) {
-        self.source_paths.insert((version.clone(), source_id), path.clone());
+    pub fn add_source(&mut self, version: Version, source_id: SourceIdentifier, path: PathBuf) {
+        self.source_paths.insert((version.clone(), source_id.clone()), path.clone());
         self.source_paths_to_ids.insert((version, path), source_id);
     }
 
     /// Get the source ID for a specific source file path.
-    pub fn get_source_id(&self, version: Version, path: PathBuf) -> Option<usize> {
-        self.source_paths_to_ids.get(&(version, path)).copied()
+    pub fn get_source_id(&self, version: Version, path: PathBuf) -> Option<SourceIdentifier> {
+        self.source_paths_to_ids.get(&(version, path)).cloned()
     }
 
     /// Add the source maps.
@@ -89,7 +90,7 @@ impl CoverageReport {
         for (version, items) in self.items.iter() {
             for item in items {
                 let Some(path) =
-                    self.source_paths.get(&(version.clone(), item.loc.source_id)).cloned()
+                    self.source_paths.get(&(version.clone(), item.loc.source_id.clone())).cloned()
                 else {
                     continue;
                 };
@@ -107,7 +108,7 @@ impl CoverageReport {
         for (version, items) in self.items.iter() {
             for item in items {
                 let Some(path) =
-                    self.source_paths.get(&(version.clone(), item.loc.source_id)).cloned()
+                    self.source_paths.get(&(version.clone(), item.loc.source_id.clone())).cloned()
                 else {
                     continue;
                 };
@@ -160,7 +161,7 @@ impl CoverageReport {
         self.items.retain(|version, items| {
             items.retain(|item| {
                 self.source_paths
-                    .get(&(version.clone(), item.loc.source_id))
+                    .get(&(version.clone(), item.loc.source_id.clone()))
                     .map(|path| filter(path))
                     .unwrap_or(false)
             });
@@ -259,7 +260,7 @@ impl HitMap {
 #[derive(Clone, Debug, PartialEq, Eq, Hash)]
 pub struct ContractId {
     pub version: Version,
-    pub source_id: usize,
+    pub source_id: SourceIdentifier,
     pub contract_name: Arc<str>,
 }
 
@@ -348,7 +349,7 @@ impl Display for CoverageItem {
 #[derive(Clone, Debug)]
 pub struct SourceLocation {
     /// The source ID.
-    pub source_id: usize,
+    pub source_id: SourceIdentifier,
     /// The contract this source range is in.
     pub contract_name: Arc<str>,
     /// Start byte in the source code.
diff --git a/crates/forge/bin/cmd/coverage.rs b/crates/forge/bin/cmd/coverage.rs
index e8146528612f..2dceea991512 100644
--- a/crates/forge/bin/cmd/coverage.rs
+++ b/crates/forge/bin/cmd/coverage.rs
@@ -4,7 +4,7 @@ use clap::{Parser, ValueEnum, ValueHint};
 use eyre::{Context, Result};
 use forge::{
     coverage::{
-        analysis::{SourceAnalysis, SourceAnalyzer, SourceFile, SourceFiles},
+        analysis::{SourceAnalysis, SourceAnalyzer, SourceFile, SourceFiles, SourceIdentifier},
         anchors::find_anchors,
         BytecodeReporter, ContractId, CoverageReport, CoverageReporter, DebugReporter, ItemAnchor,
         LcovReporter, SummaryReporter,
@@ -162,7 +162,6 @@ impl CoverageArgs {
         // Collect source files.
         let project_paths = &project.paths;
         let mut versioned_sources = HashMap::<Version, SourceFiles<'_>>::default();
-        // Account cached and freshly compiled sources
         for (id, artifact) in output.artifact_ids() {
             // Filter out dependencies
@@ -171,6 +170,7 @@ impl CoverageArgs {
             }
 
             let version = id.version;
+            let build_id = id.build_id;
             let source_file = if let Some(source_file) = artifact.source_file() {
                 source_file
             } else {
@@ -178,7 +178,8 @@ impl CoverageArgs {
                 continue;
             };
 
-            report.add_source(version.clone(), source_file.id as usize, id.source.clone());
+            let identifier = SourceIdentifier::new(source_file.id as usize, build_id.clone());
+            report.add_source(version.clone(), identifier.clone(), id.source.clone());
 
             if let Some(ast) = source_file.ast {
                 let file = project_paths.root.join(id.source);
@@ -194,7 +195,7 @@ impl CoverageArgs {
                     .entry(version.clone())
                     .or_default()
                     .sources
-                    .insert(source_file.id as usize, source);
+                    .insert(identifier, source);
             }
         }
 
@@ -219,17 +220,23 @@ impl CoverageArgs {
             );
 
             for (item_id, item) in source_analysis.items.iter().enumerate() {
-                items_by_source_id.entry(item.loc.source_id).or_default().push(item_id);
+                items_by_source_id.entry(item.loc.source_id.clone()).or_default().push(item_id);
             }
 
             let anchors = artifacts
                 .par_iter()
                 .filter(|artifact| artifact.contract_id.version == *version)
                 .map(|artifact| {
-                    let creation_code_anchors =
-                        artifact.creation.find_anchors(&source_analysis, &items_by_source_id);
-                    let deployed_code_anchors =
-                        artifact.deployed.find_anchors(&source_analysis, &items_by_source_id);
+                    let creation_code_anchors = artifact.creation.find_anchors(
+                        &source_analysis,
+                        &items_by_source_id,
+                        &artifact.contract_id.source_id,
+                    );
+                    let deployed_code_anchors = artifact.deployed.find_anchors(
+                        &source_analysis,
+                        &items_by_source_id,
+                        &artifact.contract_id.source_id,
+                    );
                     (artifact.contract_id.clone(), (creation_code_anchors, deployed_code_anchors))
                 })
                 .collect::<HashMap<_, _>>();
@@ -387,7 +394,11 @@ pub struct ArtifactData {
 }
 
 impl ArtifactData {
-    pub fn new(id: &ArtifactId, source_id: usize, artifact: &impl Artifact) -> Option<Self> {
+    pub fn new(
+        id: &ArtifactId,
+        source_id: SourceIdentifier,
+        artifact: &impl Artifact,
+    ) -> Option<Self> {
         Some(Self {
             contract_id: ContractId {
                 version: id.version.clone(),
@@ -432,7 +443,8 @@ impl BytecodeData {
     pub fn find_anchors(
         &self,
         source_analysis: &SourceAnalysis,
-        items_by_source_id: &HashMap<usize, Vec<usize>>,
+        items_by_source_id: &HashMap<SourceIdentifier, Vec<usize>>,
+        source_id: &SourceIdentifier,
     ) -> Vec<ItemAnchor> {
         find_anchors(
             &self.bytecode,
@@ -440,6 +452,7 @@ impl BytecodeData {
             &self.ic_pc_map,
             &source_analysis.items,
             items_by_source_id,
+            source_id,
         )
     }
 }
diff --git a/crates/forge/src/coverage.rs b/crates/forge/src/coverage.rs
index de8d0a8aa853..80a14119685b 100644
--- a/crates/forge/src/coverage.rs
+++ b/crates/forge/src/coverage.rs
@@ -222,8 +222,10 @@ impl CoverageReporter for BytecodeReporter {
                     .map(|h| format!("[{h:03}]"))
                     .unwrap_or("     ".to_owned());
                 let source_id = source_element.index();
-                let source_path = source_id.and_then(|i| {
-                    report.source_paths.get(&(contract_id.version.clone(), i as usize))
+                let source_path = source_id.and_then(|_i| {
+                    report
+                        .source_paths
+                        .get(&(contract_id.version.clone(), contract_id.source_id.clone()))
                 });
 
                 let code = format!("{code:?}");
diff --git a/crates/forge/tests/cli/coverage.rs b/crates/forge/tests/cli/coverage.rs
index a8d722e0728c..67a0dd4d1304 100644
--- a/crates/forge/tests/cli/coverage.rs
+++ b/crates/forge/tests/cli/coverage.rs
@@ -1662,22 +1662,35 @@ forgetest!(test_coverage_multi_solc_versions, |prj, cmd| {
 });
 
 // checks that `clean` also works with the "out" value set in Config
-forgetest!(coverage_cache, |prj, cmd| {
-    prj.add_source("A", r#"
+// this test verifies that the coverage is preserved across compiler runs that may result in files
+// with different source_id's
+forgetest!(coverage_cache_across_compiler_runs, |prj, cmd| {
+    prj.add_source(
+        "A",
+        r#"
 contract A {
     function f() public pure returns (uint) {
         return 1;
     }
-}"#).unwrap();
+}"#,
+    )
+    .unwrap();
 
-    prj.add_source("B", r#"
+    prj.add_source(
+        "B",
+        r#"
 contract B {
     function f() public pure returns (uint) {
         return 1;
     }
-}"#).unwrap();
+}"#,
+    )
+    .unwrap();
 
-    let a_test = prj.add_test("A.t.sol", r#"
+    let a_test = prj
+        .add_test(
+            "A.t.sol",
+            r#"
 import {A} from "../src/A.sol";
 
 contract ATest {
@@ -1686,9 +1699,13 @@ contract ATest {
         a.f();
     }
 }
-    "#).unwrap();
+    "#,
+        )
+        .unwrap();
 
-    prj.add_test("B.t.sol", r#"
+    prj.add_test(
+        "B.t.sol",
+        r#"
 import {B} from "../src/B.sol";
 
 contract BTest {
@@ -1697,8 +1714,10 @@ contract ATest {
         a.f();
     }
 }
-    "#).unwrap();
-
+    "#,
+    )
+    .unwrap();
+
     cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#"
 ...
 Ran 2 test suites [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
@@ -1718,7 +1737,8 @@ Ran 2 test suites [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
 | File      | % Lines       | % Statements  | % Branches    | % Funcs       |
 |-----------|---------------|---------------|---------------|---------------|
 | src/A.sol | 100.00% (1/1) | 100.00% (1/1) | 100.00% (0/0) | 100.00% (1/1) |
-| Total     | 100.00% (1/1) | 100.00% (1/1) | 100.00% (0/0) | 100.00% (1/1) |
+| src/B.sol | 100.00% (1/1) | 100.00% (1/1) | 100.00% (0/0) | 100.00% (1/1) |
+| Total     | 100.00% (2/2) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) |
 
 "#]]);
 });
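Illustrative sketch (not part of the diff): a trimmed-down model of the CoverageReport::add_source / get_source_id contract exercised by the changes above. The string Version alias, the Report name, and the reduced field set are stand-ins for illustration only, not the real foundry types; the version and build ids are made up.

use std::{collections::HashMap, path::PathBuf};

type Version = String; // stand-in for semver::Version

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct SourceIdentifier {
    source_id: usize,
    build_id: String,
}

#[derive(Default)]
struct Report {
    source_paths: HashMap<(Version, SourceIdentifier), PathBuf>,
    source_paths_to_ids: HashMap<(Version, PathBuf), SourceIdentifier>,
}

impl Report {
    fn add_source(&mut self, version: Version, source_id: SourceIdentifier, path: PathBuf) {
        // Register the path under the composite (version, source_id) key and keep the
        // reverse mapping for lookups by path.
        self.source_paths.insert((version.clone(), source_id.clone()), path.clone());
        self.source_paths_to_ids.insert((version, path), source_id);
    }

    fn get_source_id(&self, version: Version, path: PathBuf) -> Option<SourceIdentifier> {
        // SourceIdentifier owns a String, so it is not Copy: `cloned()` replaces `copied()`.
        self.source_paths_to_ids.get(&(version, path)).cloned()
    }
}

fn main() {
    let mut report = Report::default();
    let id = SourceIdentifier { source_id: 0, build_id: "build-a".into() };
    report.add_source("0.8.23".into(), id.clone(), PathBuf::from("src/A.sol"));
    assert_eq!(report.get_source_id("0.8.23".into(), PathBuf::from("src/A.sol")), Some(id));
}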