diff --git a/crytic_compile/compilation_unit.py b/crytic_compile/compilation_unit.py index d0ae18d3..b291412c 100644 --- a/crytic_compile/compilation_unit.py +++ b/crytic_compile/compilation_unit.py @@ -9,7 +9,7 @@ import re import uuid from collections import defaultdict -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast from crytic_compile.compiler.compiler import CompilerVersion from crytic_compile.source_unit import SourceUnit @@ -40,6 +40,10 @@ def __init__(self, crytic_compile: "CryticCompile", unique_id: str): # set containing all the filenames of this compilation unit self._filenames: list[Filename] = [] + # mapping from source ID to filename (for Foundry/Hardhat source map compatibility) + # When set, this takes precedence over _filenames for export ordering + self._source_id_to_filename: dict[int, Filename] = {} + # mapping from absolute/relative/used to filename self._filenames_lookup: dict[str, Filename] | None = None @@ -181,6 +185,61 @@ def filenames(self, all_filenames: list[Filename]) -> None: """ self._filenames = all_filenames + @property + def filenames_for_export(self) -> list[Filename]: + """Return filenames in the correct order for export (matching source map indices). + + If source ID mapping is available (from Foundry/Hardhat build-info), returns + filenames ordered by source ID. Otherwise, returns filenames in append order.
+ + Returns: + list[Filename]: Filenames ordered for export + """ + if not self._source_id_to_filename: + return self._filenames + + # Build list indexed by source ID; +1 because IDs are zero-indexed + max_id = max(self._source_id_to_filename.keys()) + size = max(max_id + 1, len(self._filenames)) + result: list[Filename | None] = [None] * size + + for source_id, filename in self._source_id_to_filename.items(): + result[source_id] = filename + + # Fill gaps with filenames from _filenames that aren't in the mapping + mapped_filenames = set(self._source_id_to_filename.values()) + unmapped = [f for f in self._filenames if f not in mapped_filenames] + unmapped_iter = iter(unmapped) + + for i, entry in enumerate(result): + if entry is None: + try: + result[i] = next(unmapped_iter) + except StopIteration: + break + + # Gaps in the source ID sequence mean the build-info is incomplete; + # exporting with shifted indices would silently produce wrong source maps + gaps = [i for i, f in enumerate(result) if f is None] + if gaps: + raise ValueError( + f"Source ID gaps at indices {gaps} — cannot produce correct sourceList. " + f"This likely indicates missing sources in build-info." + ) + + return cast(list[Filename], result) + + def set_source_id(self, source_id: int, filename: Filename) -> None: + """Set the source ID for a filename. + + This is used by Foundry/Hardhat parsers to maintain correct source map indices.
+ + Args: + source_id (int): The source ID from the build-info + filename (Filename): The filename associated with this ID + """ + self._source_id_to_filename[source_id] = filename + @property def filename_to_contracts(self) -> dict[Filename, set[str]]: """Return a dict mapping the filename to a list of contract declared diff --git a/crytic_compile/platform/hardhat.py b/crytic_compile/platform/hardhat.py index fd63f4ad..d357b6be 100755 --- a/crytic_compile/platform/hardhat.py +++ b/crytic_compile/platform/hardhat.py @@ -95,7 +95,13 @@ def hardhat_like_parsing( ] if "sources" in targets_json: - for path, info in targets_json["sources"].items(): + # Sort sources by ID to ensure correct processing order + sources_with_ids = [ + (path, info, info.get("id")) for path, info in targets_json["sources"].items() + ] + sources_with_ids.sort(key=lambda x: x[2] if x[2] is not None else float("inf")) + + for original_path, info, source_id in sources_with_ids: if skip_filename: path = convert_filename( target, @@ -104,7 +110,7 @@ def hardhat_like_parsing( working_dir=working_dir, ) else: - path = process_hardhat_v3_filename(path) + path = process_hardhat_v3_filename(original_path) path = convert_filename( path, @@ -120,6 +126,10 @@ def hardhat_like_parsing( f"AST not found for {path} in {build_info} directory" ) + # Store source ID mapping for correct export ordering + if source_id is not None: + compilation_unit.set_source_id(source_id, path) + if "contracts" in targets_json: for original_filename, contracts_info in targets_json["contracts"].items(): original_filename = process_hardhat_v3_filename(original_filename) diff --git a/crytic_compile/platform/solc.py b/crytic_compile/platform/solc.py index 8775911f..fe14b872 100644 --- a/crytic_compile/platform/solc.py +++ b/crytic_compile/platform/solc.py @@ -76,7 +76,8 @@ def export_to_solc_from_compilation_unit( # Create additional informational objects. 
sources = {filename: {"AST": ast} for (filename, ast) in compilation_unit.asts.items()} - source_list = [x.absolute for x in compilation_unit.filenames] + # Use filenames_for_export to ensure correct source map index ordering + source_list = [x.absolute for x in compilation_unit.filenames_for_export] # Create our root object to contain the contracts and other information. output = {"sources": sources, "sourceList": source_list, "contracts": contracts}