diff --git a/anneal/flake.nix b/anneal/flake.nix index 3e8d15f961..0016e544b5 100644 --- a/anneal/flake.nix +++ b/anneal/flake.nix @@ -596,13 +596,32 @@ EOF leanBuild = self.packages.${system}.aeneas-compiled; rustToolchain = self.packages.${system}.rust-toolchain; + leanToolchain = self.packages.${system}.lean-toolchain; + aeneasUnpacked = self.packages.${system}.aeneas-unpacked; buildPhase = '' - # 1. Recreate the staging directory layout + # 1. Recreate the staging directory layout recursively. + # + # Why: Since we copy from multiple read-only Nix store paths, we must + # recursively set write permissions after each copy. Otherwise, subsequent + # copies would crash with `Permission denied` when writing into newly created + # read-only chroot folders. mkdir -p $TMPDIR/dist_staging + cp -r $leanBuild/* $TMPDIR/dist_staging/ + chmod -R +w $TMPDIR/dist_staging + cp -r $rustToolchain/* $TMPDIR/dist_staging/ chmod -R +w $TMPDIR/dist_staging + + cp -r $leanToolchain/* $TMPDIR/dist_staging/ + chmod -R +w $TMPDIR/dist_staging + + # Copy precompiled Aeneas and Charon binaries from the unpacked release + cp $aeneasUnpacked/aeneas $TMPDIR/dist_staging/ + cp $aeneasUnpacked/charon $TMPDIR/dist_staging/ + cp $aeneasUnpacked/charon-driver $TMPDIR/dist_staging/ + chmod -R +w $TMPDIR/dist_staging # 2. Un-Nixify staging binaries recursively to make them relocatable. # @@ -619,7 +638,13 @@ EOF if patchelf --print-interpreter "$file" >/dev/null 2>&1; then patchelf --set-interpreter /lib64/ld-linux-x86-64.so.2 "$file" || true fi - patchelf --set-rpath "" "$file" || true + # Set relative RPATH pointing to our relocatable toolchain library folders. + # + # Why: Executables (such as `charon-driver`) require shared compiler + # runtime libraries (like `librustc_driver.so`) to execute. We set their RPATH + # to use the `$ORIGIN` dynamic linker variable, enabling them to locate + # their shared libraries chroot-relatively regardless of the install directory. 
+ patchelf --set-rpath '$ORIGIN/lib:$ORIGIN/lib/lean' "$file" || true strip "$file" || true fi done @@ -679,6 +704,76 @@ EOF ''; }; + # Standard sandboxed check that compiles the cargo-anneal Rust binary + # and executes its integration tests completely offline. + # It mounts the precompiled relocatable Zstd toolchain in the store + # and feeds it locally via the ANNEAL_TEST_LOCAL_ARCHIVE environment variable. + checks.integration-tests = pkgs.stdenv.mkDerivation { + pname = "cargo-anneal-integration-tests"; + version = "0.1.0"; + + # Use the Aeneas Cargo project directory as source + src = ./.; + + nativeBuildInputs = with pkgs; [ + cargo + rustc + git + zstd + gnutar + graphviz + openssl + pkg-config + cacert + steam-run + ]; + + buildInputs = with pkgs; [ + openssl + ]; + + # Precompiled monolithic Zstd toolchain package in the Nix store + omnibusArchive = self.packages.${system}.omnibus-archive; + + # Set up CARGO_HOME and cache directories inside the writeable sandbox + CARGO_HOME = "/build/.cargo"; + RUSTUP_HOME = "/build/.rustup"; + + # OpenSSL package mapping for cargo pkg-config + PKG_CONFIG_PATH = "${pkgs.openssl.dev}/lib/pkgconfig"; + + buildPhase = '' + export HOME=$TMPDIR + + # Feed the absolute Nix store path of the precompiled relocatable Zstd + # archive to the test process. The integration test harness reads this + # variable and dynamically forwards it via `--local-archive` CLI flags. + export ANNEAL_TEST_LOCAL_ARCHIVE="$omnibusArchive" + + # We must use the vendored-sources config to compile offline + # from local crates. + mkdir -p .cargo + cat <<EOF > .cargo/config.toml +[source.crates-io] +replace-with = "vendored-sources" + +[source.vendored-sources] +directory = "vendor" +EOF + + # Run cargo test offline inside the hermetic sandbox! + # We filter to only run integration tests to keep execution scoped. + echo "Running cargo integration tests..." 
+ steam-run cargo test --test integration --offline -- --nocapture + ''; + + installPhase = '' + # Checks only need to write a dummy successful output file to the store + echo "Integration tests completed successfully!" > $out + ''; + }; + + diff --git a/anneal/src/aeneas.rs b/anneal/src/aeneas.rs index 834d31f3a9..011eb52b29 100644 --- a/anneal/src/aeneas.rs +++ b/anneal/src/aeneas.rs @@ -415,21 +415,15 @@ pub fn generate_lean_workspace(roots: &LockedRoots, artifacts: &[AnnealArtifact] // Change name to anneal_verification manifest["name"] = serde_json::Value::String("anneal_verification".to_string()); - // Read actual HEAD commit of Aeneas in toolchain let toolchain_aeneas_dir = toolchain.root.join("backends").join("lean"); - let output = Command::new("git") - .args(["rev-parse", "HEAD"]) - .current_dir(&toolchain_aeneas_dir) - .output() - .context("Failed to run `git rev-parse HEAD` in toolchain Aeneas")?; - if !output.status.success() { - bail!("`git rev-parse HEAD` failed in toolchain Aeneas"); - } - let aeneas_rev = String::from_utf8(output.stdout) - .context("Failed to parse git output")? - .trim() - .to_string(); + // Use the statically pinned compile-time revision of Aeneas. + // + // Why: In our hermetic relocatable toolchain, all Aeneas version assets + // are strictly locked and precompiled in the store. Wiping out unstable `.git` + // directories inside the FOD prevents dynamic `git rev-parse` executions from + // succeeding. We safely reuse the static build revision directly. + let aeneas_rev = env!("ANNEAL_AENEAS_REV").to_string(); // Inject aeneas dependency let aeneas_dep = serde_json::json!({ @@ -488,6 +482,21 @@ pub fn generate_lean_workspace(roots: &LockedRoots, artifacts: &[AnnealArtifact] bail!("Failed to copy Aeneas directory from toolchain to workspace"); } + // Initialize git repository in the copied Aeneas directory. + // + // Why: The precompiled relocatable toolchain does not contain a `.git` folder. 
+ // To prevent `git remote add` from searching upwards and hitting the host's + // git repository (causing `remote origin already exists` errors), we explicitly + // initialize the copied folder as a standalone Git repository inside the sandbox. + let status = Command::new("git") + .args(["init", "-b", "main", "-q"]) + .current_dir(&user_aeneas_dir) + .status() + .context("Failed to run `git init` in Aeneas clone")?; + if !status.success() { + bail!("`git init` failed in Aeneas clone"); + } + // Add Git remote 'origin' to match manifest let status = Command::new("git") .args([ diff --git a/anneal/src/setup.rs b/anneal/src/setup.rs index b876c2dd36..7806aa6a94 100644 --- a/anneal/src/setup.rs +++ b/anneal/src/setup.rs @@ -452,160 +452,7 @@ fn update_manifest_urls(manifest_path: &Path, base_path: &Path) -> Result<()> { } -/// Ensures that `elan` (the Lean toolchain manager) is installed on the -/// system. If it is not found in the system `PATH`, it downloads the latest -/// release from GitHub and runs `elan-init` to install it for the current -/// user. It also attempts to add the `elan` bin directory to the current -/// process's `PATH` to ensure subsequent commands can find it immediately. -fn ensure_elan_installed() -> Result<()> { - println!("Checking for elan..."); - let status = Command::new("elan") - .arg("--version") - .stdout(std::process::Stdio::null()) - .stderr(std::process::Stdio::null()) - .status(); - - if status.is_ok() && status.unwrap().success() { - println!("elan is already installed."); - return Ok(()); - } - - println!("elan not found. 
Installing elan..."); - let platform = Platform::detect()?; - let arch = platform.triple(); - - let url = - format!("https://github.com/leanprover/elan/releases/latest/download/elan-{}.tar.gz", arch); - - println!("Downloading elan from {}...", url); - let response = reqwest::blocking::get(&url).context("Failed to download elan")?; - if !response.status().is_success() { - bail!("Failed to download elan: HTTP {}", response.status()); - } - let data = response.bytes().context("Failed to read elan response")?; - - let temp_dir = std::env::temp_dir(); - let elan_extract_dir = temp_dir.join("elan-extract-anneal"); - fs::create_dir_all(&elan_extract_dir).context("Failed to create elan extract dir")?; - - extract_artifact(&data, &elan_extract_dir)?; - - let elan_init_path = elan_extract_dir.join("elan-init"); - - let status = Command::new(&elan_init_path) - .args(["-y", "--default-toolchain", "none"]) - .status() - .context("Failed to run elan-init")?; - - let _ = fs::remove_dir_all(&elan_extract_dir); - - if !status.success() { - bail!("elan-init failed"); - } - - // Add elan to PATH for current process - let home = dirs::home_dir().ok_or_else(|| anyhow::anyhow!("Could not find home directory"))?; - let elan_bin = home.join(".elan").join("bin"); - if elan_bin.exists() { - let path = std::env::var_os("PATH").unwrap_or_default(); - let mut paths = std::env::split_paths(&path).collect::<Vec<_>>(); - if !paths.contains(&elan_bin) { - paths.insert(0, elan_bin); - let new_path = std::env::join_paths(paths)?; - // SAFETY: This is a single-threaded setup context. - unsafe { - std::env::set_var("PATH", new_path); - } - } - } - - println!("elan installed successfully."); - Ok(()) -} - -/// Installs the pinned Lean toolchain required by Anneal using `elan`. -/// It checks the environment variable `ANNEAL_LEAN_TOOLCHAIN` to determine -/// which version to install. If the version is already listed in `elan -/// toolchain list`, it skips installation to save time. 
-fn install_lean_toolchain() -> Result<()> { - let version = env!("ANNEAL_LEAN_TOOLCHAIN"); - println!("Installing Lean toolchain {}...", version); - - // Check if already installed - let output = Command::new("elan") - .args(["toolchain", "list"]) - .output() - .context("Failed to run elan toolchain list")?; - - if output.status.success() { - let stdout = String::from_utf8_lossy(&output.stdout); - if stdout.lines().any(|line| line.contains(version)) { - println!("Lean toolchain {} is already installed.", version); - return Ok(()); - } - } - - let status = Command::new("elan") - .args(["toolchain", "install", version]) - .status() - .context("Failed to run elan toolchain install")?; - - if !status.success() { - bail!("Failed to install Lean toolchain"); - } - - println!("Lean toolchain {} installed successfully.", version); - Ok(()) -} - -/// Pre-builds the Aeneas Lean library in the extracted toolchain directory. -/// This prevents compiling the library from source when users verify their -/// projects, which is slow and disk-heavy. It first attempts to fetch -/// pre-compiled Mathlib artifacts using `lake exe cache get` to avoid -/// compiling Mathlib from source, and then runs `lake build`. 
-fn prebuild_lean_library(lean_dir: &Path, cache_dir: &Path) -> Result<()> { - println!("Pre-building Aeneas Lean library at {:?}...", lean_dir); - - // Fetch Mathlib cache - println!("Fetching Mathlib cache..."); - let status = Command::new("lake") - .args(["exe", "cache", "get"]) - .env("LAKE_CACHE_DIR", cache_dir) - .env("LAKE_ARTIFACT_CACHE", "1") - .current_dir(lean_dir) - .status() - .context("Failed to run `lake exe cache get`")?; - - if !status.success() { - bail!("Failed to fetch Mathlib cache; refusing to build from scratch"); - } - - // Build the library - println!("Building Aeneas Lean library..."); - let status = Command::new("lake") - .arg("build") - .env("LAKE_CACHE_DIR", cache_dir) - .env("LAKE_ARTIFACT_CACHE", "1") - .current_dir(lean_dir) - .status() - .context("Failed to run `lake build`")?; - - if !status.success() { - bail!("Failed to build Aeneas Lean library"); - } - - println!("Successfully pre-built Aeneas Lean library."); - Ok(()) -} - -/// Checks whether the specified tools are installed and have valid hashes. -/// Returns `true` if all are valid, or `false` if any are missing or corrupt. - -/// Sets up the Anneal toolchain by downloading and extracting dependencies. pub fn run_setup(args: crate::resolve::SetupArgs) -> Result<()> { - ensure_elan_installed()?; - install_lean_toolchain()?; - let mut toolchain = Toolchain::resolve()?; // Drop the shared lock acquired by resolve() so we can acquire an // exclusive one. @@ -619,6 +466,7 @@ pub fn run_setup(args: crate::resolve::SetupArgs) -> Result<()> { let parent_dir = toolchain.root.parent().unwrap(); fs::create_dir_all(parent_dir).context("Failed to create toolchain parent directory")?; + // To ensure atomic installation, we perform all extraction and setup steps // in a temporary directory within the same parent folder. 
This prevents // leaving the toolchain in a partially-installed state if the process is @@ -660,377 +508,31 @@ pub fn run_setup(args: crate::resolve::SetupArgs) -> Result<()> { extract_artifact(&data, &tmp_root)?; - let lean_dir = tmp_root.join("backends").join("lean"); - - // Initialize git repo in the extracted Lean directory - println!("Initializing git repository in {:?}...", lean_dir); - let status = Command::new("git") - .args(["init", "-b", "main", "-q"]) - .current_dir(&lean_dir) - .status() - .context("Failed to run `git init -b main`")?; - if !status.success() { - bail!("`git init -b main` failed"); - } - - let status = Command::new("git") - .args(["add", "."]) - .current_dir(&lean_dir) - .status() - .context("Failed to run `git add`")?; - if !status.success() { - bail!("`git add` failed"); - } - - let status = Command::new("git") - .args(["commit", "-m", "Initial commit from Anneal setup", "-q"]) - .env("GIT_AUTHOR_NAME", "Anneal") - .env("GIT_AUTHOR_EMAIL", "anneal@example.com") - .env("GIT_COMMITTER_NAME", "Anneal") - .env("GIT_COMMITTER_EMAIL", "anneal@example.com") - .current_dir(&lean_dir) - .status() - .context("Failed to run `git commit`")?; - if !status.success() { - bail!("`git commit` failed"); - } - - // Run `lake update` to resolve dependencies and generate the manifest. - // We do this initially with remote URLs to let Lake resolve conflicts and - // record the specific commit hashes in `lake-manifest.json`. This also - // fetches the sources into `.lake/packages`. - println!("Resolving dependencies with `lake update` in {:?}...", lean_dir); - let status = Command::new("lake") - .arg("update") - .current_dir(&lean_dir) - .status() - .context("Failed to run `lake update`")?; - if !status.success() { - bail!("`lake update` failed"); - } - - // Move the resolved dependency directories to a centralized `packages` - // folder in the toolchain root. 
This acts as our local registry and - // ensures that they are preserved across builds and not treated as - // transient build artifacts by Lake in `backends/lean`. - let packages_dir = tmp_root.join("packages"); - fs::create_dir_all(&packages_dir).context("Failed to create packages directory")?; - - let lake_packages_dir = lean_dir.join(".lake").join("packages"); - if lake_packages_dir.exists() { - for entry in fs::read_dir(&lake_packages_dir).context("Failed to read .lake/packages")? { - let entry = entry.context("Failed to read directory entry")?; - let path = entry.path(); - if path.is_dir() { - let name = path.file_name().unwrap(); - let dest = packages_dir.join(name); - fs::rename(&path, &dest).context("Failed to move package")?; - } - } - } - - // We now perform the core rewriting of dependency configurations to point - // to the filesystem-local paths we just created. - // - // We first read the manifest to know the exact resolved commit hashes, - // ensuring we preserve the versions resolved by Lake in step 2. - let manifest_path = lean_dir.join("lake-manifest.json"); - let revs = read_manifest_revs(&manifest_path)?; - - // Helper to get rev or fallback - let get_rev = |name: &str| -> &str { revs.get(name).map(|s| s.as_str()).unwrap_or("main") }; - - // We hardcode the target strings for replacement because parsing arbitrary - // Lean configuration files in Rust is fragile. Since this toolchain pins - // specific versions of dependencies, these target strings are stable for - // this version of Anneal. 
- // - // Rewrite URLs in Aeneas lakefile.lean - let aeneas_lakefile = lean_dir.join("lakefile.lean"); - let mathlib_rev = get_rev("mathlib"); - let target = "require mathlib from git\n \"https://github.com/leanprover-community/mathlib4.git\" @ \"v4.28.0-rc1\""; - let replacement = format!( - "require mathlib from git\n \"file://{}/packages/mathlib\" @ \"{}\"", - tmp_root.display(), - mathlib_rev - ); - strict_replace_file_content(&aeneas_lakefile, target, &replacement)?; - - // Rewrite URLs in Mathlib lakefile.lean - let mathlib_lakefile = packages_dir.join("mathlib").join("lakefile.lean"); - let batteries_rev = get_rev("batteries"); - let qq_rev = get_rev("Qq"); - let aesop_rev = get_rev("aesop"); - let proofwidgets_rev = get_rev("proofwidgets"); - let import_graph_rev = get_rev("importGraph"); - let lean_search_client_rev = get_rev("LeanSearchClient"); - let plausible_rev = get_rev("plausible"); - - let mathlib_replacements = [ - ( - "require \"leanprover-community\" / \"batteries\" @ git \"main\"", - format!( - "require batteries from git \"file://{}/packages/batteries\" @ \"{}\"", - tmp_root.display(), - batteries_rev - ), - ), - ( - "require \"leanprover-community\" / \"Qq\" @ git \"master\"", - format!( - "require Qq from git \"file://{}/packages/Qq\" @ \"{}\"", - tmp_root.display(), - qq_rev - ), - ), - ( - "require \"leanprover-community\" / \"aesop\" @ git \"master\"", - format!( - "require aesop from git \"file://{}/packages/aesop\" @ \"{}\"", - tmp_root.display(), - aesop_rev - ), - ), - ( - "require \"leanprover-community\" / \"proofwidgets\" @ git \"v0.0.86\"", - format!( - "require proofwidgets from git \"file://{}/packages/proofwidgets\" @ \"{}\"", - tmp_root.display(), - proofwidgets_rev - ), - ), - ( - "require \"leanprover-community\" / \"importGraph\" @ git \"main\"", - format!( - "require importGraph from git \"file://{}/packages/importGraph\" @ \"{}\"", - tmp_root.display(), - import_graph_rev - ), - ), - ( - "require \"leanprover-community\" 
/ \"LeanSearchClient\" @ git \"main\"", - format!( - "require LeanSearchClient from git \"file://{}/packages/LeanSearchClient\" @ \"{}\"", - tmp_root.display(), - lean_search_client_rev - ), - ), - ( - "require \"leanprover-community\" / \"plausible\" @ git \"main\"", - format!( - "require plausible from git \"file://{}/packages/plausible\" @ \"{}\"", - tmp_root.display(), - plausible_rev - ), - ), - ]; - - for (target, replacement) in &mathlib_replacements { - strict_replace_file_content(&mathlib_lakefile, target, &replacement)?; - } - - // Rewrite URLs in Aesop lakefile.toml - let aesop_lakefile = packages_dir.join("aesop").join("lakefile.toml"); - let target = "[[require]]\nname = \"batteries\"\ngit = \"https://github.com/leanprover-community/batteries\"\nrev = \"v4.28.0-rc1\""; - let replacement = format!( - "[[require]]\nname = \"batteries\"\ngit = \"file://{}/packages/batteries\"\nrev = \"v4.28.0-rc1\"", - tmp_root.display() - ); - strict_replace_file_content(&aesop_lakefile, target, &replacement)?; - - // Rewrite URLs in ImportGraph lakefile.toml - let import_graph_lakefile = packages_dir.join("importGraph").join("lakefile.toml"); - let target = "[[require]]\nname = \"Cli\"\nscope = \"leanprover\"\nrev = \"v4.28.0-rc1\""; - let replacement = format!( - "[[require]]\nname = \"Cli\"\ngit = \"file://{}/packages/Cli\"\nrev = \"v4.28.0-rc1\"", - tmp_root.display() - ); - strict_replace_file_content(&import_graph_lakefile, target, &replacement)?; - - // We must also update the manifest file itself. If we only update the - // `lakefile`s, Lake will see that the manifest is out of date and attempt - // to re-resolve or re-clone from GitHub. - // - // Rewrite URLs in lake-manifest.json - update_manifest_urls(&manifest_path, tmp_root)?; - - prebuild_lean_library(&lean_dir, &toolchain.cache_dir())?; - - // Phase 2: Rewrite URLs to point to the final toolchain root. 
- // - // During the build in Phase 1, we used paths pointing to the temporary - // directory so that Lake could resolve and build dependencies correctly. - // Now that the build is complete, we must rewrite these paths to point - // to the final installation directory (`toolchain.root`) before we - // perform the atomic swap. This ensures that when the toolchain is - // used by user projects, all internal references are valid. - println!("Rewriting URLs to final toolchain root..."); - - let target_aeneas = format!( - "require mathlib from git\n \"file://{}/packages/mathlib\" @ \"{}\"", - tmp_root.display(), - mathlib_rev - ); - let replacement_aeneas = format!( - "require mathlib from git\n \"file://{}/packages/mathlib\" @ \"{}\"", - toolchain.root.display(), - mathlib_rev - ); - strict_replace_file_content(&aeneas_lakefile, &target_aeneas, &replacement_aeneas)?; - - for (orig_target, phase1_rep) in &mathlib_replacements { - let phase2_rep = phase1_rep - .replace(&tmp_root.display().to_string(), &toolchain.root.display().to_string()); - strict_replace_file_content(&mathlib_lakefile, &phase1_rep, &phase2_rep)?; - } - - let target_aesop = format!( - "[[require]]\nname = \"batteries\"\ngit = \"file://{}/packages/batteries\"\nrev = \"v4.28.0-rc1\"", - tmp_root.display() - ); - let replacement_aesop = format!( - "[[require]]\nname = \"batteries\"\ngit = \"file://{}/packages/batteries\"\nrev = \"v4.28.0-rc1\"", - toolchain.root.display() - ); - strict_replace_file_content(&aesop_lakefile, &target_aesop, &replacement_aesop)?; - - let target_import_graph = format!( - "[[require]]\nname = \"Cli\"\ngit = \"file://{}/packages/Cli\"\nrev = \"v4.28.0-rc1\"", - tmp_root.display() - ); - let replacement_import_graph = format!( - "[[require]]\nname = \"Cli\"\ngit = \"file://{}/packages/Cli\"\nrev = \"v4.28.0-rc1\"", - toolchain.root.display() - ); - strict_replace_file_content( - &import_graph_lakefile, - &target_import_graph, - &replacement_import_graph, - )?; - - // Also update 
manifest for Phase 2 - update_manifest_urls(&manifest_path, &toolchain.root)?; - - // Commit changes in local repositories so that Git clone operations - // performed by Lake in user projects will see the rewritten URLs. - println!("Committing changes in local repositories..."); - - // Commit in Aeneas - let status = Command::new("git") - .args(["add", "."]) - .current_dir(&lean_dir) - .status() - .context("Failed to run `git add` in Aeneas")?; - if !status.success() { - bail!("`git add` failed in Aeneas"); - } - - let status = Command::new("git") - .args(["commit", "-m", "Anneal: rewrite dependencies to local paths", "-q"]) - .env("GIT_AUTHOR_NAME", "Anneal") - .env("GIT_AUTHOR_EMAIL", "anneal@example.com") - .env("GIT_COMMITTER_NAME", "Anneal") - .env("GIT_COMMITTER_EMAIL", "anneal@example.com") - .current_dir(&lean_dir) - .status() - .context("Failed to run `git commit` in Aeneas")?; - if !status.success() { - bail!("`git commit` failed in Aeneas"); - } - - // Commit in dependencies - if packages_dir.exists() { - for entry in fs::read_dir(&packages_dir).context("Failed to read packages directory")? 
{ - let entry = entry.context("Failed to read directory entry")?; - let path = entry.path(); - if path.is_dir() { - if path.join(".git").exists() { - let output = Command::new("git") - .args(["status", "--porcelain"]) - .current_dir(&path) - .output() - .context("Failed to run `git status` in dependency")?; - - if !output.stdout.is_empty() { - println!( - "Committing changes in dependency: {:?}", - path.file_name().unwrap() - ); - let status = Command::new("git") - .args(["add", "."]) - .current_dir(&path) - .status() - .context("Failed to run `git add` in dependency")?; - if !status.success() { - bail!("`git add` failed in dependency {:?}", path); - } - - let status = Command::new("git") - .args([ - "commit", - "-m", - "Anneal: rewrite dependencies to local paths", - "-q", - ]) - .env("GIT_AUTHOR_NAME", "Anneal") - .env("GIT_AUTHOR_EMAIL", "anneal@example.com") - .env("GIT_COMMITTER_NAME", "Anneal") - .env("GIT_COMMITTER_EMAIL", "anneal@example.com") - .current_dir(&path) - .status() - .context("Failed to run `git commit` in dependency")?; - if !status.success() { - bail!("`git commit` failed in dependency {:?}", path); - } - } - } - } - } - } - - // Lake records absolute paths in its `.trace` files (e.g., in the - // compiler command lines). Since we built the project in a temporary - // directory, these traces contain paths to that directory. - // - // To prevent Lake from invalidating these traces and rebuilding everything - // when the toolchain is used from its final location, we scan all - // `.trace` files in Aeneas and its dependencies, and replace the - // temporary path with the final installation path. - println!("Post-processing trace files to fix absolute paths..."); - let tmp_path_str = temp_dir.path().to_str().unwrap(); + // Replaces the stable relocatable placeholder string `/ANNEAL_PLACEHOLDER_ROOT` + // inside all trace files, manifests, and configuration files recursively with the + // actual absolute installation path on the user's disk. 
This activates the prebuilt + // toolchain instantly offline, preventing Lake from invalidating precompiled object + // files and forcing full source compilations. + println!("Post-processing relocatable placeholder paths..."); let final_path_str = toolchain.root.to_str().unwrap(); - - // Helper closure to process a build directory - let mut process_build_dir = |dir: &Path| -> Result<()> { - if dir.exists() { - let walker = WalkDir::new(dir).into_iter(); - for entry in walker { - let entry = entry.context("Failed to walk build directory")?; - let path = entry.path(); - if path.is_file() && path.extension().map_or(false, |ext| ext == "trace") { - let content = fs::read_to_string(path).context("Failed to read trace file")?; - if content.contains(tmp_path_str) { - let new_content = content.replace(tmp_path_str, final_path_str); - fs::write(path, new_content).context("Failed to write trace file")?; - } + for entry in WalkDir::new(&tmp_root) { + let entry = entry.context("Failed to walk toolchain files")?; + let path = entry.path(); + if path.is_file() { + let name = path.file_name().unwrap().to_string_lossy(); + let ext = path.extension().map(|e| e.to_string_lossy()); + if name == "lake-manifest.json" + || name == "lakefile.lean" + || name == "lakefile.toml" + || ext.as_deref() == Some("trace") + { + let content = fs::read_to_string(path).context("Failed to read file")?; + if content.contains("/ANNEAL_PLACEHOLDER_ROOT") { + let new_content = content.replace("/ANNEAL_PLACEHOLDER_ROOT", final_path_str); + fs::write(path, new_content).context("Failed to write file")?; } } } - Ok(()) - }; - - // Process Aeneas build dir - process_build_dir(&lean_dir.join(".lake").join("build"))?; - - // Process dependencies build dirs - if packages_dir.exists() { - for entry in fs::read_dir(&packages_dir).context("Failed to read packages directory")? 
{ - let entry = entry.context("Failed to read directory entry")?; - let path = entry.path(); - if path.is_dir() { - process_build_dir(&path.join(".lake").join("build"))?; - } - } } println!("Successfully installed toolchain v{}", env!("ANNEAL_OMNIBUS_TAG")); diff --git a/anneal/tests/integration.rs b/anneal/tests/integration.rs index 75f332643f..e48dcc3010 100644 --- a/anneal/tests/integration.rs +++ b/anneal/tests/integration.rs @@ -152,11 +152,27 @@ fn get_toolchain_path() -> PathBuf { let path_str = String::from_utf8(output.stdout).unwrap(); let toolchain_path = PathBuf::from(path_str.trim()); - if !toolchain_path.exists() { - panic!( - "Resolved toolchain path does not exist: {:?}. Please run `cargo run setup` first.", - toolchain_path - ); + let has_real_files = fs::read_dir(&toolchain_path) + .map(|rd| { + rd.filter_map(|e| e.ok()).any(|entry| { + let name = entry.file_name(); + name != ".lock" + }) + }) + .unwrap_or(false); + if !toolchain_path.exists() || !has_real_files { + println!("Toolchain missing or empty in sandbox. Running automatic setup..."); + let mut setup_cmd = Command::new(cargo_bin); + setup_cmd.arg("setup"); + if let Ok(archive_path) = std::env::var("ANNEAL_TEST_LOCAL_ARCHIVE") { + setup_cmd.arg("--local-archive").arg(archive_path); + } else { + panic!("ANNEAL_TEST_LOCAL_ARCHIVE env var is not set, cannot bootstrap toolchain offline!"); + } + let status = setup_cmd + .status() + .expect("Failed to execute cargo-anneal setup during bootstrapping"); + assert!(status.success(), "Automatic sandboxed bootstrapping setup failed!"); } toolchain_path @@ -529,6 +545,16 @@ echo "---END-INVOCATION---" >> "{}" cmd.env("ANNEAL_USE_PATH_FOR_TOOLS", "1"); cmd.env("RAYON_NUM_THREADS", "1"); + // Forward the toolchain root directory. + // + // Why: The bootstrapping phase populates a single global toolchain directory + // inside the sandbox root (`/build/.anneal/toolchain/...`). 
By exporting + // this variable, we instruct the child process `cargo-anneal` inside the + // test case to resolve its toolchain to this prebuilt directory instead of + // looking inside its own empty, sandboxed `HOME` directory. + let toolchain_base = toolchain_path.parent().unwrap().parent().unwrap().parent().unwrap(); + cmd.env("ANNEAL_TOOLCHAIN_DIR", toolchain_base); + let original_path = std::env::var_os("PATH").unwrap_or_default(); let new_path = std::env::join_paths( std::iter::once(shim_dir).chain(std::env::split_paths(&original_path)), @@ -946,6 +972,15 @@ fn run_single_phase( } } + if let Some(args) = &mut config.args { + if args.first().map(|s| s.as_str()) == Some("setup") { + if let Ok(archive_path) = std::env::var("ANNEAL_TEST_LOCAL_ARCHIVE") { + args.push("--local-archive".into()); + args.push(archive_path); + } + } + } + let assert = ctx.run_anneal(&config); // Verify Exit Status