Changes from all commits
39 changes: 26 additions & 13 deletions anneal/src/aeneas.rs
@@ -443,7 +443,8 @@ pub fn generate_lean_workspace(roots: &LockedRoots, artifacts: &[AnnealArtifact]
if pkg["type"] == "path" {
if let Some(dir_str) = pkg["dir"].as_str() {
if dir_str == "../../packages/mathlib" {
pkg["dir"] = serde_json::Value::String(".lake/packages/mathlib".to_string());
pkg["dir"] =
serde_json::Value::String(".lake/packages/mathlib".to_string());
}
}
}
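
The enclosing loop over the manifest's packages and the surrounding read/write of the generated `lake-manifest.json` are collapsed in this view. A rough, self-contained sketch of that load-patch-save flow (the function name and error messages are illustrative, not the crate's actual code):

```rust
use std::fs;
use std::path::Path;

use anyhow::{Context, Result};

/// Sketch: load the copied `lake-manifest.json`, retarget the mathlib path
/// dependency at the sandbox-local checkout, and write it back.
fn patch_mathlib_dir(manifest_path: &Path) -> Result<()> {
    let text = fs::read_to_string(manifest_path).context("Failed to read lake-manifest.json")?;
    let mut manifest: serde_json::Value = serde_json::from_str(&text)?;

    if let Some(packages) = manifest["packages"].as_array_mut() {
        for pkg in packages {
            if pkg["type"] == "path" {
                if let Some(dir_str) = pkg["dir"].as_str() {
                    if dir_str == "../../packages/mathlib" {
                        pkg["dir"] =
                            serde_json::Value::String(".lake/packages/mathlib".to_string());
                    }
                }
            }
        }
    }

    fs::write(manifest_path, serde_json::to_string_pretty(&manifest)?)
        .context("Failed to write lake-manifest.json")?;
    Ok(())
}
```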
@@ -479,8 +480,10 @@ pub fn generate_lean_workspace(roots: &LockedRoots, artifacts: &[AnnealArtifact]
materialize_package_optimized(&toolchain_aeneas_dir, &user_aeneas_dir)
.context("Failed to materialize Aeneas package")?;

- // Rename Aeneas precompiled configuration directory from [anonymous] to aeneas,
- // and patch the trace file so that Lake validates the precompiled config correctly.
+ // Rename the Aeneas precompiled configuration directory from
+ // `[anonymous]` to `aeneas`, and patch its trace file so that
+ // Lake validates the precompiled configuration without
+ // re-elaborating it.
let anon_config = user_aeneas_dir.join(".lake").join("config").join("[anonymous]");
let user_config = user_aeneas_dir.join(".lake").join("config").join("aeneas");
if anon_config.exists() {
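
The body of this branch is collapsed above. A minimal sketch of the rename step, assuming a plain `fs::rename` and leaving the trace-file patch out (function name and error message are illustrative):

```rust
use std::fs;
use std::path::Path;

use anyhow::{Context, Result};

/// Sketch: move the precompiled config directory from `[anonymous]` to
/// `aeneas` so Lake finds it under the package's real name.
fn rename_anonymous_config(user_aeneas_dir: &Path) -> Result<()> {
    let anon_config = user_aeneas_dir.join(".lake").join("config").join("[anonymous]");
    let user_config = user_aeneas_dir.join(".lake").join("config").join("aeneas");
    if anon_config.exists() {
        fs::rename(&anon_config, &user_config)
            .context("Failed to rename [anonymous] config directory")?;
    }
    Ok(())
}
```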
@@ -619,8 +622,6 @@ fn run_lake(roots: &LockedRoots, artifacts: &[AnnealArtifact]) -> Result<()> {
let lean_root = generated.parent().unwrap();
log::info!("Running 'lake build' in {}", lean_root.display());
-
-

// 2. Build the project (dependencies only)
let toolchain = crate::setup::Toolchain::resolve()?;
let mut cmd = toolchain.command(Tool::Lake);
@@ -1095,7 +1096,11 @@ fn materialize_package_optimized(src_dir: &Path, dest_dir: &Path) -> Result<()>

if path.is_file() {
// Copy top-level metadata files
if file_name == "lakefile.lean" || file_name == "lakefile.toml" || file_name == "lean-toolchain" || file_name == "lake-manifest.json" {
if file_name == "lakefile.lean"
|| file_name == "lakefile.toml"
|| file_name == "lean-toolchain"
|| file_name == "lake-manifest.json"
{
let dest_file = dest_dir.join(file_name);
fs::copy(&path, &dest_file)
.with_context(|| format!("Failed to copy file {}", file_name))?;
@@ -1122,27 +1127,35 @@ fn materialize_package_optimized(src_dir: &Path, dest_dir: &Path) -> Result<()>
copy_dir_recursive(&path, &dest_widget)?;
make_dir_writable_recursive(&dest_widget)?;
} else if file_name == ".lake" {
- // Recreate `.lake` and `.lake/build`/`.lake/config` structure, and copy precompiled data
+ // Recreate the `.lake` structure and copy
+ // precompiled build and config data.
let src_build = path.join("build");
let src_config = path.join("config");
let dest_lake = dest_dir.join(".lake");
let dest_build = dest_lake.join("build");
let dest_config = dest_lake.join("config");

if src_build.exists() {
- fs::create_dir_all(&dest_build).context("Failed to create dest build directory")?;
+ fs::create_dir_all(&dest_build)
+     .context("Failed to create dest build directory")?;
materialize_build_dir_optimized(&src_build, &dest_build)?;
}

if src_config.exists() {
- // Copy precompiled config (.olean & .trace) physically and make it writable
+ // Copy precompiled config
+ // physically and make it
+ // writable.
copy_dir_recursive(&src_config, &dest_config)?;
make_dir_writable_recursive(&dest_config)?;

- // Adjust the package index (idx) in the trace file.
- // Since Aeneas is injected at index 0 in the sandbox manifest,
- // every other dependency's index shifts exactly by +1.
- // We parse the trace JSON, increment the `idx` field, and write it back.
+ // Adjust the package index (`idx`)
+ // in the trace file. Since Aeneas
+ // is prepended at index 0 in the
+ // sandbox manifest, every other
+ // dependency's index shifts by +1.
+ //
+ // We parse the trace JSON, increment
+ // the `idx` field, and write it back.
let trace_file = dest_config.join(pkg_name).join("lakefile.olean.trace");
if trace_file.exists() {
let content = fs::read_to_string(&trace_file)
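
The remainder of this block is collapsed. A sketch of the `idx` bump the comment describes, assuming the trace file is a small JSON object with a numeric `idx` field (the helper name is illustrative):

```rust
use std::fs;
use std::path::Path;

use anyhow::{Context, Result};

/// Sketch: read `lakefile.olean.trace`, bump its package index by one
/// (Aeneas is prepended at index 0), and write the patched JSON back.
fn bump_trace_idx(trace_file: &Path) -> Result<()> {
    let content = fs::read_to_string(trace_file).context("Failed to read trace file")?;
    let mut trace: serde_json::Value = serde_json::from_str(&content)?;
    if let Some(idx) = trace["idx"].as_u64() {
        trace["idx"] = serde_json::Value::from(idx + 1);
        fs::write(trace_file, serde_json::to_string(&trace)?)
            .context("Failed to write patched trace file")?;
    }
    Ok(())
}
```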
30 changes: 16 additions & 14 deletions anneal/src/setup.rs
@@ -853,9 +853,12 @@ pub fn run_setup() -> Result<()> {

// Read the newly created HEAD commit hash and write it to a static file
// `aeneas-commit.txt` inside the package directory.
- // This allows us to read the hash directly at runtime during `cargo-anneal verify`
- // instead of executing `git rev-parse HEAD`, which would fail inside Docker
- // containers due to Git's "dubious ownership" security checks (due to mapped UIDs).
+ //
+ // This allows us to read the hash directly at runtime during `cargo-anneal
+ // verify` instead of executing `git rev-parse HEAD`. Spawning Git at
+ // runtime would fail inside Docker containers due to Git's "dubious
+ // ownership" security checks (caused by UID mapping mismatches between the
+ // build runner and the test runner).
let output = Command::new("git")
.args(["rev-parse", "HEAD"])
.current_dir(&lean_dir)
@@ -864,10 +867,8 @@ pub fn run_setup() -> Result<()> {
if !output.status.success() {
bail!("`git rev-parse HEAD` failed during setup");
}
- let commit_hash = String::from_utf8(output.stdout)
-     .context("Failed to parse git output")?
-     .trim()
-     .to_string();
+ let commit_hash =
+     String::from_utf8(output.stdout).context("Failed to parse git output")?.trim().to_string();
fs::write(lean_dir.join("aeneas-commit.txt"), commit_hash)
.context("Failed to write aeneas-commit.txt")?;

@@ -905,8 +906,6 @@ pub fn run_setup() -> Result<()> {
}
}
-
-

// We now perform the core rewriting of dependency configurations to point
// to the filesystem-local paths we just created using relative path dependencies.
let manifest_path = lean_dir.join("lake-manifest.json");
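
The rewriting itself is collapsed in this view. As a hedged sketch of converting a git dependency entry into a relative path entry (the git-specific field names are assumptions based on the Lake manifest format, not taken from this crate):

```rust
/// Sketch: turn a `git` package entry from `lake-manifest.json` into a
/// relative `path` entry pointing at a locally materialized checkout.
fn to_path_dependency(pkg: &mut serde_json::Value, local_dir: &str) {
    pkg["type"] = serde_json::Value::String("path".to_string());
    pkg["dir"] = serde_json::Value::String(local_dir.to_string());
    // Drop git-specific fields, which are meaningless for a path dependency.
    if let Some(obj) = pkg.as_object_mut() {
        obj.remove("url");
        obj.remove("rev");
        obj.remove("inputRev");
    }
}
```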
@@ -971,10 +970,12 @@ pub fn run_setup() -> Result<()> {

prebuild_lean_library(&lean_dir, &tmp_root.join("lake-cache"))?;

- // Pre-compile the `graph` tool in importGraph dependency. This is required
- // because Aeneas library itself does not import importGraph, so a standard
- // `lake build` skips it. We must pre-compile it during setup so that the
- // `lake exe graph` tool is available in the read-only toolchain.
+ // Pre-compile the `graph` tool in the `importGraph` dependency. This is
+ // required because the Aeneas library itself does not import
+ // `importGraph`, so a standard `lake build` skips it.
+ //
+ // We must pre-compile it during setup so that the `lake exe graph` tool is
+ // pre-built and available in the final read-only toolchain.
println!("Pre-compiling importGraph graph tool inside toolchain...");
let status = Command::new("lake")
.args(["build", "importGraph/graph"])
@@ -998,7 +999,8 @@ pub fn run_setup() -> Result<()> {
if path.is_dir() {
let dep_manifest = path.join("lake-manifest.json");
if dep_manifest.exists() {
- fs::remove_file(&dep_manifest).context("Failed to delete dependency manifest")?;
+ fs::remove_file(&dep_manifest)
+     .context("Failed to delete dependency manifest")?;
}
}
}
7 changes: 0 additions & 7 deletions anneal/tests/integration.rs
@@ -274,8 +274,6 @@ impl TestContext {
Some(temp)
};
-
-

// Copy extra inputs based on config.
for extra in &config.extra_inputs {
let extra_path = test_case_root.join(extra);
@@ -399,8 +397,6 @@ echo "---END-INVOCATION---" >> "{}"
cmd.env("ELAN_HOME", elan_home);
}
-
-

// Resolve Mocks

// Re-organizing execution flow:
@@ -465,7 +461,6 @@ echo "---END-INVOCATION---" >> "{}"
cmd.env("ANNEAL_FORCE_TTY", "1");
cmd.env("FORCE_COLOR", "1");
-

cmd.env("ANNEAL_USE_PATH_FOR_TOOLS", "1");
cmd.env("RAYON_NUM_THREADS", "1");

@@ -484,8 +479,6 @@ echo "---END-INVOCATION---" >> "{}"
cmd.env("PATH", new_path);
cmd.env("RUSTFLAGS", rustflags);
-
-

// Redirect HOME to the persistent home directory within the sandbox.
// This ensures that the toolchain is looked up and potentially
// repaired/reinstalled in a location that is isolated from the