From c68ce4f4d32c5fd08e86a821f79dd9ef27ba3fdc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andrius=20Puk=C5=A1ta?= Date: Thu, 26 Mar 2026 12:55:41 +0200 Subject: [PATCH 1/5] relax renovate unmaintained rule MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Andrius Pukšta --- .github/renovate.json5 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/renovate.json5 b/.github/renovate.json5 index 30e06506..50ca287a 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -2,6 +2,7 @@ // // config ref: https://docs.renovatebot.com/configuration-options/ // +// validate: renovate-config-validator .github/renovate.json5 --strict --no-global { $schema: "https://docs.renovatebot.com/renovate-schema.json", extends: [ @@ -11,6 +12,7 @@ "helpers:pinGitHubActionDigestsToSemver", ], minimumReleaseAge: "7 days", + abandonmentThreshold: "3 years", packageRules: [ // Group bumps of all non-major dependencies by using two rules, one for // situations like 1.2.3, and one for situations like 0.1.2. 
From 9f62ddd8d649cb9083dbaa132a389e6b90b91c68 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andrius=20Puk=C5=A1ta?= Date: Tue, 31 Mar 2026 08:52:43 +0300 Subject: [PATCH 2/5] wip MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Andrius Pukšta --- Cargo.lock | 447 ++++++++++++++------------- Cargo.toml | 7 +- core/Cargo.toml | 4 +- core/src/commands/add.rs | 141 +++++---- core/src/commands/build.rs | 34 +- core/src/commands/lock.rs | 12 +- core/src/commands/sync.rs | 8 +- core/src/lock.rs | 423 +++++++++++++++++-------- core/src/model.rs | 442 +++++++++++++++++++++++--- core/src/project/any.rs | 9 +- core/src/project/gix_git_download.rs | 265 +++++++++++++++- core/src/project/utils.rs | 119 +++++++ core/src/resolve/gix_git.rs | 8 +- core/src/resolve/mod.rs | 2 + core/src/solve/pubgrub.rs | 139 ++++++--- sysand/src/commands/info.rs | 81 ++++- sysand/src/commands/remove.rs | 94 ++++-- 17 files changed, 1654 insertions(+), 581 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 29c03c0e..69b1550a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -262,12 +262,6 @@ dependencies = [ "tempfile", ] -[[package]] -name = "cast" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" - [[package]] name = "cc" version = "1.2.59" @@ -413,26 +407,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55b672471b4e9f9e95499ea597ff64941a309b2cdbffcc46f2cc5e2d971fd335" -[[package]] -name = "console_error_panic_hook" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" -dependencies = [ - "cfg-if", - "wasm-bindgen", -] - -[[package]] -name = "console_log" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"be8aed40e4edbf4d3b4431ab260b63fdc40f5780a4766824329ea0f1eefe3c0f" -dependencies = [ - "log", - "web-sys", -] - [[package]] name = "constant_time_eq" version = "0.4.2" @@ -483,6 +457,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + [[package]] name = "crypto-common" version = "0.1.7" @@ -493,6 +473,20 @@ dependencies = [ "typenum", ] +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "deflate64" version = "0.1.12" @@ -843,18 +837,21 @@ dependencies = [ [[package]] name = "gix" -version = "0.80.0" +version = "0.81.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aa56fdbfe98258af2759818ddc3175cc581112660e74c3fd55669836d29a994" +checksum = "0473c64d9ccbcfb9953a133b47c8b9a335b87ac6c52b983ee4b03d49000b0f3f" dependencies = [ "gix-actor", + "gix-archive", "gix-attributes", + "gix-blame", "gix-command", "gix-commitgraph", "gix-config", "gix-credentials", "gix-date", "gix-diff", + "gix-dir", "gix-discover", "gix-error", "gix-features", @@ -866,6 +863,7 @@ dependencies = [ "gix-ignore", "gix-index", "gix-lock", + "gix-merge", "gix-negotiate", "gix-object", "gix-odb", @@ -880,6 +878,7 @@ dependencies = [ "gix-revwalk", "gix-sec", "gix-shallow", + "gix-status", "gix-submodule", "gix-tempfile", "gix-trace", @@ -890,6 +889,7 @@ dependencies = [ "gix-validate", "gix-worktree", "gix-worktree-state", + "gix-worktree-stream", "nonempty", "smallvec", "thiserror 2.0.18", @@ -907,6 +907,19 @@ dependencies = [ "winnow 0.7.15", ] +[[package]] +name = "gix-archive" +version = 
"0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "651c99be11aac9b303483193ae50b45eb6e094da4f5ed797019b03948f51aad6" +dependencies = [ + "bstr", + "gix-date", + "gix-error", + "gix-object", + "gix-worktree-stream", +] + [[package]] name = "gix-attributes" version = "0.31.0" @@ -933,6 +946,26 @@ dependencies = [ "gix-error", ] +[[package]] +name = "gix-blame" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c77aaf9f7348f4da3ebfbfbbc35fa0d07155d98377856198dde6f695fd648705" +dependencies = [ + "gix-commitgraph", + "gix-date", + "gix-diff", + "gix-error", + "gix-hash", + "gix-object", + "gix-revwalk", + "gix-trace", + "gix-traverse", + "gix-worktree", + "smallvec", + "thiserror 2.0.18", +] + [[package]] name = "gix-chunk" version = "0.7.0" @@ -957,9 +990,9 @@ dependencies = [ [[package]] name = "gix-commitgraph" -version = "0.34.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aea2fcfa6bc7329cd094696ba76682b89bdb61cafc848d91b34abba1c1d7e040" +checksum = "3196655fd1443f3c58a48c114aa480be3e4e87b393d7292daaa0d543862eb445" dependencies = [ "bstr", "gix-chunk", @@ -971,9 +1004,9 @@ dependencies = [ [[package]] name = "gix-config" -version = "0.53.0" +version = "0.54.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c24b190bd42b55724368c28ae750840b48e2038b9b5281202de6fca4ec1fce1" +checksum = "08939b4c4ed7a663d0e64be9e1e9bdf23a1fb4fcee1febdf449f12229542e50d" dependencies = [ "bstr", "gix-config-value", @@ -1035,21 +1068,51 @@ dependencies = [ [[package]] name = "gix-diff" -version = "0.60.0" +version = "0.61.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60592771b104eda4e537c311e8239daef0df651d61e0e21855f7e6166416ff12" +checksum = "88f3b3475e5d3877d7c30c40827cc2441936ce890efc226e5ba4afe3a7ae33f0" dependencies = [ "bstr", + "gix-command", + "gix-filter", + "gix-fs", 
"gix-hash", "gix-object", + "gix-path", + "gix-tempfile", + "gix-trace", + "gix-traverse", + "gix-worktree", + "imara-diff 0.1.8", + "imara-diff 0.2.0", + "thiserror 2.0.18", +] + +[[package]] +name = "gix-dir" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5da4604a360988f0ba8efe6f90093ca5a844f4a7f8e1a3dcda501ec44e600ea9" +dependencies = [ + "bstr", + "gix-discover", + "gix-fs", + "gix-ignore", + "gix-index", + "gix-object", + "gix-path", + "gix-pathspec", + "gix-trace", + "gix-utils", + "gix-worktree", "thiserror 2.0.18", ] [[package]] name = "gix-discover" -version = "0.48.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "810764b92e8cb95e4d91b7adfc5a14666434fd32ace02900dfb66aae71f845df" +checksum = "c65bd3330fe0cb9d40d875bf862fd5e8ad6fa4164ddbc4842fbeb889c3f0b2c6" dependencies = [ "bstr", "dunce", @@ -1090,9 +1153,9 @@ dependencies = [ [[package]] name = "gix-filter" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eda328750accaac05ce7637298fd7d6ba0d5d7bdf49c21f899d0b97e3df822d" +checksum = "d37598282a6566da6fb52667570c7fe0aedcb122ac886724a9e62a2180523e35" dependencies = [ "bstr", "encoding_rs", @@ -1137,9 +1200,9 @@ dependencies = [ [[package]] name = "gix-hash" -version = "0.22.1" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8ced05d2d7b13bff08b2f7eb4e47cfeaf00b974c2ddce08377c4fe1f706b3eb" +checksum = "0fb896a02d9ab96fa518475a5f30ad3952010f801a8de5840f633f4a6b985dfb" dependencies = [ "faster-hex", "gix-features", @@ -1149,9 +1212,9 @@ dependencies = [ [[package]] name = "gix-hashtable" -version = "0.12.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52f1eecdd006390cbed81f105417dbf82a6fe40842022006550f2e32484101da" +checksum = 
"2664216fc5e89b51e756a4a3ac676315602ce2dac07acf1da959a22038d69b33" dependencies = [ "gix-hash", "hashbrown 0.16.1", @@ -1173,9 +1236,9 @@ dependencies = [ [[package]] name = "gix-index" -version = "0.48.0" +version = "0.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13b28482b86662c8b78160e0750b097a35fd61185803a960681351b3a07de07e" +checksum = "1bae54ab14e4e74d5dda60b82ea7afad7c8eb3be68283d6d5f29bd2e6d47fff7" dependencies = [ "bitflags", "bstr", @@ -1210,11 +1273,37 @@ dependencies = [ "thiserror 2.0.18", ] +[[package]] +name = "gix-merge" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4606747466512d22c2dffc019142e1941238f543987ea51353c938cca80c500" +dependencies = [ + "bstr", + "gix-command", + "gix-diff", + "gix-filter", + "gix-fs", + "gix-hash", + "gix-index", + "gix-object", + "gix-path", + "gix-quote", + "gix-revision", + "gix-revwalk", + "gix-tempfile", + "gix-trace", + "gix-worktree", + "imara-diff 0.1.8", + "nonempty", + "thiserror 2.0.18", +] + [[package]] name = "gix-negotiate" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a925ec9bc3664eaff9c7dc49bc857fe0de7e90ece6e092cb66ba923812824db" +checksum = "6ea064c7595eea08fdd01c70748af747d9acc40f727b61f4c8a2145a5c5fc28c" dependencies = [ "bitflags", "gix-commitgraph", @@ -1226,9 +1315,9 @@ dependencies = [ [[package]] name = "gix-object" -version = "0.57.0" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "013eae8e072c6155191ac266950dfbc8d162408642571b32e2c6b3e4b03740fb" +checksum = "cafb802bb688a7c1e69ef965612ff5ff859f046bfb616377e4a0ba4c01e43d47" dependencies = [ "bstr", "gix-actor", @@ -1247,9 +1336,9 @@ dependencies = [ [[package]] name = "gix-odb" -version = "0.77.0" +version = "0.78.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f8901a182923799e8857ac01bff6d7c6fecea999abd79a86dab638aadbb843f3" +checksum = "24833ae9323b4f7079575fb9f961cf9c414b0afbec428a536ab8e7dd93bc002b" dependencies = [ "arc-swap", "gix-features", @@ -1267,9 +1356,9 @@ dependencies = [ [[package]] name = "gix-pack" -version = "0.67.0" +version = "0.68.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a9f96f4058359d6874123f160e5b2044974829a29f3a71bb9c9218d1916c3" +checksum = "e3484119cd19859d7d7639413c27e192478fa354d3f4ff5f7e3c041e8040f0f4" dependencies = [ "clru", "gix-chunk", @@ -1340,9 +1429,9 @@ dependencies = [ [[package]] name = "gix-protocol" -version = "0.58.0" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c64ec7b04c57df6e97a2ac4738a4a09897b88febd6ec4bd2c5d3ff3ad3849df" +checksum = "4f38666350736b5877c79f57ddae02bde07a4ce186d889adc391e831cddcbe76" dependencies = [ "bstr", "gix-credentials", @@ -1378,9 +1467,9 @@ dependencies = [ [[package]] name = "gix-ref" -version = "0.60.0" +version = "0.61.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cc7b230945f02d706a49bcf823b671785ecd9e88e713b8bd2ca5db104c97add" +checksum = "c2159978abb99b7027c8579d15211e262ef0ef2594d5cecb3334fbcbdfe2997c" dependencies = [ "gix-actor", "gix-features", @@ -1399,9 +1488,9 @@ dependencies = [ [[package]] name = "gix-refspec" -version = "0.38.0" +version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb3dc194cdc1176fc20f39f233d0d516f83df843ea14a9eb758a2690f3e38d1e" +checksum = "dc806ee13f437428f8a1ba4c72ecfaa3f20e14f5f0d4c2bc17d0b33e794aa6ac" dependencies = [ "bstr", "gix-error", @@ -1415,10 +1504,11 @@ dependencies = [ [[package]] name = "gix-revision" -version = "0.42.0" +version = "0.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df9e31cd402edae08c3fdb67917b9fb75b0c9c9bd2fbed0c2dd9c0847039c556" +checksum = 
"7c08f1ec5d1e6a524f8ba291c41f0ccaef64e48ed0e8cf790b3461cae45f6d3d" dependencies = [ + "bitflags", "bstr", "gix-commitgraph", "gix-date", @@ -1426,14 +1516,15 @@ dependencies = [ "gix-hash", "gix-object", "gix-revwalk", + "gix-trace", "nonempty", ] [[package]] name = "gix-revwalk" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "573f6e471d76c0796f0b8ed5a431521ea5d121a7860121a2a9703e9434ab1d52" +checksum = "0e4b2b87772b21ca449249e86d32febadba5cba32b0fcce804ab9cefc6f2111c" dependencies = [ "gix-commitgraph", "gix-date", @@ -1459,9 +1550,9 @@ dependencies = [ [[package]] name = "gix-shallow" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee51037c8a27ddb1c7a6d6db2553d01e501d5b1dae7dc65e41905a70960e658" +checksum = "cbf60711c9083b2364b3fac8a352444af76b17201f3682fdebe74fa66d89a772" dependencies = [ "bstr", "gix-hash", @@ -1470,11 +1561,34 @@ dependencies = [ "thiserror 2.0.18", ] +[[package]] +name = "gix-status" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23d6c598e3fdbc352fba1c5ba7e709e69402fafbc44d9295edad2e3c4738996b" +dependencies = [ + "bstr", + "filetime", + "gix-diff", + "gix-dir", + "gix-features", + "gix-filter", + "gix-fs", + "gix-hash", + "gix-index", + "gix-object", + "gix-path", + "gix-pathspec", + "gix-worktree", + "portable-atomic", + "thiserror 2.0.18", +] + [[package]] name = "gix-submodule" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6cba2022599491d620fbc77b3729dba0120862ce9b4af6e3c47d19a9f2a5d884" +checksum = "0ce5c3929c5e6821f651d35e8420f72fea3cfafe9fc1e928a61e718b462c72a5" dependencies = [ "bstr", "gix-config", @@ -1491,6 +1605,7 @@ version = "21.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d22227f6b203f511ff451c33c89899e87e4f571fc596b06f68e6e613a6508528" dependencies = [ + "dashmap", "gix-fs", "libc", "parking_lot", @@ -1524,9 +1639,9 @@ dependencies = [ [[package]] name = "gix-traverse" -version = "0.54.0" +version = "0.55.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c99b3cf9dc87c13f1404e7b0e8c5e4bff4975d6f788831c02d6c006f3c76b4a0" +checksum = "963dc2afcdb611092aa587c3f9365e749ac0a0892ff27662dbc75f26c953fbec" dependencies = [ "bitflags", "gix-commitgraph", @@ -1557,6 +1672,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "befcdbdfb1238d2854591f760a48711bed85e72d80a10e8f2f93f656746ef7c5" dependencies = [ + "bstr", "fastrand", "unicode-normalization", ] @@ -1572,9 +1688,9 @@ dependencies = [ [[package]] name = "gix-worktree" -version = "0.49.0" +version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "005627fc149315f39473e3e94a50058dd5d345c490a23723f67f32ee9c505232" +checksum = "e6bd5830cbc43c9c00918b826467d2afad685b195cb82329cde2b2d116d2c578" dependencies = [ "bstr", "gix-attributes", @@ -1590,9 +1706,9 @@ dependencies = [ [[package]] name = "gix-worktree-state" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ffce16a83def3651ee4c9872960f4582652fbcc8bbee568c9bae6ffa23894" +checksum = "644a1681f96e1be43c2a8384337d9d220e7624f50db54beda70997052aebf707" dependencies = [ "bstr", "gix-features", @@ -1606,6 +1722,24 @@ dependencies = [ "thiserror 2.0.18", ] +[[package]] +name = "gix-worktree-stream" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24e3fb70a1f650a5cec7d5b8d10d6d6fe86daf3cf15bde08ba0c70988a2932c3" +dependencies = [ + "gix-attributes", + "gix-error", + "gix-features", + "gix-filter", + "gix-fs", + "gix-hash", + "gix-object", + "gix-path", + "gix-traverse", + "parking_lot", +] + [[package]] name = 
"glob" version = "0.3.3" @@ -1652,6 +1786,12 @@ dependencies = [ "byteorder", ] +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + [[package]] name = "hashbrown" version = "0.15.5" @@ -1938,6 +2078,25 @@ dependencies = [ "icu_properties", ] +[[package]] +name = "imara-diff" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17d34b7d42178945f775e84bc4c36dde7c1c6cdfea656d3354d009056f2bb3d2" +dependencies = [ + "hashbrown 0.15.5", +] + +[[package]] +name = "imara-diff" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f01d462f766df78ab820dd06f5eb700233c51f0f4c2e846520eaf4ba6aa5c5c" +dependencies = [ + "hashbrown 0.15.5", + "memchr", +] + [[package]] name = "indexmap" version = "2.13.1" @@ -2140,12 +2299,6 @@ version = "0.2.184" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48f5d2a454e16a5ea0f4ced81bd44e4cfc7bd3a507b61887c99fd3538b28e4af" -[[package]] -name = "libm" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" - [[package]] name = "libredox" version = "0.1.15" @@ -2264,16 +2417,6 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" -[[package]] -name = "minicov" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4869b6a491569605d66d3952bcdf03df789e5b536e5f0cf7758a7f08a55ae24d" -dependencies = [ - "cc", - "walkdir", -] - [[package]] name = "miniz_oxide" version = "0.8.9" @@ -2344,15 +2487,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" -[[package]] -name = "nu-ansi-term" -version = "0.50.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" -dependencies = [ - "windows-sys 0.61.2", -] - [[package]] name = "num-conv" version = "0.2.1" @@ -2366,7 +2500,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", - "libm", ] [[package]] @@ -2381,12 +2514,6 @@ version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" -[[package]] -name = "oorandom" -version = "11.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" - [[package]] name = "openssl-probe" version = "0.2.1" @@ -2625,17 +2752,6 @@ dependencies = [ "pyo3-build-config", ] -[[package]] -name = "pyo3-log" -version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26c2ec80932c5c3b2d4fbc578c9b56b2d4502098587edb8bef5b6bfcad43682e" -dependencies = [ - "arc-swap", - "log", - "pyo3", -] - [[package]] name = "pyo3-macros" version = "0.28.3" @@ -3350,6 +3466,8 @@ dependencies = [ "fluent-uri", "futures", "gix", + "gix-worktree", + "gix-worktree-state", "globset", "indexmap", "log", @@ -3379,43 +3497,6 @@ dependencies = [ "zip", ] -[[package]] -name = "sysand-java" -version = "0.0.10" -dependencies = [ - "camino", - "indexmap", - "jni", - "reqwest", - "reqwest-middleware", - "serde", - "serde_json", - "sysand-core", - "tokio", - "url", -] - -[[package]] -name = "sysand-js" -version = "0.0.10" -dependencies = [ - "camino", - "console_error_panic_hook", - "console_log", - "log", - "regex", - "semver", - "serde", - "serde_json", - "sha2", - 
"sysand-core", - "thiserror 2.0.18", - "typed-path", - "wasm-bindgen", - "wasm-bindgen-test", - "web-sys", -] - [[package]] name = "sysand-macros" version = "0.0.10" @@ -3426,25 +3507,6 @@ dependencies = [ "syn", ] -[[package]] -name = "sysand-py" -version = "0.0.10" -dependencies = [ - "camino", - "predicates", - "pyo3", - "pyo3-log", - "reqwest", - "reqwest-middleware", - "semver", - "sysand", - "sysand-core", - "tempfile", - "tokio", - "typed-path", - "url", -] - [[package]] name = "system-configuration" version = "0.7.0" @@ -3640,9 +3702,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.25.10+spec-1.1.0" +version = "0.25.11+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a82418ca169e235e6c399a84e395ab6debeb3bc90edc959bf0f48647c6a32d1b" +checksum = "0b59c4d22ed448339746c59b905d24568fcbb3ab65a500494f7b8c3e97739f2b" dependencies = [ "indexmap", "serde_core", @@ -3941,45 +4003,6 @@ dependencies = [ "unicode-ident", ] -[[package]] -name = "wasm-bindgen-test" -version = "0.3.67" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "941c102b3f0c15b6d72a53205e09e6646aafcf2991e18412cc331dbac1806bc0" -dependencies = [ - "async-trait", - "cast", - "js-sys", - "libm", - "minicov", - "nu-ansi-term", - "num-traits", - "oorandom", - "serde", - "serde_json", - "wasm-bindgen", - "wasm-bindgen-futures", - "wasm-bindgen-test-macro", - "wasm-bindgen-test-shared", -] - -[[package]] -name = "wasm-bindgen-test-macro" -version = "0.3.67" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a26bd6570f39bb1440fd8f01b63461faaf2a3f6078a508e4e54efa99363108d2" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "wasm-bindgen-test-shared" -version = "0.2.117" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c29582b14d5bf030b02fa232b9b57faf2afc322d2c61964dd80bad02bf76207" - [[package]] name = "wasm-encoder" version = 
"0.244.0" diff --git a/Cargo.toml b/Cargo.toml index c809a8e1..9505da93 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,9 +4,10 @@ members = [ "sysand", "core", "macros", - "bindings/py", - "bindings/js", - "bindings/java", + # TODO: fix and reenable once core impl is done + # "bindings/py", + # "bindings/js", + # "bindings/java", ] default-members = ["sysand", "core"] diff --git a/core/Cargo.toml b/core/Cargo.toml index b52cebc5..610801b5 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -60,7 +60,9 @@ walkdir = "2.5.0" wasm-bindgen = { version = "0.2.114", default-features = false, optional = true } zip = { version = "8.0.0", default-features = false, optional = true, features = ["deflate"] } url = { version = "2.5.8", default-features = false } -gix = { version = "0.80.0", default-features = false, optional = true, features = ["blocking-http-transport-reqwest", "blocking-network-client", "worktree-mutation"] } +gix = { version = "0.81.0", default-features = false, optional = true, features = ["blocking-http-transport-reqwest", "blocking-network-client", "worktree-mutation", "sha1"] } # may also need "revision" feature +gix-worktree = "0.50" +gix-worktree-state = "0.28" logos = "0.16.1" futures = { version = "0.3.32", default-features = false, features = ["alloc", "async-await"] } tokio = { version = "1.50.0", default-features = false, features = ["rt", "io-util"] } diff --git a/core/src/commands/add.rs b/core/src/commands/add.rs index 0b3ca141..122d5ec4 100644 --- a/core/src/commands/add.rs +++ b/core/src/commands/add.rs @@ -27,85 +27,92 @@ pub fn do_add( project: &mut P, usage_raw: &InterchangeProjectUsageRaw, ) -> Result> { - let usage: InterchangeProjectUsageG = usage_raw.validate()?.into(); + let usage: InterchangeProjectUsageG = usage_raw.validate()?.into(); let adding = "Adding"; let header = crate::style::get_style_config().header; - log::info!( - "{header}{adding:>12}{header:#} usage: `{}` {}", - &usage_raw.resource, - usage_raw - .version_constraint 
- .as_ref() - .map(|vr| vr.to_string()) - .unwrap_or("".to_string()), - ); + log::info!("{header}{adding:>12}{header:#} usage: {usage_raw}"); if let Some(info) = project.get_info().map_err(AddError::Project)?.as_mut() { - if let Some(u) = info.usage.iter_mut().find(|u| u.resource == usage.resource) { - match (usage.version_constraint, &mut u.version_constraint) { - (None, None) => { - log::warn!( - "ignoring usage `{}`,\n\ + if let Some(_existing_usage) = info.usage.iter_mut().find(|u| *u == &usage) { + log::warn!( + "ignoring usage `{usage}`,\n\ {SP:>8} since it is already present", - usage.resource, - ); - return Ok(false); - } - (None, Some(vc)) => { - log::warn!( - "ignoring usage `{}`\n\ - {SP:>8} without a version constraint, since it is already present with\n\ - {SP:>8} version constraint `{}`", - usage.resource, - vc - ); - return Ok(false); - } - (Some(vc), vc_current @ None) => { - log::warn!( - "usage `{}` is already present,\n\ - {SP:>8} but without a version constraint; version constraint\n\ - {SP:>8} `{}` will be added to it", - usage.resource, - vc - ); - *vc_current = Some(vc); - } - (Some(vc_new), Some(vc_current)) => { - // TODO: more intelligent merging of constraints - if &vc_new == vc_current { - log::warn!( - "ignoring usage `{}` with version constraint\n\ - {SP:>8} `{}`, since it is already present with identical version constraint", - usage.resource, - vc_new - ) - } else { - log::warn!( - "usage `{}` is already present, but with version\n\ - {SP:>8} constraint `{}`; new version constraint\n\ - {SP:>8} `{}` will be added to the existing ones; this may\n\ - {SP:>8} result in failed version resolution or conflicting symbol errors", - u.resource, - vc_current, - vc_new - ); - vc_current.push_str(", "); - vc_current.push_str(&vc_new); - } - } - } + ); + Ok(false) } else { info.usage.push(usage); + Ok(true) } - - project.put_info(info, true).map_err(AddError::Project)?; - - Ok(true) } else { Err(AddError::MissingInfo( "project is missing the 
interchange project information", )) } + + // if let Some(info) = project.get_info().map_err(AddError::Project)?.as_mut() { + // if let Some(u) = info.usage.iter_mut().find(|u| u.resource == usage.resource) { + // match (usage.version_constraint, &mut u.version_constraint) { + // (None, None) => { + // log::warn!( + // "ignoring usage `{}`,\n\ + // {SP:>8} since it is already present", + // usage.resource, + // ); + // return Ok(false); + // } + // (None, Some(vc)) => { + // log::warn!( + // "ignoring usage `{}`\n\ + // {SP:>8} without a version constraint, since it is already present with\n\ + // {SP:>8} version constraint `{}`", + // usage.resource, + // vc + // ); + // return Ok(false); + // } + // (Some(vc), vc_current @ None) => { + // log::warn!( + // "usage `{}` is already present,\n\ + // {SP:>8} but without a version constraint; version constraint\n\ + // {SP:>8} `{}` will be added to it", + // usage.resource, + // vc + // ); + // *vc_current = Some(vc); + // } + // (Some(vc_new), Some(vc_current)) => { + // // TODO: more intelligent merging of constraints + // if &vc_new == vc_current { + // log::warn!( + // "ignoring usage `{}` with version constraint\n\ + // {SP:>8} `{}`, since it is already present with identical version constraint", + // usage.resource, + // vc_new + // ) + // } else { + // log::warn!( + // "usage `{}` is already present, but with version\n\ + // {SP:>8} constraint `{}`; new version constraint\n\ + // {SP:>8} `{}` will be added to the existing ones; this may\n\ + // {SP:>8} result in failed version resolution or conflicting symbol errors", + // u.resource, + // vc_current, + // vc_new + // ); + // vc_current.push_str(", "); + // vc_current.push_str(&vc_new); + // } + // } + // } + // } else { + // info.usage.push(usage); + // } + // project.put_info(info, true).map_err(AddError::Project)?; + // Ok(true) + // } else { + // Err(AddError::MissingInfo( + // "project is missing the interchange project information", + // )) + // } } diff 
--git a/core/src/commands/build.rs b/core/src/commands/build.rs index 428f5455..570e9b46 100644 --- a/core/src/commands/build.rs +++ b/core/src/commands/build.rs @@ -4,7 +4,9 @@ use thiserror::Error; use crate::{ env::utils::{CloneError, ErrorBound}, include::IncludeError, - model::InterchangeProjectValidationError, + model::{ + InterchangeProjectUsageG, InterchangeProjectUsageRaw, InterchangeProjectValidationError, + }, project::{ ProjectRead, local_kpar::{IntoKparError, LocalKParProject}, @@ -149,7 +151,7 @@ pub enum KParBuildError { "project includes a path usage `{0}`,\n\ which is unlikely to be available on other computers at the same path" )] - PathUsage(String), + PathUsage(InterchangeProjectUsageRaw), #[error( "workspace sets metamodel `{workspace_metamodel}`, but project `{project_path}` \ sets a different metamodel `{project_metamodel}` in `.meta.json`;\n\ @@ -226,6 +228,17 @@ pub fn default_kpar_file_name( )) } +/// Case-insentively check if `iri` begins with `file:` +fn is_file_scheme(iri: impl AsRef) -> bool { + let iri = iri.as_ref(); + iri.len() >= 5 + && iri + .as_bytes() + .iter() + .zip(b"file:") + .all(|(c1, &c2)| c1.to_ascii_lowercase() == c2) +} + pub fn do_build_kpar, Pr: ProjectRead>( project: &Pr, path: P, @@ -277,21 +290,20 @@ fn do_build_kpar_inner, Pr: ProjectRead>( if let Some(u) = info.usage.iter().find(|x| { // Case-insensitively match `file:` scheme - x.resource.len() >= 5 - && x.resource - .as_bytes() - .iter() - .zip(b"file:") - .all(|(c1, &c2)| c1.to_ascii_lowercase() == c2) + match x { + InterchangeProjectUsageG::Resource { resource: url, .. } + | InterchangeProjectUsageG::Url { url, .. } => is_file_scheme(url), + InterchangeProjectUsageG::Path { .. 
} => true, + _ => false, + } }) { if allow_path_usage { log::warn!( - "project includes a path usage `{}`,\n\ + "project includes a path usage `{u}`,\n\ which is unlikely to be available on other computers at the same path", - u.resource ); } else { - return Err(KParBuildError::PathUsage(u.resource.clone())); + return Err(KParBuildError::PathUsage(u.to_owned())); } } diff --git a/core/src/commands/lock.rs b/core/src/commands/lock.rs index 8100048b..2590e92f 100644 --- a/core/src/commands/lock.rs +++ b/core/src/commands/lock.rs @@ -18,7 +18,7 @@ use crate::project::{editable::EditableProject, local_src::LocalSrcProject, util use crate::{ context::ProjectContext, lock::{Lock, Project, Usage, hash_str}, - model::{InterchangeProjectUsage, InterchangeProjectValidationError}, + model::{InterchangeProjectUsage, InterchangeProjectUsageG, InterchangeProjectValidationError}, project::{CanonicalizationError, ProjectRead, memory::InMemoryProject, utils::FsIoError}, resolve::ResolveRead, solve::pubgrub::{SolverError, solve}, @@ -113,6 +113,8 @@ pub fn do_lock_projects< .map_err(LockProjectError::InputProjectError)?; debug_assert!(!sources.is_empty()); + // TODO :this needs rethinking. How to map deps from InterchangeProjectUsage to proper Usage string? + // This cannot be done before resolving them lock.projects.push(Project { name: Some(info.name), publisher: info.publisher, @@ -126,7 +128,7 @@ pub fn do_lock_projects< usages: info .usage .iter() - .map(|u| Usage::from(u.resource.clone())) + .map(InterchangeProjectUsageG::to_lock_usage) .collect(), }); @@ -198,6 +200,8 @@ pub fn do_lock_extend< Vec::new() }; + // TODO: rewrite lockfile construction, it can only be done after everything is resolved to have + // identifiers. 
let lock_project = Project { name: Some(info.name), publisher: info.publisher, @@ -208,8 +212,8 @@ pub fn do_lock_extend< sources, usages: info .usage - .into_iter() - .map(|u| Usage::from(u.resource)) + .iter() + .map(InterchangeProjectUsageG::to_lock_usage) .collect(), }; if lock_projects.contains(&lock_project.hash_val()) { diff --git a/core/src/commands/sync.rs b/core/src/commands/sync.rs index 4df3f9a7..3a92499b 100644 --- a/core/src/commands/sync.rs +++ b/core/src/commands/sync.rs @@ -224,7 +224,12 @@ where log::debug!("trying to install `{uri}` from remote_kpar: {remote_kpar}"); try_install(uri, &project.checksum, storage, env)?; } - Source::RemoteGit { remote_git } => { + Source::RemoteGit { + remote_git, + // TODO: implement (shallow) cloning of the sepecified rev and dir traversal + rev, + path, + } => { let uri = main_uri.as_ref().ok_or_else(|| { SyncError::MissingIriRemoteGitPath(remote_git.as_str().into()) })?; @@ -301,6 +306,7 @@ fn try_install< ) -> Result<(), SyncError> { let uri = uri.as_ref(); let checksum = checksum.as_ref(); + // TODO: don't calculate checksum multiple times for the same project let project_checksum = storage .checksum_canonical_hex() .map_err(|e| SyncError::ProjectRead(e.to_string()))? 
diff --git a/core/src/lock.rs b/core/src/lock.rs index 405b14e3..bf6c5017 100644 --- a/core/src/lock.rs +++ b/core/src/lock.rs @@ -9,7 +9,6 @@ use std::{ str::FromStr, }; -use fluent_uri::Iri; use semver::Version; use serde::{Deserialize, Serialize}; use thiserror::Error; @@ -27,7 +26,7 @@ use crate::{ }; pub const LOCKFILE_PREFIX: &str = "# This file is automatically generated by Sysand and is not intended to be edited manually.\n\n"; -pub const CURRENT_LOCK_VERSION: &str = "0.3"; +pub const CURRENT_LOCK_VERSION: &str = "0.4"; pub const SUPPORTED_LOCK_VERSIONS: &[&str] = &[CURRENT_LOCK_VERSION]; pub const LOCKFILE_ENTRIES: &[&str] = &["lock_version", "project"]; @@ -170,7 +169,7 @@ pub enum ValidationError { ProjectWithoutId(String), #[error("unsatisfied usage `{usage}` for {project_with_name} in lockfile")] UnsatisfiedUsage { - usage: String, + usage: Usage, project_with_name: String, }, // #[error( @@ -304,7 +303,8 @@ impl Lock { } fn validate_usages(&self) -> Result<(), ValidationError> { - let mut iri_versions = HashSet::new(); + let mut satisfied_usages = HashSet::new(); + // Each project in lockfile will be identified here by a pseudo-usage for project in &self.projects { let _ = Version::parse(&project.version).inspect_err(|err| { log::warn!( @@ -316,15 +316,15 @@ impl Lock { err ); }); - for iri in &project.identifiers { - iri_versions.insert(iri.clone()); + for id in &project.identifiers { + satisfied_usages.insert(id.to_owned()); } } for project in &self.projects { for usage in &project.usages { - if !iri_versions.contains(&usage.resource) { + if !satisfied_usages.contains(&usage.0) { return Err(ValidationError::UnsatisfiedUsage { - usage: usage.resource.clone(), + usage: usage.to_owned(), project_with_name: project_with(project.name.clone()), }); } @@ -402,6 +402,7 @@ pub struct Project { pub version: String, #[serde(skip_serializing_if = "Vec::is_empty", default)] pub exports: Vec, + // TODO: what to do about identifiers for non-IRI/URL projects? 
#[serde(skip_serializing_if = "Vec::is_empty", default)] pub identifiers: Vec, #[serde(skip_serializing_if = "Vec::is_empty", default)] @@ -417,7 +418,7 @@ impl std::hash::Hash for Project { /// (including having no known IRIs) and/or ordering between different /// project instances fn hash(&self, state: &mut H) { - self.checksum.hash(state); + state.write(self.checksum.as_bytes()); } } @@ -483,6 +484,59 @@ impl Project { self.hash(&mut hasher); ProjectHash(hasher.finish()) } + + // /// Creates `Vec` from project info, allowing to determine whether the + // /// usage is fulfilled. + // pub fn to_usage(&self) -> Vec { + // let mut usages = Vec::new(); + // for s in &self.sources { + // match s { + // Source::Editable { editable: path } + // | Source::LocalSrc { src_path: path } + // | Source::LocalKpar { kpar_path: path } => usages.push(Usage::Path { + // path: path.to_owned(), + // }), + // // TODO: registry variant seems unused, remote_kpar/remote_src is used for them currently + // // Source::Registry { registry: _ } | + // Source::RemoteGit { + // remote_git: _, + // rev: _, + // path: _, + // } => { + // if let Some(publisher) = &self.publisher + // && let Some(name) = &self.name + // { + // usages.push(Usage::GitOrIndex { + // publisher: publisher.to_owned(), + // name: name.to_owned(), + // }) + // } else { + // log::debug!( + // "lockfile project with registry/git source missing publisher ({:?}) and/or name ({:?})", + // self.publisher, + // self.name + // ) + // } + // } + // Source::RemoteKpar { + // remote_kpar: url, + // remote_kpar_size: _, + // } + // | Source::RemoteSrc { remote_src: url } => { + // usages.push(Usage::Url { + // url: url.to_owned(), + // }); + // } // Source::RemoteApi { remote_api: _ } => todo!(), + // } + // } + // // For backwards compatibility, also treat everything as resource + // for i in &self.identifiers { + // usages.push(Usage::Resource { + // resource: i.to_owned(), + // }); + // } + // usages + // } } const 
SOURCE_ENTRIES: &[&str] = &[ @@ -497,10 +551,58 @@ const SOURCE_ENTRIES: &[&str] = &[ "remote_api", ]; +/// easyfind +/// +/// Interactions that have to be considered: +/// - Identifier <-> Usage (to resolve deps in lockfile) +/// - Name/Publisher <-> Usage (maybe not needed if identifier takes care of this) +/// - InterchangeProjectUsage <-> Usage (to create a lockfile) +/// - Usage <-> Source (maybe not needed if Identifier takes care of this) +/// - InterchangeProjectUsage <-> env structure (storing new usage types) +/// +/// Identifying projects in lockfile/env: +/// - if publisher is present and name/publisher obey by our rules, normalized PURL +/// is used as proposed, and is the canonical (first) identifier +/// - if publisher is present but publisher/name do not obey our rules, +/// `urn:sysand:/` is the canonical identifier +/// - publisher might not be present only for legacy usages, for these cases IRI is uses as-is +/// - additional identifiers are added where possible: +/// - workspace-provided aliases +/// - sysand.toml aliases +/// - any IRIs/URLs the project is referred to by current or other projects +/// These additional identifiers may be used in dependency resolution/syncing to +/// avoid re-resolving/re-downloading projects that have identifiers of already +/// resolved/downloaded projects +/// - projects in lockfile/env.toml always use canonical identifiers for usages +/// - this way, `identifiers[0]` is enough to identify any project in lockfile/env.toml +/// +/// Interaction between Name, Publisher, Source, Usage: +/// - if Publisher is present Name and Publisher obey by our rules, +/// `pkg:sysand//` is created as an Identifier +/// and specifed in Usage (TODO: how exactly) for other projects +/// - if Publisher is not present +/// +/// - legacy (resource): resource=IRI +/// - URL: url=URL +/// - path: local_src=rel-path for src, local_kpar=rel-path for kpar +/// - git: +/// - index: TODO: publisher and name will be present, but how to use 
them? They may +/// contain anything. Simplest thing that works: pub=publisher, name=name +/// +/// Identifier changes from an IRI to a table with a few distinct kinds: +/// - legacy: `iri`= IRI +/// - path: `path`= rel-path +/// - index: `publisher`=publisher, `name`=name (TODO: how does this interact with env structure?) +/// +/// +/// Env structure changes (from proposed env structure, not current): +/// - legacy: no change +/// - `pkg:sysand//` for eligible projects +/// - #[derive(Clone, Eq, Debug, Deserialize, Ord, PartialEq, PartialOrd, Serialize)] #[serde(untagged)] pub enum Source { - // Path must be a Unix path relative to workspace root + /// Path, must be a Unix path relative to workspace root Editable { #[serde( deserialize_with = "deserialize_unix_path", @@ -508,6 +610,7 @@ pub enum Source { )] editable: Utf8UnixPathBuf, }, + /// Path, must be a Unix path relative to workspace root LocalSrc { #[serde( deserialize_with = "deserialize_unix_path", @@ -515,6 +618,7 @@ pub enum Source { )] src_path: Utf8UnixPathBuf, }, + /// Path, must be a Unix path relative to workspace root LocalKpar { #[serde( deserialize_with = "deserialize_unix_path", @@ -522,22 +626,37 @@ pub enum Source { )] kpar_path: Utf8UnixPathBuf, }, - Registry { - registry: String, - }, + // Registry { + // registry: String, + // }, + /// Direct URL to a KPAR file RemoteKpar { remote_kpar: String, remote_kpar_size: Option, }, - RemoteSrc { - remote_src: String, - }, + /// Direct URL to an interchange project directory + RemoteSrc { remote_src: String }, + /// Direct URL of a git repository RemoteGit { remote_git: String, + /// SHA1 of the commit we used, no matter how user specified it: + /// - IRI/URL: rev of HEAD branch + /// - git + rev: specified rev + /// - git + tag: rev of tag + /// - git + branch: latest rev of branch + // TODO: maybe prepend SHA1/SHA256? + // For now we'll use the length for this (40 hex chars for SHA1, 64 for SHA256). + // Cargo seemingly? 
does this + rev: String, + /// Path of the project within the repo. None if at repo root + // TODO: use a better type + // Maybe use a String and skip if empty? + #[serde(skip_serializing_if = "Option::is_none")] + path: Option, }, - RemoteApi { - remote_api: String, - }, + // RemoteApi { + // remote_api: String, + // }, } impl Source { @@ -557,14 +676,22 @@ impl Source { Source::LocalSrc { src_path } => { table.insert("src_path", Value::from(src_path.as_str())); } - Source::Registry { registry } => { - table.insert("registry", Value::from(registry)); - } - Source::RemoteApi { remote_api } => { - table.insert("remote_api", Value::from(remote_api)); - } - Source::RemoteGit { remote_git } => { + // Source::Registry { registry } => { + // table.insert("registry", Value::from(registry)); + // } + // Source::RemoteApi { remote_api } => { + // table.insert("remote_api", Value::from(remote_api)); + // } + Source::RemoteGit { + remote_git, + rev, + path, + } => { table.insert("remote_git", Value::from(remote_git)); + table.insert("rev", Value::from(rev)); + if let Some(path) = path { + table.insert("path", Value::from(path)); + } } Source::RemoteKpar { remote_kpar, @@ -584,45 +711,133 @@ impl Source { } } -#[derive(Clone, Eq, Debug, Ord, PartialEq, PartialOrd)] -pub struct Usage { - pub resource: String, +#[derive(Clone, Eq, Debug, PartialEq, Serialize, Deserialize, PartialOrd, Ord, Hash)] +pub struct Usage(String); + +impl Display for Usage { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(&self.0) + } } impl From for Usage { - fn from(resource: String) -> Self { - Self { resource } + fn from(value: String) -> Self { + Self(value) } } -impl From> for Usage { - fn from(resource: Iri) -> Self { - Self { - resource: resource.into_string(), - } +impl From<&str> for Usage { + fn from(value: &str) -> Self { + Self(value.to_owned()) } } impl Usage { pub fn to_toml(&self) -> Value { - Value::from(&self.resource) + Value::from(&self.0) } } 
-impl<'de> Deserialize<'de> for Usage { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - Ok(Usage { resource: s }) - } -} +// #[derive(Clone, Eq, Debug, PartialEq, Serialize, Deserialize, PartialOrd, Ord, Hash)] +// #[serde(untagged)] +// pub enum Usage { +// Resource { +// resource: String, +// }, +// Url { +// url: String, +// }, +// Path { +// #[serde( +// deserialize_with = "parse_unix_path", +// serialize_with = "serialize_unix_path" +// )] +// path: Utf8UnixPathBuf, +// }, +// // For both git and index, it is sufficient to know publisher and name +// GitOrIndex { +// publisher: String, +// name: String, +// }, +// } + +// impl Display for Usage { +// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +// match self { +// Usage::Resource { resource } => { +// write!(f, "IRI `{resource}`") +// } +// Usage::Url { url } => { +// write!(f, "URL `{url}`") +// } +// Usage::Path { path } => write!(f, "path `{path}`"), +// Usage::GitOrIndex { publisher, name } => { +// write!(f, "`{publisher}/{name}`") +// } +// } +// } +// } + +// impl From for Usage { +// fn from(resource: String) -> Self { +// Self { resource } +// } +// } + +// impl From> for Usage { +// fn from(resource: Iri) -> Self { +// Self { +// resource: resource.into_string(), +// } +// } +// } + +// impl Usage { +// pub fn to_toml(&self) -> InlineTable { +// let mut table = InlineTable::new(); +// match self { +// Usage::Resource { resource } => table.insert("resource", resource.into()), +// Usage::Url { url } => table.insert("url", url.into()), +// Usage::Path { path } => table.insert("path", path.as_str().into()), +// Usage::GitOrIndex { publisher, name } => { +// table.insert("publisher", publisher.into()); +// table.insert("name", name.into()) +// } +// }; +// table +// } +// } + +// impl<'de> Deserialize<'de> for Usage { +// fn deserialize(deserializer: D) -> Result +// where +// D: 
serde::Deserializer<'de>, +// { +// let s = String::deserialize(deserializer)?; +// Ok(Usage { resource: s }) +// } +// } // impl From for Usage { // fn from(value: InterchangeProjectUsageRaw) -> Usage { -// Usage { -// resource: value.resource, +// match value { +// InterchangeProjectUsageG::Resource { +// resource, +// version_constraint: _, +// } => Usage::Resource { resource }, +// InterchangeProjectUsageG::Url { url } => Usage::Url { url }, +// InterchangeProjectUsageG::Path { path } => Usage::Path { path: path.into() }, +// InterchangeProjectUsageG::Git { +// git, +// id, +// publisher, +// name, +// } => {} +// InterchangeProjectUsageG::Index { +// publisher, +// name, +// version_constraint, +// } => todo!(), // } // } // } @@ -661,6 +876,8 @@ mod tests { }; const CHECKSUM: &str = "0000000000000000000000000000000000000000000000000000000000000000"; + const GIT_REV_SHA256: &str = "1122334455667788990011223344556677889900112233445566778899001122"; + const GIT_REV_SHA1: &str = "1122334455667788990011223344556677889900"; #[test] fn check_current_lock_version() { @@ -950,9 +1167,9 @@ checksum = "{CHECKSUM}" Source::LocalSrc { src_path: Utf8UnixPathBuf::from("example/path"), }, - Source::Registry { - registry: "www.example.com".to_string(), - }, + // Source::Registry { + // registry: "www.example.com".to_string(), + // }, Source::RemoteKpar { remote_kpar: "www.example.com/remote.kpar".to_string(), remote_kpar_size: Some(64), @@ -962,10 +1179,12 @@ checksum = "{CHECKSUM}" }, Source::RemoteGit { remote_git: "github.com/example/remote.git".to_string(), + rev: GIT_REV_SHA1.to_owned(), + path: None, }, - Source::RemoteApi { - remote_api: "www.example.com/api".to_string(), - }, + // Source::RemoteApi { + // remote_api: "www.example.com/api".to_string(), + // }, ], checksum: CHECKSUM.to_string(), }], @@ -998,9 +1217,7 @@ checksum = "{CHECKSUM}" version: "0.5.1".to_string(), exports: vec![], identifiers: vec![], - usages: vec![Usage { - resource: 
"urn:kpar:usage".to_string(), - }], + usages: vec![Usage::from("urn:kpar:usage")], sources: vec![], checksum: CHECKSUM.to_string(), }], @@ -1028,15 +1245,9 @@ checksum = "{CHECKSUM}" exports: vec![], identifiers: vec![], usages: vec![ - Usage { - resource: "urn:kpar:first".to_string(), - }, - Usage { - resource: "urn:kpar:second".to_string(), - }, - Usage { - resource: "urn:kpar:third".to_string(), - }, + Usage::from("urn:kpar:first"), + Usage::from("urn:kpar:second"), + Usage::from("urn:kpar:third"), ], sources: vec![], checksum: CHECKSUM.to_string(), @@ -1170,16 +1381,7 @@ checksum = "{CHECKSUM}" Lock { lock_version: CURRENT_LOCK_VERSION.to_string(), projects: vec![ - make_project( - None, - None, - "0.0.1", - &[], - &[], - &[Usage { - resource: iri.to_string(), - }], - ), + make_project(None, None, "0.0.1", &[], &[], &[Usage::from(iri)]), make_project(None, None, "0.0.1", &[], &[iri], &[]), ], } @@ -1200,14 +1402,7 @@ checksum = "{CHECKSUM}" "0.0.1", &[], &[], - &[ - Usage { - resource: iri1.to_string(), - }, - Usage { - resource: iri2.to_string(), - }, - ], + &[Usage::from(iri1), Usage::from(iri2)], ), make_project(None, None, "0.0.1", &[], &[iri1], &[]), make_project(None, None, "0.0.1", &[], &[iri2], &[]), @@ -1224,26 +1419,8 @@ checksum = "{CHECKSUM}" Lock { lock_version: CURRENT_LOCK_VERSION.to_string(), projects: vec![ - make_project( - None, - None, - "0.0.1", - &[], - &[], - &[Usage { - resource: iri1.to_string(), - }], - ), - make_project( - None, - None, - "0.0.1", - &[], - &[iri1], - &[Usage { - resource: iri2.to_string(), - }], - ), + make_project(None, None, "0.0.1", &[], &[], &[Usage::from(iri1)]), + make_project(None, None, "0.0.1", &[], &[iri1], &[Usage::from(iri2)]), make_project(None, None, "0.0.1", &[], &[iri2], &[]), ], } @@ -1274,16 +1451,7 @@ checksum = "{CHECKSUM}" let Err(err) = Lock { lock_version: CURRENT_LOCK_VERSION.to_string(), projects: vec![ - make_project( - None, - None, - "0.0.1", - &[name], - &[], - &[Usage { - resource: 
iri.to_string(), - }], - ), + make_project(None, None, "0.0.1", &[name], &[], &[Usage::from(iri)]), make_project(None, None, "0.0.1", &[name], &[iri], &[]), ], } @@ -1312,9 +1480,7 @@ checksum = "{CHECKSUM}" "0.0.1", &[name1, name2, name3], &[], - &[Usage { - resource: iri.to_string(), - }], + &[Usage::from(iri)], ), make_project(None, None, "0.0.1", &[name2, name3, name4], &[iri], &[]), ], @@ -1329,9 +1495,7 @@ checksum = "{CHECKSUM}" #[test] fn validate_unsatisfied_usage() { - let usage_in = Usage { - resource: "urn:kpar:test".to_string(), - }; + let usage_in = Usage::from("urn:kpar:test"); let Err(err) = Lock { lock_version: CURRENT_LOCK_VERSION.to_string(), projects: vec![make_project( @@ -1353,7 +1517,7 @@ checksum = "{CHECKSUM}" else { panic!() }; - assert_eq!(usage, usage_in.resource); + assert_eq!(usage, usage_in); assert_eq!(project_with_name, project_with::(None)); } @@ -1438,12 +1602,8 @@ checksum = "{CHECKSUM}" #[test] fn sort_sources() { - let usage1 = Usage { - resource: "urn:kpar:a".to_string(), - }; - let usage2 = Usage { - resource: "urn:kpar:b".to_string(), - }; + let usage1 = Usage::from("urn:kpar:a"); + let usage2 = Usage::from("urn:kpar:b"); let project1 = make_project( None, None, @@ -1464,12 +1624,9 @@ checksum = "{CHECKSUM}" #[test] fn sort_sources_with_constraints() { - let usage1 = Usage { - resource: "urn:kpar:a".to_string(), - }; - let usage2 = Usage { - resource: "urn:kpar:a".to_string(), - }; + let usage1 = Usage::from("urn:kpar:a"); + let usage2 = Usage::from("urn:kpar:a"); + let project1 = make_project( None, None, diff --git a/core/src/model.rs b/core/src/model.rs index 7525d05e..c4861c30 100644 --- a/core/src/model.rs +++ b/core/src/model.rs @@ -12,6 +12,8 @@ use serde::{Deserialize, Serialize}; use thiserror::Error; use typed_path::{Utf8UnixPath, Utf8UnixPathBuf}; +use crate::{lock::Usage, project::utils::make_identifier_iri}; + // pub struct RawIri(String); // pub struct ParsedIri(fluent_uri::Iri); // pub struct 
NormalisedIri(fluent_uri::Iri); @@ -21,56 +23,391 @@ pub const KNOWN_METAMODELS: [&str; 2] = [ "https://www.omg.org/spec/KerML/20250201", ]; +// #[derive(Eq, Clone, PartialEq, Serialize, Deserialize, Hash, Debug)] +// #[cfg_attr(feature = "python", derive(FromPyObject, IntoPyObject))] +// #[serde(rename_all = "camelCase")] +// pub struct InterchangeProjectUsageG { +// pub resource: Iri, // TODO: We should have a fallback for invalid IRIs +// #[serde(skip_serializing_if = "Option::is_none")] +// pub version_constraint: Option, // TODO: We should have a fallback for invalid semvers +// } +// pub type InterchangeProjectUsageRaw = InterchangeProjectUsageG; +// pub type InterchangeProjectUsage = +// InterchangeProjectUsageG, semver::VersionReq>; + +// impl InterchangeProjectUsageRaw { +// pub fn validate(&self) -> Result { +// Ok(InterchangeProjectUsage { +// resource: fluent_uri::Iri::parse(self.resource.clone()) +// .map_err(|(e, val)| InterchangeProjectValidationError::IriParse(val, e))?, + +// version_constraint: self +// .version_constraint +// .as_ref() +// .map(|c| { +// semver::VersionReq::parse(c).map_err(|e| { +// InterchangeProjectValidationError::SemVerConstraintParse(c.to_owned(), e) +// }) +// }) +// .transpose()?, +// }) +// } +// } + +// impl From for InterchangeProjectUsageRaw { +// fn from(value: InterchangeProjectUsage) -> InterchangeProjectUsageRaw { +// InterchangeProjectUsageRaw { +// resource: value.resource.to_string(), +// version_constraint: value.version_constraint.map(|x| x.to_string()), +// } +// } +// } + +// impl From, semver::VersionReq>> +// for InterchangeProjectUsageG +// { +// fn from(value: InterchangeProjectUsageG, semver::VersionReq>) -> Self { +// InterchangeProjectUsageG { +// resource: value.resource.to_string(), +// version_constraint: value.version_constraint, +// } +// } +// } + +// impl TryFrom for InterchangeProjectUsage { +// type Error = InterchangeProjectValidationError; + +// fn try_from(value: 
InterchangeProjectUsageRaw) -> Result { +// value.validate() +// } +// } + #[derive(Eq, Clone, PartialEq, Serialize, Deserialize, Hash, Debug)] #[cfg_attr(feature = "python", derive(FromPyObject, IntoPyObject))] -#[serde(rename_all = "camelCase")] -pub struct InterchangeProjectUsageG { - pub resource: Iri, // TODO: We should have a fallback for invalid IRIs - #[serde(skip_serializing_if = "Option::is_none")] - pub version_constraint: Option, // TODO: We should have a fallback for invalid semvers +#[serde(rename_all = "camelCase", untagged)] +pub enum GitId { + Rev(String), + Tag(String), + Branch(String), +} + +impl Display for GitId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + GitId::Rev(r) => write!(f, "commit `{r}`"), + GitId::Tag(t) => write!(f, "tag `{t}`"), + GitId::Branch(b) => write!(f, "branch `{b}`"), + } + } +} + +/// Usage of a project. Legacy (KerML 1.0) usage is always `Resource`, +/// regardless of its actual type. +/// `Publisher` and `Name` can be inferred from downloaded project, so there is +/// no need for user to provide this info in most cases. This is required +/// only if user disables lock/sync when adding a usage, or if non-root +/// project of git repo is to be used. 
+#[derive(Eq, Clone, PartialEq, Serialize, Deserialize, Hash, Debug)] +#[cfg_attr(feature = "python", derive(FromPyObject, IntoPyObject))] +#[serde(rename_all = "camelCase", untagged)] +pub enum InterchangeProjectUsageG { + /// Legacy, from KerML 1.0 spec + Resource { + resource: Iri, // TODO: We should have a fallback for invalid IRIs + #[serde(skip_serializing_if = "Option::is_none")] + version_constraint: Option, // TODO: We should have a fallback for invalid semvers + }, + Url { + url: Iri, + publisher: String, + name: String, + }, + // TODO: assuming this is a relative Unix-style path + // TODO: use proper types + Path { + path: Path, + publisher: String, + name: String, + }, + Git { + git: Iri, + id: GitId, + publisher: String, + name: String, + }, + Index { + publisher: String, + name: String, + version_constraint: VersionReq, + }, + // TODO: is this needed? We don't know what info might be needed for different APIs, + // so it seems premature to include this here, as it would likely be useless + // TODO: change the doc, it seemingly by mistake lists rev/tag/branch here + // Api { + // server: Iri, + // publisher: String, + // name: String, + // project_id: u128, // UUID + // }, } -pub type InterchangeProjectUsageRaw = InterchangeProjectUsageG; + +pub type InterchangeProjectUsageRaw = InterchangeProjectUsageG; pub type InterchangeProjectUsage = - InterchangeProjectUsageG, semver::VersionReq>; + InterchangeProjectUsageG, semver::VersionReq, Utf8UnixPathBuf>; + +impl Display for InterchangeProjectUsageRaw { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + InterchangeProjectUsageG::Resource { + resource, + version_constraint, + } => { + write!(f, "IRI `{resource}`")?; + if let Some(vc) = version_constraint { + write!(f, " {vc}")?; + } + } + InterchangeProjectUsageG::Url { + url, + publisher, + name, + } => { + write!(f, "`{publisher}/{name}` from URL `{url}`")?; + } + InterchangeProjectUsageG::Path { + path, + publisher, + 
name, + } => { + write!(f, "`{publisher}/{name}` from path `{path}`")?; + } + InterchangeProjectUsageG::Git { + git, + id, + publisher, + name, + } => { + write!(f, "`{publisher}/{name}` from git repository `{git}`, {id}")?; + } + InterchangeProjectUsageG::Index { + publisher, + name, + // TODO: version must be chosen at this point even if not provided by user + version_constraint, + } => { + write!(f, "`{publisher}/{name}` ({version_constraint}) from index")?; + } + } + Ok(()) + } +} + +impl InterchangeProjectUsageG { + /// Get the canonical IRI representing this usage. This IRI is not resolvable + /// on its own. + /// This is expensive, don't call repeatedly + pub fn to_lock_usage(&self) -> Usage { + match self { + InterchangeProjectUsageG::Resource { + resource, + version_constraint: _, + } => Usage::from(resource.to_string()), + InterchangeProjectUsageG::Url { + url: _, + publisher, + name, + } => Usage::from(make_identifier_iri(publisher, name)), + InterchangeProjectUsageG::Path { + path: _, + publisher, + name, + } => Usage::from(make_identifier_iri(publisher, name)), + InterchangeProjectUsageG::Git { + git: _, + id: _, + publisher, + name, + } => Usage::from(make_identifier_iri(publisher, name)), + InterchangeProjectUsageG::Index { + publisher, + name, + version_constraint: _, + } => Usage::from(make_identifier_iri(publisher, name)), + } + } +} impl InterchangeProjectUsageRaw { + // TODO: consolidate to `try_from()`? 
pub fn validate(&self) -> Result { - Ok(InterchangeProjectUsage { - resource: fluent_uri::Iri::parse(self.resource.clone()) - .map_err(|(e, val)| InterchangeProjectValidationError::IriParse(val, e))?, - - version_constraint: self - .version_constraint - .as_ref() - .map(|c| { - semver::VersionReq::parse(c).map_err(|e| { - InterchangeProjectValidationError::SemVerConstraintParse(c.to_owned(), e) + let res = match self { + InterchangeProjectUsageG::Resource { + resource, + version_constraint, + } => InterchangeProjectUsage::Resource { + resource: fluent_uri::Iri::parse(resource.to_owned()) + .map_err(|(e, val)| InterchangeProjectValidationError::IriParse(val, e))?, + + version_constraint: version_constraint + .as_ref() + .map(|c| { + semver::VersionReq::parse(c).map_err(|e| { + InterchangeProjectValidationError::SemVerConstraintParse( + c.to_owned(), + e, + ) + }) }) - }) - .transpose()?, - }) + .transpose()?, + }, + InterchangeProjectUsageG::Url { + url, + publisher, + name, + } => InterchangeProjectUsage::Url { + url: fluent_uri::Iri::parse(url.to_owned()) + .map_err(|(e, val)| InterchangeProjectValidationError::IriParse(val, e))?, + publisher: publisher.clone(), + name: name.clone(), + }, + InterchangeProjectUsageG::Path { + path, + publisher, + name, + } => InterchangeProjectUsage::Path { + // TODO: check that this is a relative Unix path + path: Utf8UnixPathBuf::from(path), + publisher: publisher.clone(), + name: name.clone(), + }, + InterchangeProjectUsageG::Git { + git, + id, + publisher, + name, + } => InterchangeProjectUsage::Git { + git: fluent_uri::Iri::parse(git.to_owned()) + .map_err(|(e, val)| InterchangeProjectValidationError::IriParse(val, e))?, + id: id.clone(), + // TODO: No restrictions for now + publisher: publisher.to_owned(), + name: name.to_owned(), + }, + InterchangeProjectUsageG::Index { + publisher, + name, + version_constraint, + } => InterchangeProjectUsage::Index { + publisher: publisher.to_owned(), + name: name.to_owned(), + 
version_constraint: semver::VersionReq::parse(version_constraint).map_err(|e| { + InterchangeProjectValidationError::SemVerConstraintParse( + version_constraint.to_owned(), + e, + ) + })?, + }, + }; + + Ok(res) } + + // /// Get the canonical IRI representing this usage. This IRI is not resolvable + // /// on its own. + // /// This is expensive, don't call repeatedly + // pub fn to_lock_usage(&self) -> Usage { + // match self { + // InterchangeProjectUsageG::Resource { + // resource, + // version_constraint: _, + // } => Usage::from(resource.to_owned()), + // InterchangeProjectUsageG::Url { + // url: _, + // publisher, + // name, + // } => Usage::from(make_identifier_iri(publisher, name)), + // InterchangeProjectUsageG::Path { + // path: _, + // publisher, + // name, + // } => Usage::from(make_identifier_iri(publisher, name)), + // InterchangeProjectUsageG::Git { + // git: _, + // id: _, + // publisher, + // name, + // } => Usage::from(make_identifier_iri(publisher, name)), + // InterchangeProjectUsageG::Index { + // publisher, + // name, + // version_constraint: _, + // } => Usage::from(make_identifier_iri(publisher, name)), + // } + // } } impl From for InterchangeProjectUsageRaw { fn from(value: InterchangeProjectUsage) -> InterchangeProjectUsageRaw { - InterchangeProjectUsageRaw { - resource: value.resource.to_string(), - version_constraint: value.version_constraint.map(|x| x.to_string()), + match value { + InterchangeProjectUsage::Resource { + resource, + version_constraint, + } => InterchangeProjectUsageRaw::Resource { + resource: resource.into_string(), + version_constraint: version_constraint.map(|x| x.to_string()), + }, + InterchangeProjectUsage::Url { + url, + publisher, + name, + } => InterchangeProjectUsageRaw::Url { + url: url.into_string(), + publisher, + name, + }, + InterchangeProjectUsage::Path { + path, + publisher, + name, + } => InterchangeProjectUsageRaw::Path { + path: path.into_string(), + publisher, + name, + }, + InterchangeProjectUsage::Git 
{ + git, + id, + publisher, + name, + } => InterchangeProjectUsageRaw::Git { + git: git.into_string(), + id, + publisher, + name, + }, + InterchangeProjectUsage::Index { + publisher, + name, + version_constraint, + } => InterchangeProjectUsageRaw::Index { + publisher, + name, + version_constraint: version_constraint.to_string(), + }, } } } -impl From, semver::VersionReq>> - for InterchangeProjectUsageG -{ - fn from(value: InterchangeProjectUsageG, semver::VersionReq>) -> Self { - InterchangeProjectUsageG { - resource: value.resource.to_string(), - version_constraint: value.version_constraint, - } - } -} +// impl From, semver::VersionReq>> +// for InterchangeProjectUsageG +// { +// fn from(value: InterchangeProjectUsageG, semver::VersionReq>) -> Self { +// InterchangeProjectUsageG { +// resource: value.resource.to_string(), +// version_constraint: value.version_constraint, +// } +// } +// } impl TryFrom for InterchangeProjectUsage { type Error = InterchangeProjectValidationError; @@ -83,7 +420,7 @@ impl TryFrom for InterchangeProjectUsage { #[derive(Eq, Clone, PartialEq, Serialize, Deserialize, Debug)] #[cfg_attr(feature = "python", derive(FromPyObject, IntoPyObject))] #[serde(rename_all = "camelCase")] -pub struct InterchangeProjectInfoG { +pub struct InterchangeProjectInfoG { pub name: String, #[serde(skip_serializing_if = "Option::is_none")] @@ -108,12 +445,17 @@ pub struct InterchangeProjectInfoG { #[serde(default)] pub topic: Vec, - pub usage: Vec>, + // pub usage: Vec>, + pub usage: Vec>, } -pub type InterchangeProjectInfoRaw = InterchangeProjectInfoG; -pub type InterchangeProjectInfo = - InterchangeProjectInfoG, semver::Version, semver::VersionReq>; +pub type InterchangeProjectInfoRaw = InterchangeProjectInfoG; +pub type InterchangeProjectInfo = InterchangeProjectInfoG< + fluent_uri::Iri, + semver::Version, + semver::VersionReq, + Utf8UnixPathBuf, +>; impl From for InterchangeProjectInfoRaw { fn from(value: InterchangeProjectInfo) -> Self { @@ -135,8 +477,8 
@@ impl From for InterchangeProjectInfoRaw { } } -impl - InterchangeProjectInfoG +impl + InterchangeProjectInfoG { pub fn minimal(name: String, version: Version) -> Self { InterchangeProjectInfoG { @@ -162,15 +504,19 @@ impl /// Note that sysand will never add multiple usages of the same resource /// to the project, but it does tolerate such usages. // TODO: the spec does not say anything about this and should be clarified - pub fn pop_usage(&mut self, resource: &Iri) -> Vec> { + pub fn pop_usage( + &mut self, + resource: &Iri, + ) -> Vec> { self.usage - .extract_if( - .., - |InterchangeProjectUsageG { - resource: this_resource, - .. - }| this_resource == resource, - ) + .extract_if(.., |u| match u { + // TODO: how to match here? Simplest would be to require the same info as for + // adding, but that is way overkill and annoying to use. Otherwise we'd need + // some sort of separate "matcher" type that allows wildcarding everything + // apart from: any sort of IRI/URL, publisher+name. + // Then how to allow providing version (constraint) and possibly other matchers? 
+ _ => todo!("this needs new design of pop_usage and CLI surface"), + }) .collect() } } diff --git a/core/src/project/any.rs b/core/src/project/any.rs index 6a0f0c26..72847d26 100644 --- a/core/src/project/any.rs +++ b/core/src/project/any.rs @@ -106,8 +106,13 @@ impl AnyProject { } .to_tokio_sync(runtime), )), - Source::RemoteGit { remote_git } => Ok(AnyProject::RemoteGit( - GixDownloadedProject::new(remote_git).map_err(TryFromSourceError::RemoteGit)?, + Source::RemoteGit { + remote_git, + rev, + path, + } => Ok(AnyProject::RemoteGit( + GixDownloadedProject::new(remote_git, Some(rev), path) + .map_err(TryFromSourceError::RemoteGit)?, )), _ => Err(TryFromSourceError::UnsupportedSource(format!("{source:?}"))), } diff --git a/core/src/project/gix_git_download.rs b/core/src/project/gix_git_download.rs index 1fc7d64f..40d20718 100644 --- a/core/src/project/gix_git_download.rs +++ b/core/src/project/gix_git_download.rs @@ -1,7 +1,18 @@ -use std::num::NonZero; +use std::{ + num::{NonZero, NonZeroU32}, + sync::atomic::AtomicBool, +}; use camino::Utf8PathBuf; -use gix::{prepare_clone, remote::fetch::Shallow}; +use camino_tempfile::Utf8TempDir; +use gix::{ + prepare_clone, + progress::{self, Discard}, + remote::{ + Direction, + fetch::{self, Shallow}, + }, +}; use thiserror::Error; use crate::{ @@ -20,6 +31,12 @@ use super::utils::{FsIoError, ProjectDeserializationError, ProjectSerializationE #[derive(Debug)] pub struct GixDownloadedProject { pub url: gix::Url, + /// Before cloning: git rev to clone. + /// After cloning: actual git rev, will match requested if given, + /// otherwise the latest rev of the default branch. 
+ rev: Option, + /// path within the cloned repo where project resides + path: Option, tmp_dir: camino_tempfile::Utf8TempDir, inner: LocalSrcProject, } @@ -28,6 +45,10 @@ pub struct GixDownloadedProject { pub enum GixDownloadedError { #[error("git clone from `{0}` failed: {1}")] Clone(String, Box), + #[error("git bare repo init at `{0}` failed: {1}")] + Init(String, Box), + #[error("git remote `{0}` init failed: {1}")] + RemoteInit(String, Box), #[error("failed to parse git URL `{0}`: {1}")] UrlParse(Box, Box), #[error(transparent)] @@ -77,34 +98,217 @@ impl From for GixDownloadedError { } impl GixDownloadedProject { - pub fn new>(url: S) -> Result { + pub fn new>( + url: S, + rev: Option, + path: Option, + ) -> Result { let tmp_dir = camino_tempfile::tempdir().map_err(FsIoError::MkTempDir)?; + let mut canonical_temp = wrapfs::canonicalize(tmp_dir.path())?; + // Append path inside the repo, as it will be cloned to the temp dir + if let Some(p) = &path { + canonical_temp = canonical_temp.join(p); + } + let downloaded_project = LocalSrcProject { + nominal_path: None, + project_path: canonical_temp, + }; Ok(GixDownloadedProject { url: gix::url::parse(url.as_ref().into()) .map_err(|e| GixDownloadedError::UrlParse(url.as_ref().into(), Box::new(e)))?, - inner: LocalSrcProject { - nominal_path: None, - project_path: wrapfs::canonicalize(tmp_dir.path())?, - }, + rev, + path, + inner: downloaded_project, tmp_dir, }) } - fn ensure_downloaded(&self) -> Result<(), GixDownloadedError> { - if !self.tmp_dir.path().join(".git").is_dir() { - let prepared_clone = prepare_clone(self.url.clone(), self.tmp_dir.path()) - .map_err(|e| GixDownloadedError::Clone(self.url.to_string(), Box::new(e)))?; + /// Immediately clone the repo and try to find the project publisher/name + pub fn new_download>( + url: S, + rev: Option, + publisher: impl AsRef, + name: impl AsRef, + ) -> Result { + let tmp_dir = camino_tempfile::tempdir().map_err(FsIoError::MkTempDir)?; + Self::download_to_temp(&tmp_dir, 
url, rev.as_ref())?; + + let mut canonical_temp = wrapfs::canonicalize(tmp_dir.path())?; + // Append path inside the repo, as it will be cloned to the temp dir + if let Some(p) = &path { + canonical_temp = canonical_temp.join(p); + } + let downloaded_project = LocalSrcProject { + nominal_path: None, + project_path: canonical_temp, + }; + Ok(GixDownloadedProject { + url: gix::url::parse(url.as_ref().into()) + .map_err(|e| GixDownloadedError::UrlParse(url.as_ref().into(), Box::new(e)))?, + rev, + path, + inner: downloaded_project, + tmp_dir, + }) + } + + /// Clone the repo, the checkout `rev` (which must be a commit SHA1/256). + /// Adapted from gitoxide `main_worktree()`: + /// https://github.com/GitoxideLabs/gitoxide/blob/v0.52.0/gix/src/clone/checkout.rs#L85 + fn download_to_temp( + tmp_dir: &Utf8TempDir, + url: &str, + rev: Option<&str>, + ) -> Result<(), GixDownloadedError> { + if let Some(rev) = rev { + // Fetch all objects without checking out any files + let (repo, _) = gix::prepare_clone(url.clone(), tmp_dir.path()) + .unwrap() + .fetch_only(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) + .unwrap(); + + // Resolve the SHA to a commit, then get its tree + // We already checked that this is a valid SHA1/256 + let commit_id = gix::ObjectId::from_hex(rev.as_bytes()).unwrap(); + let tree_id = repo + .find_object(commit_id) + .unwrap() + .into_commit() + .tree_id() + .unwrap() + .detach(); + + // Build an index from that specific tree + let mut index = repo.index_from_tree(&tree_id).unwrap(); + + // Use IdMapping as attribute source: workdir is empty, read attrs from ODB + let mut opts = repo + .checkout_options(gix_worktree::stack::state::attributes::Source::IdMapping) + .unwrap(); + opts.destination_is_initially_empty = true; + + gix_worktree_state::checkout( + &mut index, + tmp_dir.path(), + repo.objects.clone().into_arc().unwrap(), + &gix::progress::Discard, + &gix::progress::Discard, + &gix::interrupt::IS_INTERRUPTED, + opts, + ) + .unwrap(); + + 
index.write(Default::default()).unwrap(); + } else { + let prepared_clone = prepare_clone(url.clone(), tmp_dir.path()) + .map_err(|e| GixDownloadedError::Clone(url.to_string(), Box::new(e)))?; let (mut prepare_checkout, _) = prepared_clone .with_shallow(Shallow::DepthAtRemote(NonZero::new(1).unwrap())) .fetch_then_checkout(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) - .map_err(|e| GixDownloadedError::Fetch(self.url.to_string(), Box::new(e)))?; + .map_err(|e| GixDownloadedError::Fetch(url.to_string(), Box::new(e)))?; let (_repo, _) = prepare_checkout .main_worktree(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) - .map_err(|e| { - GixDownloadedError::Checkout(self.tmp_dir.to_path_buf(), Box::new(e)) - })?; + .map_err(|e| GixDownloadedError::Checkout(tmp_dir.to_path_buf(), Box::new(e)))?; + + // TODO: get last commit SHA + } + + Ok(()) + } + + // TODO: be more efficient. Git repos should be in user-level cache + // and updated when needed + fn ensure_downloaded(&self) -> Result<(), GixDownloadedError> { + if !self.tmp_dir.path().join(".git").is_dir() { + // Try downloading only the rev we need + // let repo = gix::init_bare(&self.tmp_dir.path()).map_err(|e| { + // GixDownloadedError::Init(self.tmp_dir.path().as_str().to_owned(), e.into()) + // })?; + // let mut remote = repo + // .remote_at(self.url.clone()) + // .map_err(|e| GixDownloadedError::RemoteInit(self.url.to_string(), e.into()))?; + // // + // // let target_commit = + // // gix::ObjectId::from_hex(self.rev.as_bytes()).expect("BUG: unvalidated git rev"); + // // The exact name here doesn't matter, we'll only clone a single commit anyway. 
+ // remote + // .replace_refspecs( + // [format!("{}:FETCH_HEAD", self.rev).as_str()], + // Direction::Fetch, + // ) + // .unwrap(); + // // TODO: proper error reporting + // let outcome = remote + // .connect(Direction::Fetch) + // .unwrap() + // .prepare_fetch(progress::Discard, Default::default()) + // .unwrap() + // .with_shallow(Shallow::DepthAtRemote(NonZeroU32::new(1).unwrap())) + // .receive(progress::Discard, &gix::interrupt::IS_INTERRUPTED) + // .unwrap(); + // // TODO: check that it actually fetched what we want + + // Clone the repo, the checkout `rev` (which must be a commit SHA1/256). + // Adapted from gitoxide `main_worktree()`: + // https://github.com/GitoxideLabs/gitoxide/blob/v0.52.0/gix/src/clone/checkout.rs#L85 + + if let Some(rev) = &self.rev { + // Fetch all objects without checking out any files + let (repo, _) = gix::prepare_clone(self.url.clone(), self.tmp_dir.path()) + .unwrap() + .fetch_only(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) + .unwrap(); + + // Resolve the SHA to a commit, then get its tree + // We already checked that this is a valid SHA1/256 + let commit_id = gix::ObjectId::from_hex(rev.as_bytes()).unwrap(); + let tree_id = repo + .find_object(commit_id) + .unwrap() + .into_commit() + .tree_id() + .unwrap() + .detach(); + + // Build an index from that specific tree + let mut index = repo.index_from_tree(&tree_id).unwrap(); + + // Use IdMapping as attribute source: workdir is empty, read attrs from ODB + let mut opts = repo + .checkout_options(gix_worktree::stack::state::attributes::Source::IdMapping) + .unwrap(); + opts.destination_is_initially_empty = true; + + gix_worktree_state::checkout( + &mut index, + self.tmp_dir.path(), + repo.objects.clone().into_arc().unwrap(), + &gix::progress::Discard, + &gix::progress::Discard, + &gix::interrupt::IS_INTERRUPTED, + opts, + ) + .unwrap(); + + index.write(Default::default()).unwrap(); + } else { + let prepared_clone = prepare_clone(self.url.clone(), self.tmp_dir.path()) 
+ .map_err(|e| GixDownloadedError::Clone(self.url.to_string(), Box::new(e)))?; + + let (mut prepare_checkout, _) = prepared_clone + .with_shallow(Shallow::DepthAtRemote(NonZero::new(1).unwrap())) + .fetch_then_checkout(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) + .map_err(|e| GixDownloadedError::Fetch(self.url.to_string(), Box::new(e)))?; + let (_repo, _) = prepare_checkout + .main_worktree(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) + .map_err(|e| { + GixDownloadedError::Checkout(self.tmp_dir.to_path_buf(), Box::new(e)) + })?; + + // TODO: get last commit SHA + } } Ok(()) @@ -142,9 +346,23 @@ impl ProjectRead for GixDownloadedProject { Ok(FileWithLifetime::new(self.inner.read_source(path)?)) } + // TODO: find a less hacky way to provide the SHA here, it should be saved when + // repo is cloned fn sources(&self, _ctx: &ProjectContext) -> Result, Self::Error> { + // TODO: find a better way to obtain required SHA + let rev = if let Some(rev) = &self.rev { + rev.to_owned() + } else { + // If desired rev is not provided, use HEAD commit (i.e. current checked-out state) + let repo = gix::open(self.tmp_dir.path()).unwrap(); + + repo.head_commit().unwrap().id().to_string() + }; + Ok(vec![Source::RemoteGit { remote_git: self.url.to_string(), + rev, + path: self.path.clone(), }]) } } @@ -238,6 +456,17 @@ mod tests { .assert() .success(); + let hex_commit_sha = Command::new("git") + .arg("rev-parse") + .arg("HEAD") + .current_dir(repo_dir.path()) + .output()? + .assert() + .success() + .get_output() + .stdout + .to_owned(); + // NOTE: Gix does not support the "dumb" HTTP protocol // let free_port = port_check::free_local_port().unwrap().to_string(); @@ -259,7 +488,11 @@ mod tests { // prefix that gix cannot parse as a valid file URL. Strip it. 
let path = canonical.to_str().unwrap(); let path = path.strip_prefix(r"\\?\").unwrap_or(path); - let project = GixDownloadedProject::new(format!("file://{path}"))?; + let project = GixDownloadedProject::new( + format!("file://{path}"), + Some(String::from_utf8(hex_commit_sha).unwrap()), + None, + )?; let (Some(info), Some(meta)) = project.get_project()? else { panic!("expected info and meta"); diff --git a/core/src/project/utils.rs b/core/src/project/utils.rs index b74bc0f8..e3d06bbf 100644 --- a/core/src/project/utils.rs +++ b/core/src/project/utils.rs @@ -4,12 +4,131 @@ use std::io::{self, Read}; use camino::{Utf8Component, Utf8Path, Utf8PathBuf}; +use fluent_uri::pct_enc::{EString, encoder::IData}; use serde::Deserialize; use thiserror::Error; use typed_path::Utf8UnixPathBuf; #[cfg(feature = "filesystem")] use zip::{self, result::ZipError}; +use std::io::{self, Read}; + +// TODO: use newtype for identifier IRI +pub fn make_identifier_iri(publisher: impl AsRef, name: impl AsRef) -> String { + let publisher = publisher.as_ref(); + let name = name.as_ref(); + debug_assert!(!publisher.is_empty()); + debug_assert!(!name.is_empty()); + + match (check_purl_publisher(publisher), check_purl_name(name)) { + (PurlPartForm::Purl, PurlPartForm::Purl) => { + format!("pkg:sysand/{publisher}/{name}") + } + (PurlPartForm::MakePurl, PurlPartForm::Purl) => { + let mut res = "pkg:sysand/".to_owned(); + normalize_purl_char(publisher, &mut res); + res.push('/'); + res.push_str(name); + res + } + (PurlPartForm::Purl, PurlPartForm::MakePurl) => { + let mut res = "pkg:sysand/".to_owned(); + res.push_str(publisher); + res.push('/'); + normalize_purl_char(name, &mut res); + res + } + (PurlPartForm::MakePurl, PurlPartForm::MakePurl) => { + let mut res = "pkg:sysand/".to_owned(); + normalize_purl_char(publisher, &mut res); + res.push('/'); + normalize_purl_char(name, &mut res); + res + } + _ => { + let mut enc_pub = EString::::new(); + enc_pub.encode_str::(publisher); + let mut enc_name = 
EString::::new(); + enc_name.encode_str::(publisher); + format!("urn:sysand:{enc_pub}/{enc_name}") + } + } +} + +/// Normalize publisher or name for PURL: +/// - lowercase +/// - replace spaces with `-` +/// +/// Assumes that preconditions are satisfied +fn normalize_purl_char(part: &str, buf: &mut String) { + for c in part.chars() { + if c == ' ' { + buf.push('-'); + } else { + buf.push(c.to_ascii_lowercase()); + } + } +} + +/// For of a PURL part: publisher or name +#[derive(Clone, Copy, Debug)] +enum PurlPartForm { + /// Valid form to use in PURL + Purl, + /// Can be turned into form usable in PURL + MakePurl, + /// Not usable for PURL + Arbitrary, +} + +fn check_purl_publisher(publisher: &str) -> PurlPartForm { + if publisher.len() < 3 + || publisher.len() > 50 + || !publisher.as_bytes()[0].is_ascii_alphanumeric() + || !publisher.as_bytes().last().unwrap().is_ascii_alphanumeric() + { + return PurlPartForm::Arbitrary; + } + let mut res = PurlPartForm::Purl; + + for &[c1, c2] in publisher.as_bytes().array_windows() { + if (!c1.is_ascii_alphanumeric() && c1 != b'-' && c1 != b' ') + || (c1 == b' ' || c1 == b'-') && (c2 == b' ' || c2 == b'-') + { + return PurlPartForm::Arbitrary; + } + if c1.is_ascii_uppercase() || c1 == b' ' { + res = PurlPartForm::MakePurl; + } + } + + res +} + +fn check_purl_name(name: &str) -> PurlPartForm { + if name.len() < 3 + || name.len() > 50 + || !name.as_bytes()[0].is_ascii_alphanumeric() + || !name.as_bytes().last().unwrap().is_ascii_alphanumeric() + { + return PurlPartForm::Arbitrary; + } + let mut res = PurlPartForm::Purl; + + for &[c1, c2] in name.as_bytes().array_windows() { + if (!c1.is_ascii_alphanumeric() && c1 != b'-' && c1 != b' ' && c1 != b'.') + || (c1 == b' ' || c1 == b'-' || c1 == b'.') && (c2 == b' ' || c2 == b'-' || c2 == b'.') + { + return PurlPartForm::Arbitrary; + } + if c1.is_ascii_uppercase() || c1 == b' ' { + res = PurlPartForm::MakePurl; + } + } + + res +} + /// A file that is guaranteed to exist as long as the 
lifetime. /// Intended to be used with temporary files that are automatically /// deleted; in this case, the lifetime `'a` is the lifetime of the diff --git a/core/src/resolve/gix_git.rs b/core/src/resolve/gix_git.rs index 18f5a738..47c85501 100644 --- a/core/src/resolve/gix_git.rs +++ b/core/src/resolve/gix_git.rs @@ -59,8 +59,12 @@ impl ResolveRead for GitResolver { Ok(ResolutionOutcome::Resolved(std::iter::once( // TODO: use trim_prefix() once it's stable - GixDownloadedProject::new(uri.as_str().strip_prefix("git+").unwrap_or(uri.as_str())) - .map_err(|e| e.into()), + GixDownloadedProject::new( + uri.as_str().strip_prefix("git+").unwrap_or(uri.as_str()), + None, + None, + ) + .map_err(|e| e.into()), ))) } diff --git a/core/src/resolve/mod.rs b/core/src/resolve/mod.rs index d02903e3..b2c0b988 100644 --- a/core/src/resolve/mod.rs +++ b/core/src/resolve/mod.rs @@ -49,6 +49,8 @@ impl ResolutionOutcome { } } +/// This is only ussed to resolve "resource" IRIs, new style usages +/// go directly through their specific resolvers. pub trait ResolveRead { type Error: ErrorBound; diff --git a/core/src/solve/pubgrub.rs b/core/src/solve/pubgrub.rs index 7deafd34..33a801d3 100644 --- a/core/src/solve/pubgrub.rs +++ b/core/src/solve/pubgrub.rs @@ -14,7 +14,10 @@ use std::{ use thiserror::Error; use crate::{ - model::{InterchangeProjectInfo, InterchangeProjectMetadataRaw, InterchangeProjectUsage}, + model::{ + InterchangeProjectInfo, InterchangeProjectMetadataRaw, InterchangeProjectUsage, + InterchangeProjectUsageG, + }, project::ProjectRead, resolve::ResolveRead, }; @@ -286,53 +289,93 @@ fn compute_deps( let mut depmap: HashMap = pubgrub::Map::default(); for usage in usages { - if let Some(constraint) = &usage.version_constraint { - let mut valid_candidates = HashSet::new(); - - let mut found_versions = Vec::new(); - for (i, (candidate_info, _)) in resolve_candidates(resolver, &usage.resource, cache)? 
- .iter() - .enumerate() - { - found_versions.push(candidate_info.version.clone()); - if constraint.matches(&candidate_info.version) { - valid_candidates.insert(i); - } - } - if valid_candidates.is_empty() { - let mut versions = String::new(); - // `found_versions` must contain at least one element - write!(versions, "`{}`", found_versions[0]).unwrap(); - for v in &found_versions[1..] { - write!(versions, ", `{}`", v).unwrap(); - } - return Err(InternalSolverError::VersionNotAvailable(format!( - "project `{}`\n\ + match usage { + InterchangeProjectUsageG::Resource { + resource, + version_constraint, + } => { + if let Some(constraint) = version_constraint { + let mut valid_candidates = HashSet::new(); + + let mut found_versions = Vec::new(); + for (i, (candidate_info, _)) in resolve_candidates(resolver, resource, cache)? + .iter() + .enumerate() + { + found_versions.push(candidate_info.version.clone()); + if constraint.matches(&candidate_info.version) { + valid_candidates.insert(i); + } + } + if valid_candidates.is_empty() { + let mut versions = String::new(); + // `found_versions` must contain at least one element + write!(versions, "`{}`", found_versions[0]).unwrap(); + for v in &found_versions[1..] 
{ + write!(versions, ", `{}`", v).unwrap(); + } + return Err(InternalSolverError::VersionNotAvailable(format!( + "project `{}`\n\ was found, but the requested version constraint `{}`\n\ was not satisfied by any of the found versions:\n\ {}", - usage.resource, constraint, versions - ))); - } + resource, constraint, versions + ))); + } - depmap.insert( - DependencyIdentifier::Remote(usage.resource.clone()), - DiscreteHashSet::Finite(valid_candidates), - ); - } else { - // Check that the project can be found - resolve_candidates(resolver, &usage.resource, cache)?; - // TODO: reenable this when it's fixed to give better error messages - // https://github.com/pubgrub-rs/pubgrub/pull/216 - // match resolve_candidates(resolver, &usage.resource, cache) { - // Ok(_) => (), - // Err(err) => return Ok(pubgrub::Dependencies::Unavailable(err.to_string())), - // }; - - depmap.insert( - DependencyIdentifier::Remote(usage.resource.clone()), - DiscreteHashSet::empty().complement(), - ); + depmap.insert( + DependencyIdentifier::Remote(resource.clone()), + DiscreteHashSet::Finite(valid_candidates), + ); + } else { + // Check that the project can be found + resolve_candidates(resolver, resource, cache)?; + // TODO: reenable this when it's fixed to give better error messages + // https://github.com/pubgrub-rs/pubgrub/pull/216 + // match resolve_candidates(resolver, &usage.resource, cache) { + // Ok(_) => (), + // Err(err) => return Ok(pubgrub::Dependencies::Unavailable(err.to_string())), + // }; + + depmap.insert( + DependencyIdentifier::Remote(resource.clone()), + DiscreteHashSet::empty().complement(), + ); + } + } + InterchangeProjectUsageG::Url { + url, + publisher, + name, + } => { + // TODO: use concrete resolver for DEREFERENCEABLE URL, it must also check that publisher/name match + todo!() + } + InterchangeProjectUsageG::Path { + path, + publisher, + name, + } => { + // TODO: use concrete resolver for RELATIVE PATH, it must also check that publisher/name match + todo!() + } + 
InterchangeProjectUsageG::Git { + git, + id, + publisher, + name, + } => { + // TODO: use concrete resolver for GIT, it has to find the project in the repo + todo!() + } + InterchangeProjectUsageG::Index { + publisher, + name, + version_constraint, + } => { + // TODO: use concrete resolver for INDEX + todo!() + } } } @@ -641,7 +684,7 @@ mod tests { topic: vec![], usage: usage .into_iter() - .map(|(d, dv)| InterchangeProjectUsageRaw { + .map(|(d, dv)| InterchangeProjectUsageRaw::Resource { resource: d.to_string(), version_constraint: dv.map(|x| x.to_string()), }) @@ -703,7 +746,7 @@ mod tests { )]); let solution = super::solve( - vec![InterchangeProjectUsage { + vec![InterchangeProjectUsage::Resource { resource: fluent_uri::Iri::parse("urn:kpar:test_version_selection")?.into(), version_constraint: Some(semver::VersionReq::parse(">=2.0.0")?), }], @@ -761,11 +804,11 @@ mod tests { let solution = super::solve( vec![ - InterchangeProjectUsage { + InterchangeProjectUsage::Resource { resource: fluent_uri::Iri::parse("urn:kpar:test_diamond_selection_a")?.into(), version_constraint: Some(semver::VersionReq::parse(">=0.1.0")?), }, - InterchangeProjectUsage { + InterchangeProjectUsage::Resource { resource: fluent_uri::Iri::parse("urn:kpar:test_diamond_selection_b")?.into(), version_constraint: None, }, diff --git a/sysand/src/commands/info.rs b/sysand/src/commands/info.rs index b3076e08..86dadcdc 100644 --- a/sysand/src/commands/info.rs +++ b/sysand/src/commands/info.rs @@ -14,6 +14,7 @@ use sysand_core::{ env::local_directory::DEFAULT_ENV_NAME, model::{ InterchangeProjectChecksumRaw, InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw, + InterchangeProjectUsageG, }, project::{ProjectMut, ProjectRead, any::OverrideProject}, resolve::{ @@ -63,14 +64,42 @@ pub fn pprint_interchange_project( } else { println!("Usages:"); for usage in info.usage { - if excluded_iris.contains(&usage.resource) { - continue; - } - print!(" {}", usage.resource); - if let Some(v) = 
usage.version_constraint { - println!(" ({})", v); - } else { - println!(); + match &usage { + InterchangeProjectUsageG::Resource { + resource, + version_constraint, + } => { + if excluded_iris.contains(resource) { + continue; + } + print!(" {}", resource); + if let Some(v) = version_constraint { + println!(" ({})", v); + } else { + println!(); + } + } + InterchangeProjectUsageG::Url { + url, + publisher, + name, + } => todo!(), + InterchangeProjectUsageG::Path { + path, + publisher, + name, + } => todo!(), + InterchangeProjectUsageG::Git { + git, + id, + publisher, + name, + } => todo!(), + InterchangeProjectUsageG::Index { + publisher, + name, + version_constraint, + } => todo!(), } } } @@ -348,12 +377,38 @@ fn apply_get_info( Some( info.usage .into_iter() - .map(|usage| { - if let Some(version_constraint) = usage.version_constraint { - format!("{} ({})", usage.resource, version_constraint) - } else { - usage.resource.clone() + .map(|usage| match usage { + InterchangeProjectUsageG::Resource { + resource, + version_constraint, + } => { + if let Some(version_constraint) = version_constraint { + format!("{} ({})", resource, version_constraint) + } else { + resource.clone() + } } + InterchangeProjectUsageG::Url { + url, + publisher, + name, + } => todo!(), + InterchangeProjectUsageG::Path { + path, + publisher, + name, + } => todo!(), + InterchangeProjectUsageG::Git { + git, + id, + publisher, + name, + } => todo!(), + InterchangeProjectUsageG::Index { + publisher, + name, + version_constraint, + } => todo!(), }) .collect(), ), diff --git a/sysand/src/commands/remove.rs b/sysand/src/commands/remove.rs index 9f4fa54e..8368534f 100644 --- a/sysand/src/commands/remove.rs +++ b/sysand/src/commands/remove.rs @@ -7,6 +7,7 @@ use camino::Utf8PathBuf; use sysand_core::{ config::local_fs::{CONFIG_FILE, remove_project_source_from_config}, context::ProjectContext, + model::InterchangeProjectUsageG, remove::do_remove, }; @@ -35,33 +36,86 @@ pub fn command_remove>( let removed = 
"Removed"; let header = sysand_core::style::get_style_config().header; if let [usage] = usages.as_slice() { - match usage.version_constraint { - Some(ref vc) => { - log::info!( - "{header}{removed:>12}{header:#} `{}` with version constraints `{}`", - &usage.resource, - vc - ); - } - None => { - log::info!("{header}{removed:>12}{header:#} `{}`", &usage.resource,); - } - } - } else { - log::info!("{header}{removed:>12}{header:#}:"); - for usage in usages { - match usage.version_constraint { + match usage { + InterchangeProjectUsageG::Resource { + resource, + version_constraint, + } => match version_constraint { Some(vc) => { log::info!( - "{:>13} `{}` with version constraints `{}`", - ' ', - &usage.resource, + "{header}{removed:>12}{header:#} `{}` with version constraints `{}`", + resource, vc ); } None => { - log::info!("{:>13} `{}`", ' ', &usage.resource,); + log::info!("{header}{removed:>12}{header:#} `{}`", resource,); } + }, + InterchangeProjectUsageG::Url { + url, + publisher, + name, + } => todo!(), + InterchangeProjectUsageG::Path { + path, + publisher, + name, + } => todo!(), + InterchangeProjectUsageG::Git { + git, + id, + publisher, + name, + } => todo!(), + InterchangeProjectUsageG::Index { + publisher, + name, + version_constraint, + } => todo!(), + } + } else { + log::info!("{header}{removed:>12}{header:#}:"); + for usage in usages { + match usage { + InterchangeProjectUsageG::Resource { + resource, + version_constraint, + } => match version_constraint { + Some(vc) => { + log::info!( + "{:>13} `{}` with version constraints `{}`", + ' ', + resource, + vc + ); + } + None => { + log::info!("{:>13} `{}`", ' ', resource,); + } + }, + + InterchangeProjectUsageG::Url { + url, + publisher, + name, + } => todo!(), + InterchangeProjectUsageG::Path { + path, + publisher, + name, + } => todo!(), + InterchangeProjectUsageG::Git { + git, + id, + publisher, + name, + } => todo!(), + InterchangeProjectUsageG::Index { + publisher, + name, + version_constraint, + } => 
todo!(), } } } From a2c556a422c5ddb4dbfff1cb985801d1d571530f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andrius=20Puk=C5=A1ta?= Date: Wed, 1 Apr 2026 07:34:28 +0300 Subject: [PATCH 3/5] wip MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Andrius Pukšta --- core/src/env/local_directory/metadata.rs | 13 +- core/src/lock.rs | 6 + core/src/project/any.rs | 6 +- core/src/project/gix_git_download.rs | 469 +++++++++++++++-------- core/src/project/utils.rs | 2 - core/src/resolve/gix_git.rs | 8 +- sysand/src/cli.rs | 16 + sysand/src/commands/add.rs | 22 +- sysand/src/commands/env.rs | 2 +- 9 files changed, 354 insertions(+), 190 deletions(-) diff --git a/core/src/env/local_directory/metadata.rs b/core/src/env/local_directory/metadata.rs index 08d2c8b4..41c2e429 100644 --- a/core/src/env/local_directory/metadata.rs +++ b/core/src/env/local_directory/metadata.rs @@ -41,11 +41,7 @@ impl Lock { let mut metadata = EnvMetadata::default(); for (project, storage) in resolved_projects { - let usages = project - .usages - .iter() - .map(|usage| usage.resource.clone()) - .collect(); + let usages = project.usages.into_iter().map(Into::into).collect(); if let Some(storage) = storage { let project_path = wrapfs::canonicalize(storage.root_path())?; @@ -213,7 +209,12 @@ impl EnvMetadata { .expect("expected nominal path for project") .to_unix_path_buf(), identifiers, - usages: info.usage.into_iter().map(|u| u.resource).collect(), + usages: info + .usage + .into_iter() + // TODO: be more efficient, minimize calls to to_lock_usage() + .map(|u| u.to_lock_usage().into()) + .collect(), editable, workspace, }; diff --git a/core/src/lock.rs b/core/src/lock.rs index bf6c5017..1924c8d7 100644 --- a/core/src/lock.rs +++ b/core/src/lock.rs @@ -726,6 +726,12 @@ impl From for Usage { } } +impl From for String { + fn from(value: Usage) -> Self { + value.0 + } +} + impl From<&str> for Usage { fn from(value: &str) -> Self { Self(value.to_owned()) diff 
--git a/core/src/project/any.rs b/core/src/project/any.rs index 72847d26..f1a2afcc 100644 --- a/core/src/project/any.rs +++ b/core/src/project/any.rs @@ -17,7 +17,7 @@ use crate::{ project::{ AsSyncProjectTokio, ProjectRead, ProjectReadAsync, editable::EditableProject, - gix_git_download::{GixDownloadedError, GixDownloadedProject}, + gix_git_download::{GixDownloadedError, GixDownloadedProject, GixDownloadedProjectExact}, local_kpar::LocalKParProject, local_src::LocalSrcProject, reference::ProjectReference, @@ -35,7 +35,7 @@ pub enum AnyProject { LocalKpar(LocalKParProject), RemoteSrc(AsSyncProjectTokio>), RemoteKpar(AsSyncProjectTokio>), - RemoteGit(GixDownloadedProject), + RemoteGit(GixDownloadedProjectExact), } #[derive(Error, Debug)] @@ -111,7 +111,7 @@ impl AnyProject { rev, path, } => Ok(AnyProject::RemoteGit( - GixDownloadedProject::new(remote_git, Some(rev), path) + GixDownloadedProjectExact::new_download(remote_git, rev, path) .map_err(TryFromSourceError::RemoteGit)?, )), _ => Err(TryFromSourceError::UnsupportedSource(format!("{source:?}"))), diff --git a/core/src/project/gix_git_download.rs b/core/src/project/gix_git_download.rs index 40d20718..fa936f2a 100644 --- a/core/src/project/gix_git_download.rs +++ b/core/src/project/gix_git_download.rs @@ -3,7 +3,7 @@ use std::{ sync::atomic::AtomicBool, }; -use camino::Utf8PathBuf; +use camino::{Utf8Path, Utf8PathBuf}; use camino_tempfile::Utf8TempDir; use gix::{ prepare_clone, @@ -13,7 +13,9 @@ use gix::{ fetch::{self, Shallow}, }, }; +use serde::Deserialize; use thiserror::Error; +use walkdir::WalkDir; use crate::{ context::ProjectContext, @@ -28,15 +30,79 @@ use crate::{ use super::utils::{FsIoError, ProjectDeserializationError, ProjectSerializationError, wrapfs}; +// easyfind2 +// +// Uses of GixDownloadedProject: +// - resolver candidate project (i.e. legacy IRI) (unknown if remote exists/is a git repo). 
Known info: +// - URL +// Known after constructor: nothing new (should not clone in constructor) +// Needed functionality: +// - clone repo +// - impl ProjectRead -- so it's immutable when the actual repo clone happens and so cannot fill its fields +// - get sufficient info for lockfile: rev +// Need to carry: +// - repo URL (constructor does not clone) +// - path in repo +// - exact rev +// - sync. Known info: +// - URL, +// - path in repo, +// - rev +// Known after constructor (should clone in constructor, and therefore +// carry around info/meta): +// - (optional) publisher/name +// Needed functionality: +// - clone repo +// - impl ProjectRead +// Need to carry: +// - info/meta (info already read in lockfile, so might as well read meta) +// - path in repo +// - exact rev +// - new style usage. Known info: +// - URL +// - publisher +// - name +// - rev (optional, if none then assume latest) +// Known after constructor (should clone in constructor and therefore carry around info/meta): +// - path in repo +// - exact rev +// - (optional) publisher/name +// Needed functionality: +// - clone repo +// - impl ProjectRead +// Need to carry: +// - url (for lockfile) +// - info/meta (info already read in lockfile, so might as well read meta) +// - path in repo +// - exact rev +// +// Lockfile generation needs: +// - project info/meta +// - exact rev +// - path in repo (if any) + #[derive(Debug)] pub struct GixDownloadedProject { pub url: gix::Url, /// Before cloning: git rev to clone. /// After cloning: actual git rev, will match requested if given, /// otherwise the latest rev of the default branch. - rev: Option, + // rev: Option, /// path within the cloned repo where project resides - path: Option, + // path: Option, + tmp_dir: camino_tempfile::Utf8TempDir, + inner: LocalSrcProject, +} + +#[derive(Debug)] +pub struct GixDownloadedProjectExact { + url: String, + /// Git rev of the project. 
Will match the given one or be the + /// latest on the default branch if none is given + rev: String, + /// Path within the cloned repo where project resides. + /// If None, project is at root. + path: Option, tmp_dir: camino_tempfile::Utf8TempDir, inner: LocalSrcProject, } @@ -45,10 +111,10 @@ pub struct GixDownloadedProject { pub enum GixDownloadedError { #[error("git clone from `{0}` failed: {1}")] Clone(String, Box), - #[error("git bare repo init at `{0}` failed: {1}")] - Init(String, Box), - #[error("git remote `{0}` init failed: {1}")] - RemoteInit(String, Box), + // #[error("git bare repo init at `{0}` failed: {1}")] + // Init(String, Box), + // #[error("git remote `{0}` init failed: {1}")] + // RemoteInit(String, Box), #[error("failed to parse git URL `{0}`: {1}")] UrlParse(Box, Box), #[error(transparent)] @@ -69,6 +135,16 @@ pub enum GixDownloadedError { {0}" )] ImpossibleRelativePath(#[from] RelativizePathError), + #[error( + "project with publisher `{publisher}` and name `{name}`\n\ + not found in `{repo_url}` at rev {rev}" + )] + ProjectNotFound { + repo_url: Box, + rev: Box, + publisher: Box, + name: Box, + }, #[error("{0}")] Other(String), } @@ -97,48 +173,135 @@ impl From for GixDownloadedError { } } -impl GixDownloadedProject { - pub fn new>( +impl GixDownloadedProjectExact { + /// Immediately clone the repo and try to find the project publisher/name + pub fn new_download_find>( url: S, rev: Option, + publisher: impl AsRef, + name: impl AsRef, + ) -> Result { + let url = url.as_ref(); + let publisher = publisher.as_ref(); + let name = name.as_ref(); + let tmp_dir = camino_tempfile::tempdir().map_err(FsIoError::MkTempDir)?; + let repo = download_repo_to_temp(&tmp_dir, url, rev.as_deref())?; + + let rev = rev.unwrap_or_else(|| repo.head_commit().unwrap().id().to_string()); + + // TODO: find specified project in repo and convert to path + // TODO: Since gix provides a way to iterate over non-checked-out files, + // checkout may not be necessary. 
+ + // Check every `.project.json` file + for entry in WalkDir::new(tmp_dir.path()) + .into_iter() + .filter_entry(|entry| entry.file_name() == ".git") + { + match entry { + Ok(entry) => { + if !entry.file_type().is_file() || entry.path().ends_with(".project.json") { + continue; + } + let Some(path) = entry.path().to_str() else { + log::debug!( + "ignoring path `{}` as it contains invalid Unicode", + entry.path().display() + ); + continue; + }; + let info: InterchangeProjectInfoRaw = + match serde_json::from_reader(wrapfs::File::open(path)?) { + Ok(info) => info, + Err(e) => { + log::debug!( + "ignoring file `{}` due to error: {e}", + entry.path().display() + ); + continue; + } + }; + + if info.publisher.as_deref() == Some(publisher) && info.name == name { + // FOUND + // let mut canonical_temp = wrapfs::canonicalize(tmp_dir.path())?; + // Append path inside the repo, as it will be cloned to the temp dir + let downloaded_project = LocalSrcProject { + nominal_path: None, + project_path: entry.path().parent().unwrap().to_str().unwrap().into(), + }; + let path_in_repo = downloaded_project + .project_path + .strip_prefix(tmp_dir.path()) + .unwrap(); + return Ok(GixDownloadedProjectExact { + url: url.to_owned(), + rev, + path: if path_in_repo.as_str().is_empty() { + None + } else { + Some(path_in_repo.to_owned()) + }, + inner: downloaded_project, + tmp_dir, + }); + } + } + Err(e) => { + log::debug!("skipping path due to error: {e}"); + } + } + } + + Err(GixDownloadedError::ProjectNotFound { + repo_url: url.into(), + rev: rev.into(), + publisher: publisher.into(), + name: name.into(), + }) + } + + /// `path` must be relative path inside repo + pub fn new_download>( + url: S, + rev: String, path: Option, - ) -> Result { + ) -> Result { + let url = url.as_ref(); let tmp_dir = camino_tempfile::tempdir().map_err(FsIoError::MkTempDir)?; + let _repo = download_repo_to_temp(&tmp_dir, url, Some(&rev))?; - let mut canonical_temp = wrapfs::canonicalize(tmp_dir.path())?; - // 
Append path inside the repo, as it will be cloned to the temp dir - if let Some(p) = &path { - canonical_temp = canonical_temp.join(p); - } let downloaded_project = LocalSrcProject { nominal_path: None, - project_path: canonical_temp, + project_path: if let Some(p) = &path { + tmp_dir.path().join(p) + } else { + tmp_dir.path().into() + }, }; - Ok(GixDownloadedProject { - url: gix::url::parse(url.as_ref().into()) - .map_err(|e| GixDownloadedError::UrlParse(url.as_ref().into(), Box::new(e)))?, + Ok(GixDownloadedProjectExact { + url: url.to_owned(), rev, - path, + path: path.map(Into::into), inner: downloaded_project, tmp_dir, }) } +} - /// Immediately clone the repo and try to find the project publisher/name - pub fn new_download>( +impl GixDownloadedProject { + pub fn new>( url: S, - rev: Option, - publisher: impl AsRef, - name: impl AsRef, + // rev: Option, + // path: Option, ) -> Result { let tmp_dir = camino_tempfile::tempdir().map_err(FsIoError::MkTempDir)?; - Self::download_to_temp(&tmp_dir, url, rev.as_ref())?; let mut canonical_temp = wrapfs::canonicalize(tmp_dir.path())?; // Append path inside the repo, as it will be cloned to the temp dir - if let Some(p) = &path { - canonical_temp = canonical_temp.join(p); - } + // if let Some(p) = &path { + // canonical_temp = canonical_temp.join(p); + // } let downloaded_project = LocalSrcProject { nominal_path: None, project_path: canonical_temp, @@ -146,78 +309,13 @@ impl GixDownloadedProject { Ok(GixDownloadedProject { url: gix::url::parse(url.as_ref().into()) .map_err(|e| GixDownloadedError::UrlParse(url.as_ref().into(), Box::new(e)))?, - rev, - path, + // rev, + // path, inner: downloaded_project, tmp_dir, }) } - /// Clone the repo, the checkout `rev` (which must be a commit SHA1/256). 
- /// Adapted from gitoxide `main_worktree()`: - /// https://github.com/GitoxideLabs/gitoxide/blob/v0.52.0/gix/src/clone/checkout.rs#L85 - fn download_to_temp( - tmp_dir: &Utf8TempDir, - url: &str, - rev: Option<&str>, - ) -> Result<(), GixDownloadedError> { - if let Some(rev) = rev { - // Fetch all objects without checking out any files - let (repo, _) = gix::prepare_clone(url.clone(), tmp_dir.path()) - .unwrap() - .fetch_only(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) - .unwrap(); - - // Resolve the SHA to a commit, then get its tree - // We already checked that this is a valid SHA1/256 - let commit_id = gix::ObjectId::from_hex(rev.as_bytes()).unwrap(); - let tree_id = repo - .find_object(commit_id) - .unwrap() - .into_commit() - .tree_id() - .unwrap() - .detach(); - - // Build an index from that specific tree - let mut index = repo.index_from_tree(&tree_id).unwrap(); - - // Use IdMapping as attribute source: workdir is empty, read attrs from ODB - let mut opts = repo - .checkout_options(gix_worktree::stack::state::attributes::Source::IdMapping) - .unwrap(); - opts.destination_is_initially_empty = true; - - gix_worktree_state::checkout( - &mut index, - tmp_dir.path(), - repo.objects.clone().into_arc().unwrap(), - &gix::progress::Discard, - &gix::progress::Discard, - &gix::interrupt::IS_INTERRUPTED, - opts, - ) - .unwrap(); - - index.write(Default::default()).unwrap(); - } else { - let prepared_clone = prepare_clone(url.clone(), tmp_dir.path()) - .map_err(|e| GixDownloadedError::Clone(url.to_string(), Box::new(e)))?; - - let (mut prepare_checkout, _) = prepared_clone - .with_shallow(Shallow::DepthAtRemote(NonZero::new(1).unwrap())) - .fetch_then_checkout(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) - .map_err(|e| GixDownloadedError::Fetch(url.to_string(), Box::new(e)))?; - let (_repo, _) = prepare_checkout - .main_worktree(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) - .map_err(|e| 
GixDownloadedError::Checkout(tmp_dir.to_path_buf(), Box::new(e)))?; - - // TODO: get last commit SHA - } - - Ok(()) - } - // TODO: be more efficient. Git repos should be in user-level cache // and updated when needed fn ensure_downloaded(&self) -> Result<(), GixDownloadedError> { @@ -254,67 +352,80 @@ impl GixDownloadedProject { // Adapted from gitoxide `main_worktree()`: // https://github.com/GitoxideLabs/gitoxide/blob/v0.52.0/gix/src/clone/checkout.rs#L85 - if let Some(rev) = &self.rev { - // Fetch all objects without checking out any files - let (repo, _) = gix::prepare_clone(self.url.clone(), self.tmp_dir.path()) - .unwrap() - .fetch_only(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) - .unwrap(); - - // Resolve the SHA to a commit, then get its tree - // We already checked that this is a valid SHA1/256 - let commit_id = gix::ObjectId::from_hex(rev.as_bytes()).unwrap(); - let tree_id = repo - .find_object(commit_id) - .unwrap() - .into_commit() - .tree_id() - .unwrap() - .detach(); - - // Build an index from that specific tree - let mut index = repo.index_from_tree(&tree_id).unwrap(); - - // Use IdMapping as attribute source: workdir is empty, read attrs from ODB - let mut opts = repo - .checkout_options(gix_worktree::stack::state::attributes::Source::IdMapping) - .unwrap(); - opts.destination_is_initially_empty = true; - - gix_worktree_state::checkout( - &mut index, - self.tmp_dir.path(), - repo.objects.clone().into_arc().unwrap(), - &gix::progress::Discard, - &gix::progress::Discard, - &gix::interrupt::IS_INTERRUPTED, - opts, - ) - .unwrap(); - - index.write(Default::default()).unwrap(); - } else { - let prepared_clone = prepare_clone(self.url.clone(), self.tmp_dir.path()) - .map_err(|e| GixDownloadedError::Clone(self.url.to_string(), Box::new(e)))?; - - let (mut prepare_checkout, _) = prepared_clone - .with_shallow(Shallow::DepthAtRemote(NonZero::new(1).unwrap())) - .fetch_then_checkout(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) - 
.map_err(|e| GixDownloadedError::Fetch(self.url.to_string(), Box::new(e)))?; - let (_repo, _) = prepare_checkout - .main_worktree(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) - .map_err(|e| { - GixDownloadedError::Checkout(self.tmp_dir.to_path_buf(), Box::new(e)) - })?; - - // TODO: get last commit SHA - } + // TODO: avoid reparsing URL + download_repo_to_temp(&self.tmp_dir, &self.url.to_string(), None)?; } Ok(()) } } +/// Clone the repo, the checkout `rev` (which must be a commit SHA1/256). +/// Adapted from gitoxide `main_worktree()`: +/// https://github.com/GitoxideLabs/gitoxide/blob/v0.52.0/gix/src/clone/checkout.rs#L85 +fn download_repo_to_temp( + tmp_dir: &Utf8TempDir, + url: &str, + rev: Option<&str>, +) -> Result { + let repo = if let Some(rev) = rev { + // Fetch all objects without checking out any files + let (repo, _) = gix::prepare_clone(url, tmp_dir.path()) + .unwrap() + .fetch_only(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) + .unwrap(); + + // Resolve the SHA to a commit, then get its tree + // We already checked that this is a valid SHA1/256 + let commit_id = gix::ObjectId::from_hex(rev.as_bytes()).unwrap(); + let tree_id = repo + .find_object(commit_id) + .unwrap() + .into_commit() + .tree_id() + .unwrap() + .detach(); + + // Build an index from that specific tree + let mut index = repo.index_from_tree(&tree_id).unwrap(); + + // Use IdMapping as attribute source: workdir is empty, read attrs from ODB + let mut opts = repo + .checkout_options(gix_worktree::stack::state::attributes::Source::IdMapping) + .unwrap(); + opts.destination_is_initially_empty = true; + + gix_worktree_state::checkout( + &mut index, + tmp_dir.path(), + repo.objects.clone().into_arc().unwrap(), + &gix::progress::Discard, + &gix::progress::Discard, + &gix::interrupt::IS_INTERRUPTED, + opts, + ) + .unwrap(); + + index.write(Default::default()).unwrap(); + repo + } else { + let prepared_clone = prepare_clone(url.clone(), tmp_dir.path()) + .map_err(|e| 
GixDownloadedError::Clone(url.to_string(), Box::new(e)))?; + + let (mut prepare_checkout, _) = prepared_clone + .with_shallow(Shallow::DepthAtRemote(NonZero::new(1).unwrap())) + .fetch_then_checkout(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) + .map_err(|e| GixDownloadedError::Fetch(url.to_string(), Box::new(e)))?; + let (repo, _) = prepare_checkout + .main_worktree(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) + .map_err(|e| GixDownloadedError::Checkout(tmp_dir.to_path_buf(), Box::new(e)))?; + + repo + }; + + Ok(repo) +} + impl ProjectRead for GixDownloadedProject { type Error = GixDownloadedError; @@ -349,20 +460,52 @@ impl ProjectRead for GixDownloadedProject { // TODO: find a less hacky way to provide the SHA here, it should be saved when // repo is cloned fn sources(&self, _ctx: &ProjectContext) -> Result, Self::Error> { - // TODO: find a better way to obtain required SHA - let rev = if let Some(rev) = &self.rev { - rev.to_owned() - } else { - // If desired rev is not provided, use HEAD commit (i.e. current checked-out state) - let repo = gix::open(self.tmp_dir.path()).unwrap(); - - repo.head_commit().unwrap().id().to_string() - }; + // TODO: be more efficient + self.ensure_downloaded()?; + let repo = gix::open(self.tmp_dir.path()).unwrap(); + + let rev = repo.head_commit().unwrap().id().to_string(); Ok(vec![Source::RemoteGit { remote_git: self.url.to_string(), rev, - path: self.path.clone(), + path: None, + }]) + } +} + +impl ProjectRead for GixDownloadedProjectExact { + type Error = GixDownloadedError; + + fn get_project( + &self, + ) -> Result< + ( + Option, + Option, + ), + Self::Error, + > { + Ok(self.inner.get_project()?) 
+ } + + type SourceReader<'a> + = FileWithLifetime<'a> + where + Self: 'a; + + fn read_source>( + &self, + path: P, + ) -> Result, Self::Error> { + Ok(FileWithLifetime::new(self.inner.read_source(path)?)) + } + + fn sources(&self, _ctx: &ProjectContext) -> Result, Self::Error> { + Ok(vec![Source::RemoteGit { + remote_git: self.url.clone(), + rev: self.rev.clone(), + path: self.path.as_ref().map(|p| p.to_string()), }]) } } @@ -490,8 +633,8 @@ mod tests { let path = path.strip_prefix(r"\\?\").unwrap_or(path); let project = GixDownloadedProject::new( format!("file://{path}"), - Some(String::from_utf8(hex_commit_sha).unwrap()), - None, + // Some(String::from_utf8(hex_commit_sha).unwrap()), + // None, )?; let (Some(info), Some(meta)) = project.get_project()? else { diff --git a/core/src/project/utils.rs b/core/src/project/utils.rs index e3d06bbf..dcbc2332 100644 --- a/core/src/project/utils.rs +++ b/core/src/project/utils.rs @@ -11,8 +11,6 @@ use typed_path::Utf8UnixPathBuf; #[cfg(feature = "filesystem")] use zip::{self, result::ZipError}; -use std::io::{self, Read}; - // TODO: use newtype for identifier IRI pub fn make_identifier_iri(publisher: impl AsRef, name: impl AsRef) -> String { let publisher = publisher.as_ref(); diff --git a/core/src/resolve/gix_git.rs b/core/src/resolve/gix_git.rs index 47c85501..18f5a738 100644 --- a/core/src/resolve/gix_git.rs +++ b/core/src/resolve/gix_git.rs @@ -59,12 +59,8 @@ impl ResolveRead for GitResolver { Ok(ResolutionOutcome::Resolved(std::iter::once( // TODO: use trim_prefix() once it's stable - GixDownloadedProject::new( - uri.as_str().strip_prefix("git+").unwrap_or(uri.as_str()), - None, - None, - ) - .map_err(|e| e.into()), + GixDownloadedProject::new(uri.as_str().strip_prefix("git+").unwrap_or(uri.as_str())) + .map_err(|e| e.into()), ))) } diff --git a/sysand/src/cli.rs b/sysand/src/cli.rs index 1034a426..f0ca59fd 100644 --- a/sysand/src/cli.rs +++ b/sysand/src/cli.rs @@ -244,6 +244,22 @@ pub enum Command { }, /// Prints the 
root directory of the current project PrintRoot, + /// Experimental commands. Likely to change in incompatible ways or be + /// removed in the future. + #[clap(verbatim_doc_comment)] + Experimental { + #[command(subcommand)] + subcommand: Option, + }, +} + +pub enum ExpCommand { + Add { + + } + Remove { + + } } #[derive(clap::Args, Debug, Clone)] diff --git a/sysand/src/commands/add.rs b/sysand/src/commands/add.rs index 9194ca2b..1c73a92a 100644 --- a/sysand/src/commands/add.rs +++ b/sysand/src/commands/add.rs @@ -16,6 +16,7 @@ use sysand_core::{ local_fs::{CONFIG_FILE, add_project_source_to_config}, }, context::ProjectContext, + lock::Source, model::InterchangeProjectUsageRaw, project::{ ProjectRead, @@ -57,13 +58,13 @@ pub fn command_add( let source = if let Some(path) = source_opts.from_path { let metadata = wrapfs::metadata(&path)?; if metadata.is_dir() { - Some(sysand_core::lock::Source::LocalSrc { + Some(Source::LocalSrc { src_path: get_relative(path, current_project.root_path())? .as_str() .into(), }) } else if metadata.is_file() { - Some(sysand_core::lock::Source::LocalKpar { + Some(Source::LocalKpar { kpar_path: get_relative(path, current_project.root_path())? .as_str() .into(), @@ -120,35 +121,38 @@ pub fn command_add( } source } else if let Some(editable) = source_opts.as_editable { - Some(sysand_core::lock::Source::Editable { + Some(Source::Editable { editable: get_relative(editable, current_project.root_path())? .as_str() .into(), }) } else if let Some(src_path) = source_opts.as_local_src { - Some(sysand_core::lock::Source::LocalSrc { + Some(Source::LocalSrc { src_path: get_relative(src_path, current_project.root_path())? .as_str() .into(), }) } else if let Some(kpar_path) = source_opts.as_local_kpar { - Some(sysand_core::lock::Source::LocalKpar { + Some(Source::LocalKpar { kpar_path: get_relative(kpar_path, current_project.root_path())? 
.as_str() .into(), }) } else if let Some(remote_src) = source_opts.as_remote_src { - Some(sysand_core::lock::Source::RemoteSrc { + Some(Source::RemoteSrc { remote_src: remote_src.into_string(), }) } else if let Some(remote_kpar) = source_opts.as_remote_kpar { - Some(sysand_core::lock::Source::RemoteKpar { + Some(Source::RemoteKpar { remote_kpar: remote_kpar.into_string(), remote_kpar_size: None, }) + // TODO: make all --as-* use new-style usages unconditionally, otherwise will need two impl for them } else if let Some(remote_git) = source_opts.as_remote_git { - Some(sysand_core::lock::Source::RemoteGit { + Some(Source::RemoteGit { remote_git: remote_git.into_string(), + rev: todo!(), + path: todo!(), }) } else { None @@ -182,7 +186,7 @@ pub fn command_add( HashMap::default() }; - let usage_raw = InterchangeProjectUsageRaw { + let usage_raw = InterchangeProjectUsageRaw::Resource { resource: iri.to_owned(), version_constraint, }; diff --git a/sysand/src/commands/env.rs b/sysand/src/commands/env.rs index 2211066e..de580af5 100644 --- a/sysand/src/commands/env.rs +++ b/sysand/src/commands/env.rs @@ -136,7 +136,7 @@ pub fn command_env_install( )?; add_single_env_project(iri, version.to_string(), env)?; } else { - let usages = vec![InterchangeProjectUsage { + let usages = vec![InterchangeProjectUsage::Resource { resource: fluent_uri::Iri::from_str(iri.as_ref())?, version_constraint: version.map(|v| semver::VersionReq::parse(&v)).transpose()?, }]; From 5e02c6dc554b411ac68931670bb0c9a9dc1feb0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andrius=20Puk=C5=A1ta?= Date: Wed, 8 Apr 2026 07:43:08 +0300 Subject: [PATCH 4/5] wip MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Andrius Pukšta --- core/src/commands/info.rs | 36 ++-- core/src/commands/remove.rs | 44 ++++- core/src/env/mod.rs | 20 ++- core/src/lock.rs | 30 ++-- core/src/model.rs | 42 ++++- core/src/project/utils.rs | 62 ++++++- core/src/resolve/combined.rs | 35 ++-- 
core/src/resolve/env.rs | 18 +- core/src/resolve/file.rs | 13 +- core/src/resolve/gix_git.rs | 10 +- core/src/resolve/memory.rs | 7 +- core/src/resolve/mod.rs | 152 ++++++++-------- core/src/resolve/null.rs | 14 +- core/src/resolve/priority.rs | 58 ++++-- core/src/resolve/remote.rs | 31 +++- core/src/resolve/reqwest_http.rs | 7 +- core/src/resolve/sequential.rs | 21 ++- core/src/resolve/standard.rs | 8 +- core/src/resolve/typed_resolver.rs | 119 +++++++++++++ core/src/solve/pubgrub.rs | 5 +- sysand/src/cli.rs | 102 +++++++++-- sysand/src/commands/add.rs | 275 ++++++++++++++++++++++++++++- sysand/src/commands/clone.rs | 4 +- sysand/src/commands/remove.rs | 115 +++++++----- sysand/src/lib.rs | 60 ++++++- sysand/tests/cli_lock.rs | 2 +- 26 files changed, 1030 insertions(+), 260 deletions(-) create mode 100644 core/src/resolve/typed_resolver.rs diff --git a/core/src/commands/info.rs b/core/src/commands/info.rs index 3e4d866b..7e104a84 100644 --- a/core/src/commands/info.rs +++ b/core/src/commands/info.rs @@ -1,21 +1,32 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 +use camino::Utf8Path; use thiserror::Error; use crate::{ env::utils::ErrorBound, - model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + model::{ + InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw, InterchangeProjectUsage, + InterchangeProjectUsageRaw, InterchangeProjectValidationError, + }, project::ProjectRead, resolve::{ResolutionOutcome, ResolveRead}, }; #[derive(Error, Debug)] pub enum InfoError { - #[error("failed to resolve IRI `{0}`: {1}")] - NoResolve(Box, String), - #[error("IRI `{0}` is not supported: {1}")] - UnsupportedIri(Box, String), + #[error("cannot resolve usage: {0}")] + Unresolvable(String), + #[error("usage {0} is not supported: {1}")] + UnsupportedUsageType(InterchangeProjectUsage, String), + #[error("usage {0} is invalid: {1}")] + InvalidUsage( + InterchangeProjectUsageRaw, + 
InterchangeProjectValidationError, + ), + #[error("usage {0} was not found: {1}")] + NotFound(InterchangeProjectUsage, String), #[error("failure during resolution: {0}")] Resolution(#[from] Error), } @@ -58,10 +69,11 @@ pub fn do_info_project( } pub fn do_info, R: ResolveRead>( - uri: S, + usage: &InterchangeProjectUsageRaw, + base_path: Option>, resolver: &R, ) -> Result, InfoError> { - let outcome = resolver.resolve_read_raw(uri.as_ref())?; + let outcome = resolver.resolve_read_raw(usage, base_path)?; match outcome { ResolutionOutcome::Resolved(resolved) => { @@ -86,9 +98,13 @@ pub fn do_info, R: ResolveRead>( } Ok(result) } - ResolutionOutcome::UnsupportedIRIType(e) => { - Err(InfoError::UnsupportedIri(uri.as_ref().into(), e)) + ResolutionOutcome::UnsupportedUsageType { usage, reason } => { + Err(InfoError::UnsupportedUsageType(usage, reason)) } - ResolutionOutcome::Unresolvable(e) => Err(InfoError::NoResolve(uri.as_ref().into(), e)), + ResolutionOutcome::NotFound(usage, reason) => Err(InfoError::NotFound(usage, reason)), + ResolutionOutcome::InvalidUsage(usage, reason) => { + Err(InfoError::InvalidUsage(usage, reason)) + } + ResolutionOutcome::Unresolvable(msg) => Err(InfoError::Unresolvable(msg)), } } diff --git a/core/src/commands/remove.rs b/core/src/commands/remove.rs index 4352acb2..9a548d51 100644 --- a/core/src/commands/remove.rs +++ b/core/src/commands/remove.rs @@ -11,12 +11,14 @@ pub enum RemoveError { Project(ProjectError), #[error("could not find usage for `{0}`")] UsageNotFound(Box), - #[error("could not find project information for `{0}`")] - MissingInfo(Box), + #[error("could not find usage with publisher `{0}`, name `{1}`")] + ExperimentalUsageNotFound(Box, Box), + #[error("current project info was removed")] + MissingInfo, } pub fn do_remove>( - project: &mut P, + current_project: &mut P, iri: S, ) -> Result, RemoveError> { let removing = "Removing"; @@ -26,18 +28,48 @@ pub fn do_remove>( iri.as_ref() ); - if let Some(mut info) = 
project.get_info().map_err(RemoveError::Project)? { + if let Some(mut info) = current_project.get_info().map_err(RemoveError::Project)? { let popped = info.pop_usage(&iri.as_ref().to_string()); if popped.is_empty() { Err(RemoveError::UsageNotFound(iri.as_ref().into())) } else { - project + current_project .put_info(&info, true) .map_err(RemoveError::Project)?; Ok(popped) } } else { - Err(RemoveError::MissingInfo(iri.as_ref().into())) + Err(RemoveError::MissingInfo) + } +} + +pub fn do_remove_experimental( + current_project: &mut P, + publisher: impl AsRef, + name: impl AsRef, +) -> Result, RemoveError> { + let publisher = publisher.as_ref(); + let name = name.as_ref(); + let removing = "Removing"; + let header = crate::style::get_style_config().header; + log::info!("{header}{removing:>12}{header:#} project `{publisher}`/`{name}` from usages"); + + if let Some(mut info) = current_project.get_info().map_err(RemoveError::Project)? { + let popped = info.pop_usage_experimental(publisher, name); + + if popped.is_empty() { + Err(RemoveError::ExperimentalUsageNotFound( + publisher.into(), + name.into(), + )) + } else { + current_project + .put_info(&info, true) + .map_err(RemoveError::Project)?; + Ok(popped) + } + } else { + Err(RemoveError::MissingInfo) } } diff --git a/core/src/env/mod.rs b/core/src/env/mod.rs index 09092419..5b307aac 100644 --- a/core/src/env/mod.rs +++ b/core/src/env/mod.rs @@ -43,32 +43,34 @@ pub trait ReadEnvironment { fn uris(&self) -> Result; type VersionIter: IntoIterator>; - fn versions>(&self, uri: S) -> Result; + fn versions>(&self, identifier: S) -> Result; type InterchangeProjectRead: ProjectRead + Debug; fn get_project, T: AsRef>( &self, - uri: S, + // TODO: change this for the new env structure. Then every project will once again be + // identified by an IRI + identifier: S, version: T, ) -> Result; // Utilities - fn has>(&self, uri: S) -> Result { + fn has>(&self, identifier: S) -> Result { Ok(self .uris()? 
.into_iter() .filter_map(Result::ok) - .any(|u: String| u == uri.as_ref())) + .any(|id| id == identifier.as_ref())) } fn has_version, V: AsRef>( &self, - uri: S, + identifier: S, version: V, ) -> Result { Ok(self - .versions(&uri)? + .versions(identifier)? .into_iter() .filter_map(Result::ok) .any(|v: String| v == version.as_ref())) @@ -76,13 +78,13 @@ pub trait ReadEnvironment { fn candidate_projects>( &self, - uri: S, + identifier: S, ) -> Result, Self::ReadError> { - let versions: Result, _> = self.versions(&uri)?.into_iter().collect(); + let versions: Result, _> = self.versions(&identifier)?.into_iter().collect(); let projects: Result, _> = versions? .into_iter() - .map(|v| self.get_project(&uri, v)) + .map(|v| self.get_project(&identifier, v)) .collect(); projects diff --git a/core/src/lock.rs b/core/src/lock.rs index 1924c8d7..3662498b 100644 --- a/core/src/lock.rs +++ b/core/src/lock.rs @@ -21,7 +21,7 @@ use crate::{ env::ReadEnvironment, project::{ ProjectRead, - utils::{deserialize_unix_path, serialize_unix_path}, + utils::{Identifier, deserialize_unix_path, serialize_unix_path}, }, }; @@ -712,35 +712,35 @@ impl Source { } #[derive(Clone, Eq, Debug, PartialEq, Serialize, Deserialize, PartialOrd, Ord, Hash)] -pub struct Usage(String); +pub struct Usage(Identifier); impl Display for Usage { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str(&self.0) + write!(f, "{}", &self.0) } } -impl From for Usage { - fn from(value: String) -> Self { - Self(value) - } -} +// impl From for Usage { +// fn from(value: String) -> Self { +// Self(value) +// } +// } impl From for String { fn from(value: Usage) -> Self { - value.0 + value.0.into_string() } } -impl From<&str> for Usage { - fn from(value: &str) -> Self { - Self(value.to_owned()) - } -} +// impl From<&str> for Usage { +// fn from(value: &str) -> Self { +// Self(value.to_owned()) +// } +// } impl Usage { pub fn to_toml(&self) -> Value { - Value::from(&self.0) + 
Value::from(self.0.as_str()) } } diff --git a/core/src/model.rs b/core/src/model.rs index c4861c30..4aa11538 100644 --- a/core/src/model.rs +++ b/core/src/model.rs @@ -155,7 +155,9 @@ pub type InterchangeProjectUsageRaw = InterchangeProjectUsageG, semver::VersionReq, Utf8UnixPathBuf>; -impl Display for InterchangeProjectUsageRaw { +impl Display + for InterchangeProjectUsageG +{ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { InterchangeProjectUsageG::Resource { @@ -508,6 +510,30 @@ impl &mut self, resource: &Iri, ) -> Vec> { + self.usage + .extract_if(.., |u| { + if let InterchangeProjectUsageG::Resource { resource: r, .. } = u + && r == resource + { + true + } else { + false + } + }) + .collect() + } + + /// Remove and return all usages matching `publisher`/`name`. + /// Note that sysand will never add multiple usages of the same resource + /// to the project, but it does tolerate such usages. + // TODO: the spec does not say anything about this and should be clarified + pub fn pop_usage_experimental( + &mut self, + publisher: impl AsRef, + name: impl AsRef, + ) -> Vec> { + let p = publisher.as_ref(); + let n = name.as_ref(); self.usage .extract_if(.., |u| match u { // TODO: how to match here? Simplest would be to require the same info as for @@ -515,7 +541,19 @@ impl // some sort of separate "matcher" type that allows wildcarding everything // apart from: any sort of IRI/URL, publisher+name. // Then how to allow providing version (constraint) and possibly other matchers? - _ => todo!("this needs new design of pop_usage and CLI surface"), + InterchangeProjectUsageG::Resource { .. } => false, + InterchangeProjectUsageG::Url { + publisher, name, .. + } + | InterchangeProjectUsageG::Path { + publisher, name, .. + } + | InterchangeProjectUsageG::Git { + publisher, name, .. + } + | InterchangeProjectUsageG::Index { + publisher, name, .. 
+ } => publisher == p && name == n, }) .collect() } diff --git a/core/src/project/utils.rs b/core/src/project/utils.rs index dcbc2332..54d68962 100644 --- a/core/src/project/utils.rs +++ b/core/src/project/utils.rs @@ -1,16 +1,74 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 -use std::io::{self, Read}; +use std::{ + fmt::Display, + io::{self, Read}, +}; use camino::{Utf8Component, Utf8Path, Utf8PathBuf}; use fluent_uri::pct_enc::{EString, encoder::IData}; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use thiserror::Error; use typed_path::Utf8UnixPathBuf; #[cfg(feature = "filesystem")] use zip::{self, result::ZipError}; +use crate::model::InterchangeProjectUsage; + +/// Project identifier IRI. Constructed by +// TODO: steps +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] +pub struct Identifier(String); + +impl Identifier { + pub fn from_interchange_usage(usage: &InterchangeProjectUsage) -> Identifier { + let (publisher, name) = match usage { + InterchangeProjectUsage::Resource { resource, .. } => { + return Self(resource.to_string()); + } + InterchangeProjectUsage::Url { + publisher, name, .. + } + | InterchangeProjectUsage::Path { + publisher, name, .. + } + | InterchangeProjectUsage::Git { + publisher, name, .. + } + | InterchangeProjectUsage::Index { + publisher, name, .. 
+ } => (publisher, name), + }; + Self(make_identifier_iri(publisher, name)) + } + + pub fn as_str(&self) -> &str { + &self.0 + } + + pub fn into_string(self) -> String { + self.0 + } +} + +impl AsRef for Identifier { + fn as_ref(&self) -> &str { + &self.0 + } +} + +// impl From for Identifier { +// fn from(value: Usage) -> Self { +// } +// } + +impl Display for Identifier { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(&self.0) + } +} + // TODO: use newtype for identifier IRI pub fn make_identifier_iri(publisher: impl AsRef, name: impl AsRef) -> String { let publisher = publisher.as_ref(); diff --git a/core/src/resolve/combined.rs b/core/src/resolve/combined.rs index 2da3b5df..df985a65 100644 --- a/core/src/resolve/combined.rs +++ b/core/src/resolve/combined.rs @@ -3,6 +3,7 @@ use std::{fmt::Debug, iter::Peekable}; +use camino::Utf8Path; use indexmap::IndexMap; use thiserror::Error; use typed_path::Utf8UnixPath; @@ -11,7 +12,8 @@ use crate::{ context::ProjectContext, lock::Source, model::{ - InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw, ProjectHash, project_hash_raw, + InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw, InterchangeProjectUsage, + ProjectHash, project_hash_raw, }, project::{ProjectRead, cached::CachedProject}, resolve::{ResolutionOutcome, ResolveRead, null::NullResolver}, @@ -229,7 +231,8 @@ impl< fn resolve_read( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { let mut at_least_one_supports = false; @@ -238,10 +241,10 @@ impl< // TODO: autodetect git (and possibly other VCSs), and use appropriate (e.g. git) resolver for them. if let Some(file_resolver) = &self.file_resolver { match file_resolver - .resolve_read(uri) + .resolve_read(usage) .map_err(CombinedResolverError::File)? 
{ - ResolutionOutcome::UnsupportedIRIType(msg) => { + ResolutionOutcome::UnsupportedUsageType(msg) => { log::debug!("file resolver rejected IRI `{uri}`: {msg}"); } // Just continue ResolutionOutcome::Resolved(r) => { @@ -251,8 +254,8 @@ impl< locals: IndexMap::new(), })); } - ResolutionOutcome::Unresolvable(msg) => { - return Ok(ResolutionOutcome::Unresolvable(format!( + ResolutionOutcome::NotFound(msg) => { + return Ok(ResolutionOutcome::NotFound(format!( "failed to resolve as file: {msg}" ))); } @@ -294,10 +297,10 @@ impl< } } } - ResolutionOutcome::UnsupportedIRIType(msg) => { + ResolutionOutcome::UnsupportedUsageType(msg) => { log::debug!("local resolver rejected IRI `{uri}`: {msg}"); } - ResolutionOutcome::Unresolvable(msg) => { + ResolutionOutcome::NotFound(msg) => { at_least_one_supports = true; log::debug!("local resolver unable to resolve IRI `{uri}`: {msg}"); } @@ -313,10 +316,10 @@ impl< .resolve_read(uri) .map_err(CombinedResolverError::Remote)? { - ResolutionOutcome::UnsupportedIRIType(msg) => { + ResolutionOutcome::UnsupportedUsageType(msg) => { log::debug!("remote resolver rejected IRI `{uri}`: {msg}"); } - ResolutionOutcome::Unresolvable(msg) => { + ResolutionOutcome::NotFound(msg) => { at_least_one_supports = true; log::debug!("remote resolver unable to resolve IRI `{uri}`: {msg}"); } @@ -387,10 +390,10 @@ impl< locals, })); } - ResolutionOutcome::UnsupportedIRIType(msg) => { + ResolutionOutcome::UnsupportedUsageType(msg) => { log::debug!("registry resolver rejected IRI `{uri}` due to: {msg}"); } - ResolutionOutcome::Unresolvable(msg) => { + ResolutionOutcome::NotFound(msg) => { at_least_one_supports = true; log::debug!("registry resolver unable to resolve IRI `{uri}`: {msg}"); } @@ -399,11 +402,11 @@ impl< // As a last resort, use only locally cached projects, if any were found if !at_least_one_supports { - Ok(ResolutionOutcome::UnsupportedIRIType( + Ok(ResolutionOutcome::UnsupportedUsageType( "no resolver accepted the IRI".to_owned(), )) } else 
if locals.is_empty() { - Ok(ResolutionOutcome::Unresolvable( + Ok(ResolutionOutcome::NotFound( "no resolver was able to resolve the IRI".to_owned(), )) } else { @@ -676,7 +679,7 @@ mod tests { index_resolver: NO_RESOLVER, }; - let Ok(crate::resolve::ResolutionOutcome::UnsupportedIRIType(_)) = + let Ok(crate::resolve::ResolutionOutcome::UnsupportedUsageType(_)) = resolver.resolve_read_raw(example_uri) else { panic!() @@ -694,7 +697,7 @@ mod tests { index_resolver: empty_any_resolver(), }; - let Ok(crate::resolve::ResolutionOutcome::Unresolvable(_)) = + let Ok(crate::resolve::ResolutionOutcome::NotFound(_)) = resolver.resolve_read_raw(example_uri) else { panic!() diff --git a/core/src/resolve/env.rs b/core/src/resolve/env.rs index e9b7946c..69b2a65a 100644 --- a/core/src/resolve/env.rs +++ b/core/src/resolve/env.rs @@ -1,9 +1,12 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 +use camino::Utf8Path; + // Resolve IRIs in an environment use crate::{ env::{ReadEnvironment, ReadEnvironmentAsync}, + model::InterchangeProjectUsage, resolve::{ResolutionOutcome, ResolveRead, ResolveReadAsync}, }; @@ -21,7 +24,8 @@ impl ResolveRead for EnvResolver { fn resolve_read( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { let versions = self.env.versions(uri)?; @@ -34,9 +38,10 @@ impl ResolveRead for EnvResolver { ) .collect(); if projects.is_empty() { - Ok(ResolutionOutcome::Unresolvable(format!( - "no versions of `{uri}` found in environment" - ))) + Ok(ResolutionOutcome::NotFound( + usage.to_owned(), + String::from("no versions of `{uri}` found in environment"), + )) } else { Ok(ResolutionOutcome::Resolved(projects)) } @@ -59,13 +64,14 @@ impl ResolveReadAsync for EnvResolver { async fn resolve_read_async( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { use futures::StreamExt as _; let 
versions: Vec> = self.env.versions_async(uri).await?.collect().await; if versions.is_empty() { - return Ok(ResolutionOutcome::Unresolvable(format!( + return Ok(ResolutionOutcome::NotFound(format!( "no versions of `{uri}` found in environment" ))); } diff --git a/core/src/resolve/file.rs b/core/src/resolve/file.rs index 2b5d5666..d259b61c 100644 --- a/core/src/resolve/file.rs +++ b/core/src/resolve/file.rs @@ -88,7 +88,7 @@ impl FileResolver { if let Some(root_part) = &self.relative_path_root { root_part.join(&path) } else { - return Ok(ResolutionOutcome::UnsupportedIRIType(format!( + return Ok(ResolutionOutcome::UnsupportedUsageType(format!( "cannot resolve relative file without a specified root directory: {}", path ))); @@ -129,7 +129,7 @@ impl FileResolver { ) -> Result, FileResolverError> { match try_file_uri_to_path(uri)? { Some(path) => self.resolve_platform_path(path), - None => Ok(ResolutionOutcome::UnsupportedIRIType(format!( + None => Ok(ResolutionOutcome::UnsupportedUsageType(format!( "`{uri}` is not a file URL", ))), } @@ -278,7 +278,8 @@ impl ResolveRead for FileResolver { fn resolve_read( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { Ok(match self.resolve_general(uri)? 
{ ResolutionOutcome::Resolved(path) => ResolutionOutcome::Resolved(vec![ @@ -290,10 +291,10 @@ impl ResolveRead for FileResolver { LocalKParProject::new_guess_root(path)?, )), ]), - ResolutionOutcome::UnsupportedIRIType(msg) => { - ResolutionOutcome::UnsupportedIRIType(msg) + ResolutionOutcome::UnsupportedUsageType(msg) => { + ResolutionOutcome::UnsupportedUsageType(msg) } - ResolutionOutcome::Unresolvable(msg) => ResolutionOutcome::Unresolvable(msg), + ResolutionOutcome::NotFound(msg) => ResolutionOutcome::NotFound(msg), }) } } diff --git a/core/src/resolve/gix_git.rs b/core/src/resolve/gix_git.rs index 18f5a738..d03fc167 100644 --- a/core/src/resolve/gix_git.rs +++ b/core/src/resolve/gix_git.rs @@ -1,7 +1,9 @@ +use camino::Utf8Path; use fluent_uri::component::Scheme; use thiserror::Error; use crate::{ + model::InterchangeProjectUsageRaw, project::gix_git_download::{GixDownloadedError, GixDownloadedProject}, resolve::{ ResolutionOutcome, ResolveRead, @@ -34,7 +36,8 @@ impl ResolveRead for GitResolver { fn resolve_read( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { let scheme = uri.scheme(); @@ -50,7 +53,7 @@ impl ResolveRead for GitResolver { ] .contains(&scheme) { - return Ok(ResolutionOutcome::UnsupportedIRIType(format!( + return Ok(ResolutionOutcome::UnsupportedUsageType(format!( "url scheme `{}` of IRI `{}` is not known to be git-compatible", scheme, uri.as_str() @@ -66,7 +69,8 @@ impl ResolveRead for GitResolver { fn resolve_read_raw>( &self, - uri: S, + usage: &InterchangeProjectUsageRaw, + base_path: Option>, ) -> Result, Self::Error> { if let Some(stripped_uri) = uri.as_ref().strip_prefix("git+") { self.default_resolve_read_raw(stripped_uri) diff --git a/core/src/resolve/memory.rs b/core/src/resolve/memory.rs index a3ea2ecb..e95e8472 100644 --- a/core/src/resolve/memory.rs +++ b/core/src/resolve/memory.rs @@ -85,17 +85,18 @@ impl ResolveRead fn resolve_read( &self, - uri: &Iri, + usage: 
&InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { if !self.iri_predicate.accept_iri(uri) { - return Ok(ResolutionOutcome::UnsupportedIRIType(format!( + return Ok(ResolutionOutcome::UnsupportedUsageType(format!( "invalid IRI `{uri}` for this memory resolver" ))); } Ok(match self.projects.get(uri) { Some(xs) => ResolutionOutcome::Resolved(xs.iter().map(|x| Ok(x.clone())).collect()), - None => ResolutionOutcome::Unresolvable(uri.to_string()), + None => ResolutionOutcome::NotFound(uri.to_string()), }) } } diff --git a/core/src/resolve/mod.rs b/core/src/resolve/mod.rs index b2c0b988..ac8e3347 100644 --- a/core/src/resolve/mod.rs +++ b/core/src/resolve/mod.rs @@ -5,9 +5,13 @@ use std::{fmt::Debug, sync::Arc}; use crate::{ env::{SyncStreamIter, utils::ErrorBound}, + model::{ + InterchangeProjectUsage, InterchangeProjectUsageRaw, InterchangeProjectValidationError, + }, project::{AsAsyncProject, AsSyncProjectTokio, ProjectRead, ProjectReadAsync}, }; +use camino::Utf8Path; use futures::stream::StreamExt as _; pub mod combined; @@ -27,15 +31,27 @@ pub mod reqwest_http; pub mod sequential; #[cfg(all(feature = "filesystem", feature = "networking"))] pub mod standard; +pub mod typed_resolver; #[derive(Debug)] pub enum ResolutionOutcome { /// Successfully resolved a `T`. If `T` is a collection/iterator, /// it must contain at least one element Resolved(T), - /// Resolution failed due to an unsupported type of IRI - UnsupportedIRIType(String), - /// Resolution failed due to an invalid IRI that is in principle supported + /// Resolution failed due to an unsupported type of usage + UnsupportedUsageType { + usage: InterchangeProjectUsage, + reason: String, + }, + /// The supplied usage was invalid. 
Must not be used when InterchangeProjectUsage + /// (non-raw) is supplied + InvalidUsage( + InterchangeProjectUsageRaw, + InterchangeProjectValidationError, + ), + /// Usage was not found + NotFound(InterchangeProjectUsage, String), + /// Resolution failed due to an invalid usage that is in principle supported Unresolvable(String), } @@ -43,8 +59,12 @@ impl ResolutionOutcome { pub fn map U>(self, op: F) -> ResolutionOutcome { match self { Self::Resolved(t) => ResolutionOutcome::Resolved(op(t)), - Self::UnsupportedIRIType(e) => ResolutionOutcome::UnsupportedIRIType(e), - Self::Unresolvable(e) => ResolutionOutcome::Unresolvable(e), + Self::UnsupportedUsageType { usage, reason } => { + ResolutionOutcome::UnsupportedUsageType { usage, reason } + } + Self::InvalidUsage(usage, err) => ResolutionOutcome::InvalidUsage(usage, err), + Self::NotFound(usage, reason) => ResolutionOutcome::NotFound(usage, reason), + Self::Unresolvable(msg) => ResolutionOutcome::Unresolvable(msg), } } } @@ -57,29 +77,33 @@ pub trait ResolveRead { type ProjectStorage: ProjectRead; type ResolvedStorages: IntoIterator>; - fn default_resolve_read_raw>( + // TODO: move path-specific docs to FileResolver + /// `base_path` is absolute/relative to CWD path of the project to which this usage + /// belongs. Relative path usages will be resolved using `base_path` as base. 
+ /// If `base_path` is `None` and usage is a relative path, resolution will fail + fn default_resolve_read_raw( &self, - uri: S, + usage: &InterchangeProjectUsageRaw, + base_path: Option>, ) -> Result, Self::Error> { - match fluent_uri::Iri::parse(uri.as_ref().to_string()) { - Ok(uri) => self.resolve_read(&uri), - Err((err, val)) => Ok(ResolutionOutcome::UnsupportedIRIType(format!( - "unable to parse IRI `{}`: {}", - val, err - ))), + match usage.validate() { + Ok(u) => self.resolve_read(&u, base_path), + Err(err) => Ok(ResolutionOutcome::InvalidUsage(usage.to_owned(), err)), } } - fn resolve_read_raw>( + fn resolve_read_raw( &self, - uri: S, + usage: &InterchangeProjectUsageRaw, + base_path: Option>, ) -> Result, Self::Error> { - self.default_resolve_read_raw(uri) + self.default_resolve_read_raw(usage, base_path) } fn resolve_read( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error>; /// Treat this `ResolveRead` as a (trivial) `ResolveReadAsync` @@ -97,31 +121,31 @@ pub trait ResolveReadAsync { type ProjectStorage: ProjectReadAsync; type ResolvedStorages: futures::Stream>; - fn default_resolve_read_raw_async>( + fn default_resolve_read_raw_async( &self, - uri: S, + usage: &InterchangeProjectUsageRaw, + base_path: Option>, ) -> impl Future, Self::Error>> { async move { - match fluent_uri::Iri::parse(uri.as_ref().to_string()) { - Ok(uri) => self.resolve_read_async(&uri).await, - Err((err, val)) => Ok(ResolutionOutcome::UnsupportedIRIType(format!( - "unable to parse IRI `{}`: {}", - val, err - ))), + match usage.validate() { + Ok(u) => self.resolve_read_async(&u, base_path).await, + Err(err) => Ok(ResolutionOutcome::InvalidUsage(usage.to_owned(), err)), } } } - fn resolve_read_raw_async>( + fn resolve_read_raw_async( &self, - uri: S, + usage: &InterchangeProjectUsageRaw, + base_path: Option>, ) -> impl Future, Self::Error>> { - async move { self.default_resolve_read_raw_async(uri).await } + async move { 
self.default_resolve_read_raw_async(usage, base_path).await } } fn resolve_read_async( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> impl Future, Self::Error>>; // Maybe make this return an associated type instead? Would, for example, allow @@ -165,27 +189,19 @@ where >, >, >; - //futures::stream::Iter<<::ResolvedStorages as IntoIterator>::IntoIter>; async fn resolve_read_async( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { - Ok(match self.inner.resolve_read(uri)? { - ResolutionOutcome::Resolved(projects) => ResolutionOutcome::Resolved({ - let projects_map: std::iter::Map<_, fn(_) -> _> = projects - .into_iter() - .map(|proj| Ok(AsAsyncProject { inner: proj? })); - - futures::stream::iter(projects_map) - }), - ResolutionOutcome::UnsupportedIRIType(msg) => { - ResolutionOutcome::UnsupportedIRIType(msg) - } - ResolutionOutcome::Unresolvable(msg) => ResolutionOutcome::Unresolvable(msg), - }) - //let bar = foo.map(|x| futures::stream::iter(x.into_iter()); - //Ok(bar) + Ok(self.inner.resolve_read(usage, base_path)?.map(|storages| { + let projects_map: std::iter::Map<_, fn(_) -> _> = storages + .into_iter() + .map(|proj| Ok(AsAsyncProject { inner: proj? })); + + futures::stream::iter(projects_map) + })) } } @@ -222,31 +238,27 @@ where fn resolve_read( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { - Ok( - match self.runtime.block_on(self.inner.resolve_read_async(uri))? 
{ - ResolutionOutcome::Resolved(storages) => { - let runtime_clone = self.runtime.clone(); - - let inner: futures::stream::Map<_, Box _>> = - storages.map(Box::new(move |project| { - Ok(AsSyncProjectTokio { - runtime: runtime_clone.clone(), - inner: project?, - }) - })); - - ResolutionOutcome::Resolved(SyncStreamIter { - runtime: self.runtime.clone(), - inner, - }) - } - ResolutionOutcome::UnsupportedIRIType(msg) => { - ResolutionOutcome::UnsupportedIRIType(msg) + Ok(self + .runtime + .block_on(self.inner.resolve_read_async(usage, base_path))? + .map(|storages| { + let runtime_clone = self.runtime.clone(); + + let inner: futures::stream::Map<_, Box _>> = + storages.map(Box::new(move |project| { + Ok(AsSyncProjectTokio { + runtime: runtime_clone.clone(), + inner: project?, + }) + })); + + SyncStreamIter { + runtime: self.runtime.clone(), + inner, } - ResolutionOutcome::Unresolvable(msg) => ResolutionOutcome::Unresolvable(msg), - }, - ) + })) } } diff --git a/core/src/resolve/null.rs b/core/src/resolve/null.rs index c804c878..2e7b8384 100644 --- a/core/src/resolve/null.rs +++ b/core/src/resolve/null.rs @@ -3,7 +3,9 @@ use std::convert::Infallible; -use crate::{project::null::NullProject, resolve::ResolveRead}; +use camino::Utf8Path; + +use crate::{model::InterchangeProjectUsage, project::null::NullProject, resolve::ResolveRead}; #[derive(Debug)] pub struct NullResolver {} @@ -17,10 +19,12 @@ impl ResolveRead for NullResolver { fn resolve_read( &self, - _uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + _base_path: Option>, ) -> Result, Self::Error> { - Ok(super::ResolutionOutcome::UnsupportedIRIType( - "null resolver".to_string(), - )) + Ok(super::ResolutionOutcome::UnsupportedUsageType { + usage: usage.to_owned(), + reason: "null resolver".to_string(), + }) } } diff --git a/core/src/resolve/priority.rs b/core/src/resolve/priority.rs index 163a03eb..f1a5d14e 100644 --- a/core/src/resolve/priority.rs +++ b/core/src/resolve/priority.rs @@ -3,13 +3,14 @@ use 
std::{ io::{self, Read}, }; +use camino::Utf8Path; use thiserror::Error; use crate::{ context::ProjectContext, env::utils::ErrorBound, lock::Source, - model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw, InterchangeProjectUsage}, project::ProjectRead, resolve::ResolveRead, }; @@ -173,11 +174,12 @@ impl ResolveRead for PriorityResolver, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { match self .higher - .resolve_read(uri) + .resolve_read(usage, base_path.as_ref()) .map_err(PriorityError::Higher)? { ResolutionOutcome::Resolved(resolved) => { @@ -185,17 +187,21 @@ impl ResolveRead for PriorityResolver { - log::debug!("higher priority resolver rejected IRI: {msg}") + ResolutionOutcome::UnsupportedUsageType { usage, reason } => { + log::debug!("higher priority resolver rejected usage {usage}: {reason}") + } + ResolutionOutcome::NotFound(usage, reason) => { + log::debug!("higher priority resolver did not find usage {usage}: {reason}") } ResolutionOutcome::Unresolvable(msg) => { - log::debug!("higher priority resolver failed to resolve IRI: {msg}") + log::debug!("cannot resolve usage: {msg}") } + ResolutionOutcome::InvalidUsage(..) => unreachable!(), }; Ok(self .lower - .resolve_read(uri) + .resolve_read(usage, base_path) .map_err(PriorityError::Lower)? 
.map(|resolved| PriorityIterator::LowerIterator(resolved.into_iter()))) } @@ -209,7 +215,9 @@ mod tests { use indexmap::IndexMap; use crate::{ - model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + model::{ + InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw, InterchangeProjectUsageRaw, + }, project::{ProjectRead as _, memory::InMemoryProject}, resolve::{ ResolutionOutcome, ResolveRead, @@ -259,11 +267,12 @@ mod tests { } } - fn expect_to_resolve>( + fn expect_to_resolve( resolver: &R, - uri: S, + usage: &InterchangeProjectUsageRaw, + base_path: Option<&str>, ) -> Vec { - let resolved = resolver.resolve_read_raw(uri).unwrap(); + let resolved = resolver.resolve_read_raw(usage, base_path).unwrap(); let foo_projects: Result, _> = if let ResolutionOutcome::Resolved(foo_projects) = resolved { @@ -289,17 +298,38 @@ mod tests { let resolver = super::PriorityResolver::new(higher, lower); - let foos = expect_to_resolve(&resolver, "urn:kpar:foo"); + let foos = expect_to_resolve( + &resolver, + &crate::model::InterchangeProjectUsageRaw::Resource { + resource: String::from("urn:kpar:foo"), + version_constraint: None, + }, + None, + ); assert_eq!(foos.len(), 1); assert_eq!(foos[0].version().unwrap(), Some("1.2.3".to_string())); - let bars = expect_to_resolve(&resolver, "urn:kpar:bar"); + let bars = expect_to_resolve( + &resolver, + &crate::model::InterchangeProjectUsageRaw::Resource { + resource: String::from("urn:kpar:bar"), + version_constraint: None, + }, + None, + ); assert_eq!(bars.len(), 1); assert_eq!(bars[0].version().unwrap(), Some("1.2.3".to_string())); - let bazs = expect_to_resolve(&resolver, "urn:kpar:baz"); + let bazs = expect_to_resolve( + &resolver, + &crate::model::InterchangeProjectUsageRaw::Resource { + resource: String::from("urn:kpar:baz"), + version_constraint: None, + }, + None, + ); assert_eq!(bazs.len(), 1); assert_eq!(bazs[0].version().unwrap(), Some("3.2.1".to_string())); diff --git a/core/src/resolve/remote.rs 
b/core/src/resolve/remote.rs index 0b33532c..16a1af42 100644 --- a/core/src/resolve/remote.rs +++ b/core/src/resolve/remote.rs @@ -1,10 +1,12 @@ use std::io::{self, Read}; +use camino::Utf8Path; use thiserror::Error; use crate::{ context::ProjectContext, lock::Source, + model::InterchangeProjectUsage, project::ProjectRead, resolve::{ResolveRead, null::NullResolver}, }; @@ -213,22 +215,28 @@ impl ResolveRead fn resolve_read( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { let resolved_http = if let Some(http_resolver) = &self.http_resolver { match http_resolver - .resolve_read(uri) + .resolve_read(usage, base_path.as_ref()) .map_err(RemoteResolverError::HTTPResolver)? { ResolutionOutcome::Resolved(resolved) => Some(resolved.into_iter()), - ResolutionOutcome::UnsupportedIRIType(msg) => { - log::debug!("HTTP resolver rejected IRI: {msg}"); + ResolutionOutcome::UnsupportedUsageType { usage, reason } => { + log::debug!("HTTP resolver rejected usage {usage}: {reason}"); + None + } + ResolutionOutcome::NotFound(usage, reason) => { + log::debug!("HTTP resolver did not find usage {usage}: {reason}"); None } ResolutionOutcome::Unresolvable(msg) => { - log::debug!("HTTP resolver failed to resolve IRI: {msg}"); + log::debug!("HTTP resolver refused to resolve usage: {msg}"); None } + ResolutionOutcome::InvalidUsage(..) => unreachable!(), } } else { None @@ -236,18 +244,23 @@ impl ResolveRead let resolved_git = if let Some(git_resolver) = &self.git_resolver { match git_resolver - .resolve_read(uri) + .resolve_read(usage, base_path) .map_err(RemoteResolverError::GitResolver)? 
{ ResolutionOutcome::Resolved(resolved) => Some(resolved.into_iter()), - ResolutionOutcome::UnsupportedIRIType(msg) => { - log::debug!("git resolver rejected IRI: {msg}"); + ResolutionOutcome::UnsupportedUsageType { usage, reason } => { + log::debug!("git resolver rejected usage {usage}: {reason}"); + None + } + ResolutionOutcome::NotFound(usage, reason) => { + log::debug!("git resolver did not find usage {usage}: {reason}"); None } ResolutionOutcome::Unresolvable(msg) => { - log::debug!("git resolver failed to resolve IRI: {msg}"); + log::debug!("git resolver refused to resolve usage: {msg}"); None } + ResolutionOutcome::InvalidUsage(..) => unreachable!(), } } else { None diff --git a/core/src/resolve/reqwest_http.rs b/core/src/resolve/reqwest_http.rs index 48a1ae76..18ff7876 100644 --- a/core/src/resolve/reqwest_http.rs +++ b/core/src/resolve/reqwest_http.rs @@ -265,7 +265,8 @@ impl ResolveReadAsync for HTTPResolverAsync async fn resolve_read_async( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { // Try to resolve as a HTTP src project. 
Ok( @@ -281,10 +282,10 @@ impl ResolveReadAsync for HTTPResolverAsync // prefer_ranged: self.prefer_ranged, })) } else { - ResolutionOutcome::UnsupportedIRIType("invalid http(s) URL".to_string()) + ResolutionOutcome::UnsupportedUsageType("invalid http(s) URL".to_string()) } } else { - ResolutionOutcome::UnsupportedIRIType("not an http(s) URL".to_string()) + ResolutionOutcome::UnsupportedUsageType("not an http(s) URL".to_string()) }, ) } diff --git a/core/src/resolve/sequential.rs b/core/src/resolve/sequential.rs index 84f2b263..2df3d1d7 100644 --- a/core/src/resolve/sequential.rs +++ b/core/src/resolve/sequential.rs @@ -27,7 +27,8 @@ impl ResolveRead for SequentialResolver { fn resolve_read( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { let mut iters = vec![]; let mut any_supported = false; @@ -39,10 +40,10 @@ impl ResolveRead for SequentialResolver { any_supported = true; iters.push(storages) } - ResolutionOutcome::UnsupportedIRIType(msg) => { + ResolutionOutcome::UnsupportedUsageType(msg) => { msgs.push(msg); } - ResolutionOutcome::Unresolvable(msg) => { + ResolutionOutcome::NotFound(msg) => { any_supported = true; msgs.push(msg); } @@ -52,12 +53,12 @@ impl ResolveRead for SequentialResolver { if !iters.is_empty() { Ok(ResolutionOutcome::Resolved(iters.into_iter().flatten())) } else if any_supported { - Ok(ResolutionOutcome::Unresolvable(format!( + Ok(ResolutionOutcome::NotFound(format!( "unresolvable: {:?}", msgs ))) } else { - Ok(ResolutionOutcome::UnsupportedIRIType(format!( + Ok(ResolutionOutcome::UnsupportedUsageType(format!( "unsupported IRI: {:?}", msgs ))) @@ -76,7 +77,8 @@ impl ResolveReadAsync for SequentialResolver { async fn resolve_read_async( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { let outcomes = futures::future::join_all( self.inner @@ -95,10 +97,10 @@ impl ResolveReadAsync for SequentialResolver { 
any_supported = true; streams.push(storages) } - ResolutionOutcome::UnsupportedIRIType(msg) => { + ResolutionOutcome::UnsupportedUsageType(msg) => { msgs.push(msg); } - ResolutionOutcome::Unresolvable(msg) => { + ResolutionOutcome::NotFound(msg) => { any_supported = true; msgs.push(msg); } @@ -110,12 +112,13 @@ impl ResolveReadAsync for SequentialResolver { futures::stream::iter(streams).flatten(), )) } else if any_supported { + // TODO: use NotFound? Ok(ResolutionOutcome::Unresolvable(format!( "unresolvable: {:?}", msgs ))) } else { - Ok(ResolutionOutcome::UnsupportedIRIType(format!( + Ok(ResolutionOutcome::UnsupportedUsageType(format!( "unsupported IRI: {:?}", msgs ))) diff --git a/core/src/resolve/standard.rs b/core/src/resolve/standard.rs index fd78416f..9c7b8bf0 100644 --- a/core/src/resolve/standard.rs +++ b/core/src/resolve/standard.rs @@ -3,12 +3,13 @@ use std::{fmt, result::Result, sync::Arc}; -use camino::Utf8PathBuf; +use camino::{Utf8Path, Utf8PathBuf}; use reqwest_middleware::ClientWithMiddleware; use crate::{ auth::HTTPAuthentication, env::{local_directory::LocalDirectoryEnvironment, reqwest_http::HTTPEnvironmentAsync}, + model::InterchangeProjectUsage, resolve::{ AsSyncResolveTokio, ResolveRead, ResolveReadAsync, combined::CombinedResolver, @@ -50,9 +51,10 @@ impl ResolveRead for StandardResolver { fn resolve_read( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, Self::Error> { - self.0.resolve_read(uri) + self.0.resolve_read(usage, base_path) } } diff --git a/core/src/resolve/typed_resolver.rs b/core/src/resolve/typed_resolver.rs new file mode 100644 index 00000000..57fc124a --- /dev/null +++ b/core/src/resolve/typed_resolver.rs @@ -0,0 +1,119 @@ +use url::Url; + +use crate::{ + auth::HTTPAuthentication, + env::utils::ErrorBound, + model::{InterchangeProjectUsage, InterchangeProjectUsageG}, + project::ProjectRead, + resolve::{ + ResolutionOutcome, ResolveRead, + file::FileResolver, + 
gix_git::GitResolver, + reqwest_http::HTTPResolverAsync, + standard::{RemoteIndexResolver, StandardResolver}, + }, +}; + +// TODO: maybe adapt CombinedResolver for this use case? +/// A resolver for resolving specific dependency types: +/// - URL: schemes `file:`/`http(s)` +/// - path: relative path; this will only work for local projects, for remote projects +/// it doesn't make sense +/// - git: schemes `git:`/`ssh:` +/// - index +/// - resource: uses StandardResolver +pub struct TypedResolver { + file: FileResolver, + http: HTTPResolverAsync, + git: GitResolver, + index: RemoteIndexResolver, + resource: StandardResolver, +} + +// easyfind3 +// +// Resolver design questions: +// - should resolvers be split; if so, how?: +// - use unified for any usage +// - split out resource from specific types +// - split out every type +// - interface: +// - modify ResolveRead to take: +// - InterchangeProjectUsage +// - implementor-specific type +// - add inherent methods for each resolver, taking: +// - specific type +// - InterchangeProjectUsage (+ base path for path resolver) +// - should both kpar and src be supported for specific types? 
+// - resource will continue to support what it does +// - git: currently only src +// - http: currently supported +// - local (file url/path): currently supported +// - index: currently supported, new index will not support, so only kpar support is fine +// (provided that env->index is convenient) +// - interface style: +// - take broad, return err if wrong type +// - take specific (impractical to differentiate by type for resource) +// - take broad, return ResolutionOutcome::Unresolvable for wrong type (current impl for resource) +// - separate method for checking if supplied is acceptable - multiple calls for +// every resolve needed +// +// What about path usages: +// - allow absolute and relative +// - match Cargo, it resolves path usages taking a base path to be the Cargo.toml that +// declared them, not the root Cargo.toml +// - then dep resolution has to be modified to have project path (if known) for +// each project in dependency graph, at least for the time its direct usages +// are resolved +// - remote projects can't resolve path usages (no matter relative or absolute) +// - should both kpar and src be supported? 
+impl TypedResolver { + pub fn new( + file: FileResolver, + http: HTTPResolverAsync, + git: GitResolver, + index: RemoteIndexResolver, + resource: StandardResolver, + ) -> Self { + Self { + file, + http, + git, + index, + resource, + } + } + + pub fn resolve( + &self, + usage: &InterchangeProjectUsage, + ) -> Result, impl ErrorBound> { + match usage { + InterchangeProjectUsage::Resource { + resource, + version_constraint, + } => self.resource.resolve_read(resource), + InterchangeProjectUsage::Url { + url, + publisher, + name, + } => todo!(), + InterchangeProjectUsage::Path { + path, + publisher, + name, + } => todo!(), + InterchangeProjectUsage::Git { + git, + id, + publisher, + name, + } => todo!(), + InterchangeProjectUsage::Index { + publisher, + name, + version_constraint, + } => todo!(), + } + } +} diff --git a/core/src/solve/pubgrub.rs b/core/src/solve/pubgrub.rs index 33a801d3..88797e27 100644 --- a/core/src/solve/pubgrub.rs +++ b/core/src/solve/pubgrub.rs @@ -216,12 +216,12 @@ fn resolve_candidates( .resolve_read(uri) .map_err(InternalSolverError::Resolution)? 
{ - crate::resolve::ResolutionOutcome::UnsupportedIRIType(msg) => { + crate::resolve::ResolutionOutcome::UnsupportedUsageType(msg) => { return Err(InternalSolverError::UnsupportedIriType(format!( "unsupported IRI type of `{uri}`: {msg}" ))); } - crate::resolve::ResolutionOutcome::Unresolvable(msg) => { + crate::resolve::ResolutionOutcome::NotFound(msg) => { return Err(InternalSolverError::NotFound(uri.as_str().into(), msg)); } crate::resolve::ResolutionOutcome::Resolved(alternatives) => { @@ -280,6 +280,7 @@ fn resolve_candidates( fn compute_deps( resolver: &R, + url_resolver: // TODO: URL may be file:, in that case we need FileResolver usages: &Vec, cache: &mut ResolvedCandidates, ) -> Result< diff --git a/sysand/src/cli.rs b/sysand/src/cli.rs index f0ca59fd..b6544a59 100644 --- a/sysand/src/cli.rs +++ b/sysand/src/cli.rs @@ -212,7 +212,7 @@ pub enum Command { visible_alias = "url", group = "location" )] - iri: Option>, + iri: Option>, /// Use the project with the given locator, trying to parse it as /// an IRI/URI/URL and otherwise falling back to using it as a path #[arg( @@ -249,17 +249,89 @@ pub enum Command { #[clap(verbatim_doc_comment)] Experimental { #[command(subcommand)] - subcommand: Option, + subcommand: ExpCommand, }, } +#[derive(clap::Subcommand, Debug, Clone)] pub enum ExpCommand { + /// Add a usage Add { - - } - Remove { - - } + #[command(subcommand)] + locator: ExpAddProjectLocatorArgs, + #[command(flatten)] + resolution_opts: ResolutionOptions, + }, + /// Remove a usage + Remove { publisher: String, name: String }, +} + +#[derive(clap::Subcommand, Debug, Clone)] +#[group(id = "expadd", required = true, multiple = false)] +pub enum ExpAddProjectLocatorArgs { + /// Add a project from HTTP(S) URL + Url { + /// Publisher of the project + publisher: String, + /// Name of the project + name: String, + /// URL of the project. Can point to a KPAR or a project directory + url: Iri, + }, + // TODO: does it make sense to allow kpar or src? 
+ /// Add a project from a local path + #[clap(verbatim_doc_comment)] + Path { + /// Publisher of the project + publisher: String, + /// Name of the project + name: String, + /// Path to the project. Can be relative or absolute, and can point + /// to either a KPAR or a project directory + #[clap(verbatim_doc_comment)] + path: Utf8PathBuf, + }, + /// Add a project from an index + Index { + /// Publisher of the project + publisher: String, + /// Name of the project + name: String, + /// Version constraint + // TODO: make this optional and default to latest stable version, like Cargo + version_constraint: VersionReq, + }, + /// Add a project from a git repository. + #[clap(verbatim_doc_comment)] + Git { + /// Publisher of the project + publisher: String, + /// Name of the project. Publisher and name + /// identify the project anywhere within the repository + #[clap(verbatim_doc_comment)] + name: String, + /// URL of the repository. If none of the `rev`/`tag`/`branch` are given, + /// latest rev of the default branch will be used. + #[clap(value_name = "URL", verbatim_doc_comment)] + git: Iri, + #[command(flatten)] + options: ExpGitOptions, + }, +} + +// TODO: make specifying these optional and infer +#[derive(clap::Args, Debug, Clone)] +#[group(required = true, multiple = false)] +pub struct ExpGitOptions { + /// Git revision, i.e. full commit hash + #[arg(long, default_value = None, conflicts_with_all = ["tag", "branch"])] + pub rev: Option, + /// Git tag + #[arg(long, default_value = None, conflicts_with_all = ["rev", "branch"])] + pub tag: Option, + /// Git branch. 
Will use latest revision (commit) of that branch + #[arg(long, default_value = None, conflicts_with_all = ["rev", "tag"])] + pub branch: Option, } #[derive(clap::Args, Debug, Clone)] @@ -267,7 +339,7 @@ pub enum ExpCommand { pub struct AddProjectLocatorArgs { /// IRI/URI/URL identifying the project to be used #[clap(default_value = None, value_parser = parse_iri_suggest_path)] - pub iri: Option>, + pub iri: Option>, /// Path to the project to be added. Since every usage is identified /// by an IRI, `file://` URL will be used to refer to the project. /// Warning: using this makes the project not portable between different @@ -287,7 +359,7 @@ pub struct AddProjectLocatorArgs { pub struct RemoveProjectLocatorArgs { /// IRI identifying the project usage to be removed #[clap(default_value = None, value_parser = parse_iri_suggest_path)] - pub iri: Option>, + pub iri: Option>, /// Path to the project to be removed from usages. Since every usage is /// identified by an IRI, the path will be transformed into a `file://` URL #[arg( @@ -312,7 +384,7 @@ pub struct CloneProjectLocatorArgs { pub auto_location: Option, /// IRI/URI/URL identifying the project to be cloned #[arg(short = 'i', long, visible_alias = "uri", visible_alias = "url")] - pub iri: Option>, + pub iri: Option>, /// Path to clone the project from. If version is also /// given, verifies that the project has the given version // TODO: allow somehow requiring to use git here @@ -556,7 +628,7 @@ pub enum InfoCommand { Website { /// Set the website. Must be a valid IRI/URI/URL #[arg(long, value_name = "URI", value_parser = parse_https_iri, default_value=None)] - set: Option>, + set: Option>, #[arg(long, default_value = None)] clear: bool, // Only for better error messages @@ -1338,7 +1410,7 @@ pub enum EnvCommand { /// Install project in `sysand_env` Install { /// IRI identifying the project to be installed - iri: fluent_uri::Iri, + iri: Iri, /// Version to be installed. 
Defaults to the latest /// version according to SemVer 2.0, ignoring pre-releases #[clap(verbatim_doc_comment)] @@ -1355,7 +1427,7 @@ pub enum EnvCommand { /// Uninstall project in `sysand_env` Uninstall { /// IRI identifying the project to be uninstalled - iri: fluent_uri::Iri, + iri: Iri, /// Version to be uninstalled version: Option, }, @@ -1368,7 +1440,7 @@ pub enum EnvCommand { /// IRI of the (already installed) project for which /// to enumerate source files #[clap(verbatim_doc_comment)] - iri: fluent_uri::Iri, + iri: Iri, /// Version of project to list sources for version: Option, @@ -1527,7 +1599,7 @@ pub struct GlobalOptions { /// Parse an IRI. Tolerates missing IRI scheme, uses /// `https://` scheme in that case. -fn parse_https_iri(s: &str) -> Result, fluent_uri::ParseError> { +fn parse_https_iri(s: &str) -> Result, fluent_uri::ParseError> { use fluent_uri::Iri; Iri::parse(s).map(Into::into).or_else(|original_err| { diff --git a/sysand/src/commands/add.rs b/sysand/src/commands/add.rs index 1c73a92a..87229648 100644 --- a/sysand/src/commands/add.rs +++ b/sysand/src/commands/add.rs @@ -17,7 +17,7 @@ use sysand_core::{ }, context::ProjectContext, lock::Source, - model::InterchangeProjectUsageRaw, + model::{GitId, InterchangeProjectUsageRaw}, project::{ ProjectRead, utils::{relativize_path, wrapfs}, @@ -27,7 +27,7 @@ use sysand_core::{ use crate::{ CliError, DEFAULT_INDEX_URL, - cli::{ProjectSourceOptions, ResolutionOptions}, + cli::{ExpAddProjectLocatorArgs, ProjectSourceOptions, ResolutionOptions}, commands::{lock::create_resolver, sync::command_sync}, }; @@ -111,8 +111,8 @@ pub fn command_add( } } } - ResolutionOutcome::UnsupportedIRIType(e) => bail!("unsupported URL `{url}`:\n{e}"), - ResolutionOutcome::Unresolvable(e) => { + ResolutionOutcome::UnsupportedUsageType(e) => bail!("unsupported URL `{url}`:\n{e}"), + ResolutionOutcome::NotFound(e) => { bail!("failed to resolve URL `{url}`:\n{e}") } } @@ -307,3 +307,270 @@ fn get_relative + AsRef>( }; 
Ok(src_path) } + +// TODO: Collect common arguments +#[allow(clippy::too_many_arguments)] +pub fn command_add_experimental( + locator: ExpAddProjectLocatorArgs, + // no_lock: bool, + // no_sync: bool, + resolution_opts: ResolutionOptions, + // TODO: figure out how to adapt these + // source_opts: Box, + mut config: Config, + config_file: Option, + no_config: bool, + ctx: ProjectContext, + client: reqwest_middleware::ClientWithMiddleware, + runtime: Arc, + auth_policy: Arc, +) -> Result<()> { + let provided_iris = if !resolution_opts.include_std { + let sysml_std = crate::known_std_libs(); + sysml_std + } else { + HashMap::default() + }; + + // let iri = iri.as_ref(); + let mut current_project = ctx + .current_project + .clone() + .ok_or(CliError::MissingProjectCurrentDir)?; + + // #[allow(clippy::manual_map)] // For readability and compactness + // let source = if let Some(path) = source_opts.from_path { + // let metadata = wrapfs::metadata(&path)?; + // if metadata.is_dir() { + // Some(Source::LocalSrc { + // src_path: get_relative(path, current_project.root_path())? + // .as_str() + // .into(), + // }) + // } else if metadata.is_file() { + // Some(Source::LocalKpar { + // kpar_path: get_relative(path, current_project.root_path())? + // .as_str() + // .into(), + // }) + // } else { + // bail!("path `{path}` is neither a directory nor a file"); + // } + // } else if let Some(url) = source_opts.from_url { + // let ResolutionOptions { + // index, + // default_index, + // no_index, + // include_std: _, + // } = resolution_opts.clone(); + + // let index_urls = if no_index { + // None + // } else { + // Some(config.index_urls(index, vec![DEFAULT_INDEX_URL.to_string()], default_index)?) 
+ // }; + // let std_resolver = standard_resolver( + // None, + // None, + // Some(client.clone()), + // index_urls, + // runtime.clone(), + // auth_policy.clone(), + // ); + // let outcome = std_resolver.resolve_read_raw(&url)?; + // let mut source = None; + // match outcome { + // ResolutionOutcome::Resolved(alternatives) => { + // for candidate in alternatives { + // match candidate { + // Ok(project) => { + // source = project.sources(&ctx)?.first().cloned(); + // if source.is_some() { + // break; + // } + // } + // Err(err) => { + // log::debug!("skipping candidate project: {err}"); + // } + // } + // } + // } + // ResolutionOutcome::UnsupportedIRIType(e) => bail!("unsupported URL `{url}`:\n{e}"), + // ResolutionOutcome::Unresolvable(e) => { + // bail!("failed to resolve URL `{url}`:\n{e}") + // } + // } + // if source.is_none() { + // bail!("unable to find project at URL `{url}`") + // } + // source + // } else if let Some(editable) = source_opts.as_editable { + // Some(Source::Editable { + // editable: get_relative(editable, current_project.root_path())? + // .as_str() + // .into(), + // }) + // } else if let Some(src_path) = source_opts.as_local_src { + // Some(Source::LocalSrc { + // src_path: get_relative(src_path, current_project.root_path())? + // .as_str() + // .into(), + // }) + // } else if let Some(kpar_path) = source_opts.as_local_kpar { + // Some(Source::LocalKpar { + // kpar_path: get_relative(kpar_path, current_project.root_path())? 
+ // .as_str() + // .into(), + // }) + // } else if let Some(remote_src) = source_opts.as_remote_src { + // Some(Source::RemoteSrc { + // remote_src: remote_src.into_string(), + // }) + // } else if let Some(remote_kpar) = source_opts.as_remote_kpar { + // Some(Source::RemoteKpar { + // remote_kpar: remote_kpar.into_string(), + // remote_kpar_size: None, + // }) + // // TODO: make all --as-* use new-style usages unconditionally, otherwise will need two impl for them + // } else if let Some(remote_git) = source_opts.as_remote_git { + // Some(Source::RemoteGit { + // remote_git: remote_git.into_string(), + // rev: todo!(), + // path: todo!(), + // }) + // } else { + // None + // }; + + // if let Some(source) = source { + // let config_path = config_file + // .map(Utf8PathBuf::from) + // .or((!no_config).then(|| current_project.root_path().join(CONFIG_FILE))); + + // if let Some(path) = config_path { + // add_project_source_to_config(&path, iri, &source)?; + // } else { + // log::warn!("project source for `{iri}` not added to any config file"); + // } + + // config.projects.push(ConfigProject { + // identifiers: vec![iri.to_owned()], + // sources: vec![source], + // }); + // } + + let usage = match locator { + ExpAddProjectLocatorArgs::Url { + publisher, + name, + url, + } => { + // Currently std libs are only identified by IRIs + if provided_iris.contains_key(url.as_str()) { + crate::logger::warn_std(url.as_str()); + return Ok(()); + } + let usage = InterchangeProjectUsageRaw::Url { + url: url.into_string(), + publisher, + name, + }; + usage + } + ExpAddProjectLocatorArgs::Path { + publisher, + name, + path, + } => { + let usage = InterchangeProjectUsageRaw::Path { + // TODO: turn path into relative to current workspace/project root + path: path.into(), + publisher, + name, + }; + usage + } + ExpAddProjectLocatorArgs::Index { + publisher, + name, + version_constraint, + } => { + // TODO: don't use raw usage here, we already parsed the types + let usage = 
InterchangeProjectUsageRaw::Index { + publisher, + name, + version_constraint: version_constraint.to_string(), + }; + usage + } + ExpAddProjectLocatorArgs::Git { + publisher, + name, + git, + options, + } => { + let id = if let Some(rev) = options.rev { + GitId::Rev(rev) + } else if let Some(tag) = options.tag { + GitId::Tag(tag) + } else if let Some(branch) = options.branch { + GitId::Branch(branch) + } else { + unreachable!() + }; + let usage = InterchangeProjectUsageRaw::Git { + git: git.into_string(), + id, + publisher, + name, + }; + usage + } + }; + + // if !no_lock { + let info_path = current_project.info_path(); + let info_backup = wrapfs::read_to_string(&info_path)?; + match do_add(&mut current_project, &usage) { + Ok(added) => { + if !added { + return Ok(()); + } + } + Err(e) => return Err(e.into()), + } + + let alias_iris = if let Some(w) = &ctx.current_workspace { + w.projects() + .iter() + .find(|p| Path::new(&p.path) == current_project.root_path()) + .map(|p| p.iris.to_owned()) + } else { + None + }; + + match resolve_deps( + // no_sync, + false, + resolution_opts, + &config, + client, + runtime, + auth_policy, + current_project.root_path(), + alias_iris, + provided_iris, + &ctx, + ) { + Ok(_) => Ok(()), + Err(e) => { + // Restore old info + wrapfs::write(&info_path, info_backup)?; + Err(e) + } + } + // } else { + // do_add(&mut current_project, &usage_raw)?; + // Ok(()) + // } +} diff --git a/sysand/src/commands/clone.rs b/sysand/src/commands/clone.rs index cfce6f2a..8f792bde 100644 --- a/sysand/src/commands/clone.rs +++ b/sysand/src/commands/clone.rs @@ -380,12 +380,12 @@ pub fn get_project_version( } } } - ResolutionOutcome::UnsupportedIRIType(e) => bail!( + ResolutionOutcome::UnsupportedUsageType(e) => bail!( "IRI scheme `{}` of `{}` is not supported: {e}", iri.scheme(), iri ), - ResolutionOutcome::Unresolvable(e) => { + ResolutionOutcome::NotFound(e) => { bail!("failed to resolve project `{iri}`: {e}") } } diff --git 
a/sysand/src/commands/remove.rs b/sysand/src/commands/remove.rs index 8368534f..4ce4b548 100644 --- a/sysand/src/commands/remove.rs +++ b/sysand/src/commands/remove.rs @@ -8,7 +8,7 @@ use sysand_core::{ config::local_fs::{CONFIG_FILE, remove_project_source_from_config}, context::ProjectContext, model::InterchangeProjectUsageG, - remove::do_remove, + remove::{do_remove, do_remove_experimental}, }; use crate::CliError; @@ -27,6 +27,8 @@ pub fn command_remove>( .map(Utf8PathBuf::from) .or((!no_config).then(|| current_project.root_path().join(CONFIG_FILE))); + // TODO: this is trickier, the project may appear as a transitive dep, + // so it's not always correct to remove the override if let Some(path) = config_path { remove_project_source_from_config(path, &iri)?; } @@ -52,27 +54,7 @@ pub fn command_remove>( log::info!("{header}{removed:>12}{header:#} `{}`", resource,); } }, - InterchangeProjectUsageG::Url { - url, - publisher, - name, - } => todo!(), - InterchangeProjectUsageG::Path { - path, - publisher, - name, - } => todo!(), - InterchangeProjectUsageG::Git { - git, - id, - publisher, - name, - } => todo!(), - InterchangeProjectUsageG::Index { - publisher, - name, - version_constraint, - } => todo!(), + _ => (), } } else { log::info!("{header}{removed:>12}{header:#}:"); @@ -94,28 +76,77 @@ pub fn command_remove>( log::info!("{:>13} `{}`", ' ', resource,); } }, + _ => (), + } + } + } + + Ok(()) +} + +pub fn command_remove_experimental( + publisher: String, + name: String, + ctx: ProjectContext, + config_file: Option, + no_config: bool, +) -> Result<()> { + let mut current_project = ctx + .current_project + .ok_or(CliError::MissingProjectCurrentDir)?; - InterchangeProjectUsageG::Url { - url, - publisher, - name, - } => todo!(), - InterchangeProjectUsageG::Path { - path, - publisher, - name, - } => todo!(), - InterchangeProjectUsageG::Git { - git, - id, - publisher, - name, - } => todo!(), - InterchangeProjectUsageG::Index { - publisher, - name, + let config_path = 
config_file + .map(Utf8PathBuf::from) + .or((!no_config).then(|| current_project.root_path().join(CONFIG_FILE))); + + // if let Some(path) = config_path { + // remove_project_source_from_config(path, &iri)?; + // } + + let usages = do_remove_experimental(&mut current_project, &publisher, &name)?; + + let removed = "Removed"; + let header = sysand_core::style::get_style_config().header; + if let [usage] = usages.as_slice() { + match usage { + InterchangeProjectUsageG::Resource { + resource, + version_constraint, + } => match version_constraint { + Some(vc) => { + log::info!( + "{header}{removed:>12}{header:#} `{}` with version constraints `{}`", + resource, + vc + ); + } + None => { + log::info!("{header}{removed:>12}{header:#} `{}`", resource,); + } + }, + _ => (), + } + } else { + log::info!("{header}{removed:>12}{header:#}:"); + for usage in usages { + match usage { + InterchangeProjectUsageG::Resource { + resource, version_constraint, - } => todo!(), + } => match version_constraint { + Some(vc) => { + log::info!( + "{:>13} `{}` with version constraints `{}`", + ' ', + resource, + vc + ); + } + None => { + log::info!("{:>13} `{}`", ' ', resource,); + } + }, + _ => (), } } } diff --git a/sysand/src/lib.rs b/sysand/src/lib.rs index 2e47df5e..6b27abe6 100644 --- a/sysand/src/lib.rs +++ b/sysand/src/lib.rs @@ -45,9 +45,9 @@ use sysand_core::{ use url::Url; use crate::{ - cli::{Args, Command, InfoCommand}, + cli::{Args, Command, ExpAddProjectLocatorArgs, ExpCommand, InfoCommand}, commands::{ - add::command_add, + add::{command_add, command_add_experimental}, build::{command_build_for_project, command_build_for_workspace}, env::{ command_env, command_env_install, command_env_install_path, command_env_list, @@ -59,7 +59,7 @@ use crate::{ init::command_init, lock::command_lock, print_root::command_print_root, - remove::command_remove, + remove::{command_remove, command_remove_experimental}, sources::{command_sources_env, command_sources_project}, sync::command_sync, }, 
@@ -713,6 +713,60 @@ pub fn run_cli(args: cli::Args) -> Result<()> { runtime, basic_auth_policy, ), + Command::Experimental { subcommand } => { + match subcommand { + ExpCommand::Add { + locator, + resolution_opts, + } => { + // Command::Add { + // locator, + // version_constraint, + // no_lock, + // no_sync, + // resolution_opts, + // source_opts, + // } => { + // let iri = iri_or_path_to_iri(locator.iri, locator.path)?; + // command_add( + // iri, + // version_constraint, + // no_lock, + // no_sync, + // resolution_opts, + // source_opts, + // config, + // args.global_opts.config_file, + // args.global_opts.no_config, + // ctx, + // client, + // runtime, + // basic_auth_policy, + // ) + // } + command_add_experimental( + locator, + // no_lock, + // no_sync, + resolution_opts, + config, + args.global_opts.config_file, + args.global_opts.no_config, + ctx, + client, + runtime, + basic_auth_policy, + ) + } + ExpCommand::Remove { publisher, name } => command_remove_experimental( + publisher, + name, + ctx, + args.global_opts.config_file, + args.global_opts.no_config, + ), + } + } } } diff --git a/sysand/tests/cli_lock.rs b/sysand/tests/cli_lock.rs index c9010e64..a15ccf19 100644 --- a/sysand/tests/cli_lock.rs +++ b/sysand/tests/cli_lock.rs @@ -224,7 +224,7 @@ fn inject_usages_versions< )?; for (usage, version_req) in usages { - info.usage.push(InterchangeProjectUsageRaw { + info.usage.push(InterchangeProjectUsageRaw::Resource { resource: usage.as_ref().to_string(), version_constraint: version_req.map(|x| x.as_ref().to_string()), }); From 1b3ac4e0f4b506386ab8587c7101dabe34222bd5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andrius=20Puk=C5=A1ta?= Date: Mon, 13 Apr 2026 15:48:42 +0300 Subject: [PATCH 5/5] WIP MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Andrius Pukšta --- bindings/py/pyproject.toml | 4 +- core/src/commands/lock.rs | 31 +-- core/src/commands/sync.rs | 45 ++-- core/src/env/local_directory/metadata.rs 
| 12 +- core/src/lock.rs | 20 +- core/src/model.rs | 44 +++- core/src/project/editable.rs | 8 + core/src/project/gix_git_download.rs | 220 +++++++++++------- core/src/project/local_src.rs | 4 + core/src/project/memory.rs | 8 + core/src/project/mod.rs | 61 ++++- core/src/project/reference.rs | 12 +- core/src/project/utils.rs | 45 +++- core/src/resolve/combined.rs | 97 ++++---- core/src/resolve/env.rs | 25 +- core/src/resolve/file.rs | 66 ++++-- core/src/resolve/gix_git.rs | 139 ++++++++---- core/src/resolve/memory.rs | 38 +++- core/src/resolve/mod.rs | 2 +- core/src/resolve/reqwest_http.rs | 70 ++++-- core/src/resolve/typed_resolver.rs | 2 + core/src/solve/pubgrub.rs | 278 +++++++++++------------ macros/src/lib.rs | 27 +++ sysand/src/cli.rs | 134 ++++++++++- sysand/src/commands/add.rs | 142 ++---------- sysand/src/commands/clone.rs | 233 ++++++++++++++++++- sysand/src/commands/env.rs | 23 +- sysand/src/commands/sync.rs | 6 +- sysand/src/lib.rs | 17 ++ 29 files changed, 1241 insertions(+), 572 deletions(-) diff --git a/bindings/py/pyproject.toml b/bindings/py/pyproject.toml index b5789be8..7ca73bfb 100644 --- a/bindings/py/pyproject.toml +++ b/bindings/py/pyproject.toml @@ -59,6 +59,6 @@ module-name = "sysand._sysand_core" # # ref: https://docs.pytest.org/en/stable/ # -[tool.pytest.ini_options] -addopts = "--verbose --color=yes --durations=10" +[tool.pytest] +addopts = ["--verbose", "--color=yes", "--durations=10"] testpaths = ["tests"] diff --git a/core/src/commands/lock.rs b/core/src/commands/lock.rs index 2590e92f..e7bd27fc 100644 --- a/core/src/commands/lock.rs +++ b/core/src/commands/lock.rs @@ -1,6 +1,7 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 +use camino::Utf8PathBuf; use fluent_uri::Iri; use std::{ collections::{HashMap, HashSet}, @@ -17,9 +18,13 @@ pub const DEFAULT_LOCKFILE_NAME: &str = "sysand-lock.toml"; use crate::project::{editable::EditableProject, local_src::LocalSrcProject, 
utils::ToPathBuf}; use crate::{ context::ProjectContext, - lock::{Lock, Project, Usage, hash_str}, + lock::{Lock, Project, hash_str}, model::{InterchangeProjectUsage, InterchangeProjectUsageG, InterchangeProjectValidationError}, - project::{CanonicalizationError, ProjectRead, memory::InMemoryProject, utils::FsIoError}, + project::{ + CanonicalizationError, ProjectRead, + memory::InMemoryProject, + utils::{FsIoError, Identifier}, + }, resolve::ResolveRead, solve::pubgrub::{SolverError, solve}, }; @@ -64,7 +69,7 @@ pub enum LockError { pub struct LockOutcome { pub lock: Lock, - pub dependencies: Vec<(fluent_uri::Iri, PD)>, + pub dependencies: Vec<(Identifier, PD)>, } /// Generates a lockfile by solving for a (compatible) set of interchange projects @@ -85,6 +90,7 @@ pub fn do_lock_projects< R: ResolveRead + Debug, >( projects: I, + base_path: Option, resolver: R, provided_iris: &HashMap>, ctx: &ProjectContext, @@ -113,15 +119,13 @@ pub fn do_lock_projects< .map_err(LockProjectError::InputProjectError)?; debug_assert!(!sources.is_empty()); - // TODO :this needs rethinking. How to map deps from InterchangeProjectUsage to proper Usage string? 
- // This cannot be done before resolving them lock.projects.push(Project { name: Some(info.name), publisher: info.publisher, version: info.version, exports: meta.index.into_keys().collect(), identifiers: identifiers - .map(|ids| ids.into_iter().map(|id| id.into_string()).collect()) + .map(|ids| ids.into_iter().map(Identifier::from).collect()) .unwrap_or_default(), checksum: canonical_hash, sources, @@ -139,7 +143,7 @@ pub fn do_lock_projects< all_deps.extend(usages); } - let lock_outcome = do_lock_extend(lock, all_deps, resolver, provided_iris, ctx)?; + let lock_outcome = do_lock_extend(lock, all_deps, base_path, resolver, provided_iris, ctx)?; Ok(lock_outcome) } @@ -161,13 +165,14 @@ pub fn do_lock_extend< >( mut lock: Lock, usages: I, + base_path: Option, resolver: R, provided_iris: &HashMap>, ctx: &ProjectContext, ) -> Result, LockError> { let inputs: Vec<_> = usages.into_iter().collect(); let mut dependencies = vec![]; - let solution = solve(inputs, resolver).map_err(LockError::Solver)?; + let solution = solve(inputs, base_path, resolver).map_err(LockError::Solver)?; let mut lock_projects = HashSet::new(); let mut lock_symbols = HashMap::new(); for (i, p) in lock.projects.iter().enumerate() { @@ -186,13 +191,13 @@ pub fn do_lock_extend< } } - for (iri, (info, meta, project)) in solution { + for (identifier, (info, meta, project)) in solution { let canonical_hash = project .checksum_canonical_hex() .map_err(LockError::DependencyProjectCanonicalization)? 
.ok_or_else(|| LockError::IncompleteInputProject(format!("\n{:?}", project)))?; - let sources = if !provided_iris.contains_key(iri.as_str()) { + let sources = if !provided_iris.contains_key(identifier.as_str()) { let sources = project.sources(ctx).map_err(LockError::DependencyProject)?; debug_assert!(!sources.is_empty()); sources @@ -207,7 +212,7 @@ pub fn do_lock_extend< publisher: info.publisher, version: info.version.to_string(), exports: meta.index.into_keys().collect(), - identifiers: vec![iri.to_string()], + identifiers: vec![identifier], checksum: canonical_hash, sources, usages: info @@ -219,7 +224,7 @@ pub fn do_lock_extend< if lock_projects.contains(&lock_project.hash_val()) { log::debug!( "not adding project `{}` ({}) to lock, as lock already contains it", - iri, + identifier, lock_project.version ); } else { @@ -243,7 +248,7 @@ pub fn do_lock_extend< lock.projects.push(lock_project); } - dependencies.push((iri, project)); + dependencies.push((identifier, project)); } Ok(LockOutcome { lock, dependencies }) diff --git a/core/src/commands/sync.rs b/core/src/commands/sync.rs index 3a92499b..9c521763 100644 --- a/core/src/commands/sync.rs +++ b/core/src/commands/sync.rs @@ -10,7 +10,7 @@ use crate::{ commands::env::do_env_install_project, env::{ReadEnvironment, WriteEnvironment, utils::ErrorBound}, lock::{Lock, Source}, - project::{ProjectRead, memory::InMemoryProject}, + project::{ProjectRead, memory::InMemoryProject, utils::Identifier}, }; #[derive(Error, Debug)] @@ -20,7 +20,7 @@ pub enum SyncError { #[error("project with IRI `{0}` is missing `.project.json` or `.meta.json`")] BadProject(String), #[error("project with IRI(s) {0:?} has no known sources in lockfile")] - MissingSource(Box<[String]>), + MissingSource(Box<[Identifier]>), #[error("no IRI given for project with src_path = `{0}` in lockfile")] MissingIriSrcPath(Box), #[error("no IRI given for project with remote_src = `{0}` in lockfile")] @@ -54,7 +54,9 @@ pub enum SyncError { #[error("invalid 
remote source URL `{0}`:\n{1}")] InvalidRemoteSource(Box, UrlParseError), #[error("no supported sources for project with IRI `{0}`")] - UnsupportedSources(String), + UnsupportedSourcesId(Identifier), + #[error("no supported sources for project with publisher {0:?}, name {1:?}")] + UnsupportedSourcesPubName(Option, Option), #[error("failed to install project `{uri}`:\n{cause}")] InstallFail { uri: Box, cause: String }, #[error( @@ -105,7 +107,7 @@ where KParPathStorage: ProjectRead, CreateRemoteKParStorage: Fn(String) -> Result, RemoteKParStorage: ProjectRead, - CreateRemoteGitStorage: Fn(String) -> Result, + CreateRemoteGitStorage: Fn(&str, String, Option) -> Result, RemoteGitStorage: ProjectRead, { let syncing = "Syncing"; @@ -118,11 +120,13 @@ where let main_uri = project.identifiers.first(); for iri in &project.identifiers { - let excluded_versions = if let Ok(parsed_iri) = fluent_uri::Iri::parse(iri.clone()) { - provided_iris.get(parsed_iri.normalize().as_str()) - } else { - provided_iris.get(iri.as_str()) - }; + // TODO: revisit this when dealing with IRI normalization + let excluded_versions = + if let Ok(parsed_iri) = fluent_uri::Iri::parse(iri.as_str().clone()) { + provided_iris.get(parsed_iri.normalize().as_str()) + } else { + provided_iris.get(iri.as_str()) + }; let checksum = &project.checksum; if let Some(versions) = excluded_versions { @@ -226,7 +230,7 @@ where } Source::RemoteGit { remote_git, - // TODO: implement (shallow) cloning of the sepecified rev and dir traversal + // TODO: implement (shallow) cloning of the specified rev and dir traversal rev, path, } => { @@ -236,23 +240,26 @@ where let remote_git_storage = remote_git_storage.as_ref().ok_or_else(|| { SyncError::MissingRemoteGitStorage(remote_git.as_str().into()) })?; - let storage = remote_git_storage(remote_git.clone()) - .map_err(|e| SyncError::GitDownload(remote_git.as_str().into(), e))?; + let storage = + remote_git_storage(remote_git.as_str(), rev.to_owned(), path.to_owned()) + 
.map_err(|e| SyncError::GitDownload(remote_git.as_str().into(), e))?; log::debug!("trying to install `{uri}` from remote_git: {remote_git}"); try_install(uri, &project.checksum, storage, env)?; - } - _ => supported = false, + } // _ => supported = false, } if supported { no_supported = false; } } if no_supported { - return Err(SyncError::UnsupportedSources( - main_uri - .cloned() - .unwrap_or_else(|| "project without IRI".to_string()), - )); + if let Some(id) = main_uri { + return Err(SyncError::UnsupportedSourcesId(id.to_owned())); + } else { + return Err(SyncError::UnsupportedSourcesPubName( + project.publisher.clone(), + project.name.clone(), + )); + } } updated = true; } diff --git a/core/src/env/local_directory/metadata.rs b/core/src/env/local_directory/metadata.rs index 41c2e429..6e545ab0 100644 --- a/core/src/env/local_directory/metadata.rs +++ b/core/src/env/local_directory/metadata.rs @@ -15,7 +15,7 @@ use crate::{ lock::{Lock, ResolutionError, Source, multiline_array}, project::{ local_src::{LocalSrcError, LocalSrcProject}, - utils::{FsIoError, ToUnixPathBuf, deserialize_unix_path, wrapfs}, + utils::{FsIoError, Identifier, ToUnixPathBuf, deserialize_unix_path, wrapfs}, }, }; @@ -156,7 +156,7 @@ impl EnvMetadata { doc } - fn find_project(&self, identifiers: &[String], version: &String) -> Option { + fn find_project(&self, identifiers: &[Identifier], version: &String) -> Option { for (index, project) in self.projects.iter().enumerate() { if &project.version == version && project @@ -181,18 +181,18 @@ impl EnvMetadata { pub fn remove_project, V: AsRef>(&mut self, iri: S, version: Option) { if let Some(v) = version { self.projects.retain(|p| { - p.version != v.as_ref() || !p.identifiers.iter().any(|i| i == iri.as_ref()) + p.version != v.as_ref() || !p.identifiers.iter().any(|i| i.as_str() == iri.as_ref()) }); } else { self.projects - .retain(|p| !p.identifiers.iter().any(|i| i == iri.as_ref())); + .retain(|p| !p.identifiers.iter().any(|i| i.as_str() == 
iri.as_ref())); } } /// Add `LocalSrcProject` to env. Must have `nominal_path` set. pub fn add_local_project( &mut self, - identifiers: Vec, + identifiers: Vec, project: LocalSrcProject, editable: bool, workspace: bool, @@ -251,7 +251,7 @@ pub struct EnvProject { /// is the IRI it is installed as. The rest are considered /// as aliases. Can only be empty for `editable` projects. #[serde(default)] - pub identifiers: Vec, + pub identifiers: Vec, /// Usages of the project. Intended for tools needing to /// track the interdependence of project in the environment. #[serde(default)] diff --git a/core/src/lock.rs b/core/src/lock.rs index 3662498b..6cc85fc6 100644 --- a/core/src/lock.rs +++ b/core/src/lock.rs @@ -402,9 +402,8 @@ pub struct Project { pub version: String, #[serde(skip_serializing_if = "Vec::is_empty", default)] pub exports: Vec, - // TODO: what to do about identifiers for non-IRI/URL projects? #[serde(skip_serializing_if = "Vec::is_empty", default)] - pub identifiers: Vec, + pub identifiers: Vec, #[serde(skip_serializing_if = "Vec::is_empty", default)] pub usages: Vec, #[serde(skip_serializing_if = "Vec::is_empty", default)] @@ -451,6 +450,12 @@ pub fn hash_str(val: &str) -> StrHash { StrHash(hasher.finish()) } +impl From<&Identifier> for toml_edit::Value { + fn from(value: &Identifier) -> Self { + Self::from(value.as_str()) + } +} + impl Project { pub fn to_toml(&self) -> Table { let mut table = Table::new(); @@ -876,9 +881,12 @@ mod tests { use toml_edit::DocumentMut; use typed_path::Utf8UnixPathBuf; - use crate::lock::{ - CURRENT_LOCK_VERSION, LOCKFILE_PREFIX, Lock, Project, Source, Usage, ValidationError, - VersionError, check_lock_version, project_with, + use crate::{ + lock::{ + CURRENT_LOCK_VERSION, LOCKFILE_PREFIX, Lock, Project, Source, Usage, ValidationError, + VersionError, check_lock_version, project_with, + }, + project::utils::Identifier, }; const CHECKSUM: &str = "0000000000000000000000000000000000000000000000000000000000000000"; @@ -1075,7 
+1083,7 @@ checksum = "{CHECKSUM}" publisher: None, version: "0.2.1".to_string(), exports: vec![], - identifiers: vec!["urn:kpar:example".to_string()], + identifiers: vec![Identifier::from_iri_unchecked_str("urn:kpar:example")], usages: vec![], sources: vec![], checksum: CHECKSUM.to_string(), diff --git a/core/src/model.rs b/core/src/model.rs index 4aa11538..5af68c88 100644 --- a/core/src/model.rs +++ b/core/src/model.rs @@ -8,6 +8,7 @@ use digest::{generic_array::GenericArray, typenum}; use indexmap::IndexMap; #[cfg(feature = "python")] use pyo3::{FromPyObject, IntoPyObject, pyclass}; +use semver::VersionReq; use serde::{Deserialize, Serialize}; use thiserror::Error; use typed_path::{Utf8UnixPath, Utf8UnixPathBuf}; @@ -82,6 +83,7 @@ pub const KNOWN_METAMODELS: [&str; 2] = [ // } // } +// TODO: maybe make this generic over AsRef? #[derive(Eq, Clone, PartialEq, Serialize, Deserialize, Hash, Debug)] #[cfg_attr(feature = "python", derive(FromPyObject, IntoPyObject))] #[serde(rename_all = "camelCase", untagged)] @@ -166,7 +168,7 @@ impl Display } => { write!(f, "IRI `{resource}`")?; if let Some(vc) = version_constraint { - write!(f, " {vc}")?; + write!(f, " ({vc})")?; } } InterchangeProjectUsageG::Url { @@ -205,6 +207,20 @@ impl Display } impl InterchangeProjectUsageG { + pub fn from_iri(iri: Iri) -> Self { + InterchangeProjectUsageG::Resource { + resource: iri, + version_constraint: None, + } + } + + pub fn from_iri_version(iri: Iri, version: VersionReq) -> Self { + InterchangeProjectUsageG::Resource { + resource: iri, + version_constraint: Some(version), + } + } + /// Get the canonical IRI representing this usage. This IRI is not resolvable /// on its own. 
/// This is expensive, don't call repeatedly @@ -239,6 +255,32 @@ impl InterchangeProjectUsageG bool { + match self { + InterchangeProjectUsage::Resource { + resource: _, + version_constraint, + } => { + if let Some(vc) = version_constraint { + vc.matches(version) + } else { + true + } + } + InterchangeProjectUsage::Index { + publisher: _, + name: _, + version_constraint, + } => version_constraint.matches(version), + InterchangeProjectUsage::Url { .. } => true, + InterchangeProjectUsage::Path { .. } => true, + InterchangeProjectUsage::Git { .. } => true, + } + } +} + impl InterchangeProjectUsageRaw { // TODO: consolidate to `try_from()`? pub fn validate(&self) -> Result { diff --git a/core/src/project/editable.rs b/core/src/project/editable.rs index f6c731e2..aa056c79 100644 --- a/core/src/project/editable.rs +++ b/core/src/project/editable.rs @@ -82,4 +82,12 @@ impl ProjectRead for EditableProject

{ Ok(inner_sources) } + + fn project_root(&self) -> Option<&camino::Utf8Path> { + self.inner.project_root() + } + + fn base_path_for_usage_resolver(&self) -> Option<&camino::Utf8Path> { + self.inner.base_path_for_usage_resolver() + } } diff --git a/core/src/project/gix_git_download.rs b/core/src/project/gix_git_download.rs index fa936f2a..287e22ee 100644 --- a/core/src/project/gix_git_download.rs +++ b/core/src/project/gix_git_download.rs @@ -7,20 +7,15 @@ use camino::{Utf8Path, Utf8PathBuf}; use camino_tempfile::Utf8TempDir; use gix::{ prepare_clone, - progress::{self, Discard}, - remote::{ - Direction, - fetch::{self, Shallow}, - }, + remote::{Direction, fetch::Shallow}, }; -use serde::Deserialize; use thiserror::Error; use walkdir::WalkDir; use crate::{ context::ProjectContext, lock::Source, - model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + model::{GitId, InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, project::{ ProjectRead, local_src::{LocalSrcError, LocalSrcProject, PathError}, @@ -174,93 +169,114 @@ impl From for GixDownloadedError { } impl GixDownloadedProjectExact { - /// Immediately clone the repo and try to find the project publisher/name + /// Immediately clone the repo and try to find the project publisher/name, + /// if present pub fn new_download_find>( url: S, - rev: Option, - publisher: impl AsRef, - name: impl AsRef, + rev: Option<&GitId>, + pub_name: Option<(impl AsRef, impl AsRef)>, ) -> Result { let url = url.as_ref(); - let publisher = publisher.as_ref(); - let name = name.as_ref(); let tmp_dir = camino_tempfile::tempdir().map_err(FsIoError::MkTempDir)?; - let repo = download_repo_to_temp(&tmp_dir, url, rev.as_deref())?; + let (repo, rev) = download_repo_to_temp(&tmp_dir, url, rev)?; - let rev = rev.unwrap_or_else(|| repo.head_commit().unwrap().id().to_string()); + let rev = rev + .unwrap_or_else(|| repo.head_commit().unwrap().id().detach()) + .to_string(); // TODO: find specified project in repo and convert to 
path // TODO: Since gix provides a way to iterate over non-checked-out files, // checkout may not be necessary. - // Check every `.project.json` file - for entry in WalkDir::new(tmp_dir.path()) - .into_iter() - .filter_entry(|entry| entry.file_name() == ".git") - { - match entry { - Ok(entry) => { - if !entry.file_type().is_file() || entry.path().ends_with(".project.json") { - continue; - } - let Some(path) = entry.path().to_str() else { - log::debug!( - "ignoring path `{}` as it contains invalid Unicode", - entry.path().display() - ); - continue; - }; - let info: InterchangeProjectInfoRaw = - match serde_json::from_reader(wrapfs::File::open(path)?) { - Ok(info) => info, - Err(e) => { - log::debug!( - "ignoring file `{}` due to error: {e}", - entry.path().display() - ); - continue; - } - }; - - if info.publisher.as_deref() == Some(publisher) && info.name == name { - // FOUND - // let mut canonical_temp = wrapfs::canonicalize(tmp_dir.path())?; - // Append path inside the repo, as it will be cloned to the temp dir - let downloaded_project = LocalSrcProject { - nominal_path: None, - project_path: entry.path().parent().unwrap().to_str().unwrap().into(), + if let Some((publisher, name)) = pub_name { + let (publisher, name) = (publisher.as_ref(), name.as_ref()); + // Check every `.project.json` file + for entry in WalkDir::new(tmp_dir.path()) + .into_iter() + .filter_entry(|entry| entry.file_name() == ".git") + { + match entry { + Ok(entry) => { + if !entry.file_type().is_file() || entry.path().ends_with(".project.json") { + continue; + } + let Some(path) = entry.path().to_str() else { + log::debug!( + "ignoring path `{}` as it contains invalid Unicode", + entry.path().display() + ); + continue; }; - let path_in_repo = downloaded_project - .project_path - .strip_prefix(tmp_dir.path()) - .unwrap(); - return Ok(GixDownloadedProjectExact { - url: url.to_owned(), - rev, - path: if path_in_repo.as_str().is_empty() { - None - } else { - Some(path_in_repo.to_owned()) - }, - 
inner: downloaded_project, - tmp_dir, - }); + let info: InterchangeProjectInfoRaw = + match serde_json::from_reader(wrapfs::File::open(path)?) { + Ok(info) => info, + Err(e) => { + log::debug!( + "ignoring file `{}` due to error: {e}", + entry.path().display() + ); + continue; + } + }; + + if info.publisher.as_deref() == Some(publisher) && info.name == name { + // let mut canonical_temp = wrapfs::canonicalize(tmp_dir.path())?; + // Append path inside the repo, as it will be cloned to the temp dir + let downloaded_project = LocalSrcProject { + nominal_path: None, + project_path: entry + .path() + .parent() + .unwrap() + .to_str() + .unwrap() + .into(), + }; + let path_in_repo = downloaded_project + .project_path + .strip_prefix(tmp_dir.path()) + .unwrap(); + return Ok(GixDownloadedProjectExact { + url: url.to_owned(), + rev, + path: if path_in_repo.as_str().is_empty() { + None + } else { + Some(path_in_repo.to_owned()) + }, + inner: downloaded_project, + tmp_dir, + }); + } + } + Err(e) => { + log::debug!("skipping path due to error: {e}"); } - } - Err(e) => { - log::debug!("skipping path due to error: {e}"); } } - } - Err(GixDownloadedError::ProjectNotFound { - repo_url: url.into(), - rev: rev.into(), - publisher: publisher.into(), - name: name.into(), - }) + Err(GixDownloadedError::ProjectNotFound { + repo_url: url.into(), + rev: rev.into(), + publisher: publisher.into(), + name: name.into(), + }) + } else { + let inner = LocalSrcProject { + nominal_path: None, + project_path: tmp_dir.path().to_owned(), + }; + return Ok(GixDownloadedProjectExact { + url: url.to_owned(), + rev, + path: None, + tmp_dir, + inner, + }); + } } + /// For sync use. 
/// `path` must be relative path inside repo pub fn new_download>( url: S, @@ -269,7 +285,8 @@ impl GixDownloadedProjectExact { ) -> Result { let url = url.as_ref(); let tmp_dir = camino_tempfile::tempdir().map_err(FsIoError::MkTempDir)?; - let _repo = download_repo_to_temp(&tmp_dir, url, Some(&rev))?; + // TODO: don't clone rev + let (_repo, _rev) = download_repo_to_temp(&tmp_dir, url, Some(&GitId::Rev(rev.clone())))?; let downloaded_project = LocalSrcProject { nominal_path: None, @@ -360,26 +377,52 @@ impl GixDownloadedProject { } } -/// Clone the repo, the checkout `rev` (which must be a commit SHA1/256). +/// Clone the repo, the checkout `git_ref` if present, or default branch `HEAD` +/// otherwise. /// Adapted from gitoxide `main_worktree()`: /// https://github.com/GitoxideLabs/gitoxide/blob/v0.52.0/gix/src/clone/checkout.rs#L85 fn download_repo_to_temp( tmp_dir: &Utf8TempDir, url: &str, - rev: Option<&str>, -) -> Result { - let repo = if let Some(rev) = rev { + git_ref: Option<&GitId>, +) -> Result<(gix::Repository, Option), GixDownloadedError> { + let ret = if let Some(git_ref) = git_ref { // Fetch all objects without checking out any files let (repo, _) = gix::prepare_clone(url, tmp_dir.path()) .unwrap() .fetch_only(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) .unwrap(); - // Resolve the SHA to a commit, then get its tree - // We already checked that this is a valid SHA1/256 - let commit_id = gix::ObjectId::from_hex(rev.as_bytes()).unwrap(); + let rev = match git_ref { + GitId::Rev(rev) => { + // Resolve the SHA to a commit, then get its tree + gix::ObjectId::from_hex(rev.as_bytes()).unwrap() + } + GitId::Tag(tag) => { + // TODO: tags may not be present due to gitconfig which is read by gix, + // and thus may need to be fetched separately + repo.try_find_reference(&format!("refs/tags/{tag}")) + .unwrap() + .unwrap() + .peel_to_commit() + .unwrap() + .id + } + GitId::Branch(branch) => { + let remote = 
repo.find_default_remote(Direction::Fetch).unwrap().unwrap(); + let branch_ref = + format!("refs/remotes/{}/{branch}", remote.name().unwrap().as_bstr()); + repo.try_find_reference(&branch_ref) + .unwrap() + .unwrap() + .peel_to_commit() + .unwrap() + .id + } + }; + let tree_id = repo - .find_object(commit_id) + .find_object(rev) .unwrap() .into_commit() .tree_id() @@ -407,7 +450,8 @@ fn download_repo_to_temp( .unwrap(); index.write(Default::default()).unwrap(); - repo + + (repo, Some(rev)) } else { let prepared_clone = prepare_clone(url.clone(), tmp_dir.path()) .map_err(|e| GixDownloadedError::Clone(url.to_string(), Box::new(e)))?; @@ -420,10 +464,10 @@ fn download_repo_to_temp( .main_worktree(gix::progress::Discard, &gix::interrupt::IS_INTERRUPTED) .map_err(|e| GixDownloadedError::Checkout(tmp_dir.to_path_buf(), Box::new(e)))?; - repo + (repo, None) }; - Ok(repo) + Ok(ret) } impl ProjectRead for GixDownloadedProject { diff --git a/core/src/project/local_src.rs b/core/src/project/local_src.rs index fe8b7aff..e720578d 100644 --- a/core/src/project/local_src.rs +++ b/core/src/project/local_src.rs @@ -435,4 +435,8 @@ impl ProjectRead for LocalSrcProject { panic!("`LocalSrcProject` without `nominal_path` does not have any project sources"); } } + + fn base_path_for_usage_resolver(&self) -> Option<&Utf8Path> { + Some(&self.project_path) + } } diff --git a/core/src/project/memory.rs b/core/src/project/memory.rs index 83674569..efbd2407 100644 --- a/core/src/project/memory.rs +++ b/core/src/project/memory.rs @@ -159,4 +159,12 @@ impl ProjectRead for InMemoryProject { debug_assert!(!self.nominal_sources.is_empty()); Ok(self.nominal_sources.clone()) } + + fn project_root(&self) -> Option<&camino::Utf8Path> { + None + } + + fn base_path_for_usage_resolver(&self) -> Option<&camino::Utf8Path> { + None + } } diff --git a/core/src/project/mod.rs b/core/src/project/mod.rs index b122f894..3b691291 100644 --- a/core/src/project/mod.rs +++ b/core/src/project/mod.rs @@ -158,9 +158,9 
@@ pub trait ProjectRead { // Optional and helpers /// Returns the local filesystem root path of this project, if available. - fn project_root(&self) -> Option<&Utf8Path> { - None - } + fn project_root(&self) -> Option<&Utf8Path>; + + fn base_path_for_usage_resolver(&self) -> Option<&Utf8Path>; fn get_info(&self) -> Result, Self::Error> { Ok(self.get_project()?.0) @@ -280,6 +280,14 @@ impl ProjectRead for &T { (*self).get_project() } + fn project_root(&self) -> Option<&Utf8Path> { + (*self).project_root() + } + + fn base_path_for_usage_resolver(&self) -> Option<&Utf8Path> { + (*self).base_path_for_usage_resolver() + } + type SourceReader<'a> = T::SourceReader<'a> where @@ -356,6 +364,14 @@ impl ProjectRead for &mut T { (**self).get_project() } + fn project_root(&self) -> Option<&Utf8Path> { + (**self).project_root() + } + + fn base_path_for_usage_resolver(&self) -> Option<&Utf8Path> { + (**self).base_path_for_usage_resolver() + } + type SourceReader<'a> = T::SourceReader<'a> where @@ -435,6 +451,12 @@ pub trait ProjectReadAsync { >, >; + // These don't need to be async + + fn project_root_async(&self) -> impl Future>; + + fn base_path_for_usage_resolver_async(&self) -> impl Future>; + type SourceReader<'a>: AsyncRead + Unpin where Self: 'a; @@ -612,6 +634,14 @@ impl ProjectReadAsync for &T { (**self).get_project_async() } + fn project_root_async(&self) -> impl Future> { + (**self).project_root_async() + } + + fn base_path_for_usage_resolver_async(&self) -> impl Future> { + (**self).base_path_for_usage_resolver_async() + } + type SourceReader<'a> = T::SourceReader<'a> where @@ -706,6 +736,14 @@ impl ProjectReadAsync for &mut T { (**self).get_project_async() } + fn project_root_async(&self) -> impl Future> { + (**self).project_root_async() + } + + fn base_path_for_usage_resolver_async(&self) -> impl Future> { + (**self).base_path_for_usage_resolver_async() + } + type SourceReader<'a> = T::SourceReader<'a> where @@ -1036,6 +1074,14 @@ where self.inner.get_project() } 
+ async fn project_root_async(&self) -> Option<&Utf8Path> { + self.inner.project_root() + } + + async fn base_path_for_usage_resolver_async(&self) -> Option<&Utf8Path> { + self.inner.base_path_for_usage_resolver() + } + type SourceReader<'a> = AsAsyncReader<::SourceReader<'a>> where @@ -1091,6 +1137,15 @@ impl ProjectRead for AsSyncProjectTokio { self.runtime.block_on(self.inner.get_project_async()) } + fn project_root(&self) -> Option<&Utf8Path> { + self.runtime.block_on(self.inner.project_root_async()) + } + + fn base_path_for_usage_resolver(&self) -> Option<&Utf8Path> { + self.runtime + .block_on(self.inner.base_path_for_usage_resolver_async()) + } + type SourceReader<'a> = AsSyncReaderTokio<::SourceReader<'a>> where diff --git a/core/src/project/reference.rs b/core/src/project/reference.rs index d5001004..52b73ce2 100644 --- a/core/src/project/reference.rs +++ b/core/src/project/reference.rs @@ -10,8 +10,8 @@ use crate::{ project::ProjectRead, }; -// Arc wrapper around project to make cloning possible -// (necessary for compatibility with `MemoryResolver`) +/// Arc wrapper around project to make cloning possible +/// (necessary for compatibility with `MemoryResolver`) #[derive(Debug)] pub struct ProjectReference { project: Arc, @@ -63,6 +63,14 @@ impl ProjectRead for ProjectReference { fn sources(&self, ctx: &ProjectContext) -> Result, Self::Error> { self.project.sources(ctx) } + + fn project_root(&self) -> Option<&camino::Utf8Path> { + self.project.project_root() + } + + fn base_path_for_usage_resolver(&self) -> Option<&camino::Utf8Path> { + todo!() + } } #[cfg(feature = "filesystem")] diff --git a/core/src/project/utils.rs b/core/src/project/utils.rs index 54d68962..8d195500 100644 --- a/core/src/project/utils.rs +++ b/core/src/project/utils.rs @@ -4,10 +4,14 @@ use std::{ fmt::Display, io::{self, Read}, + ops::Deref, }; use camino::{Utf8Component, Utf8Path, Utf8PathBuf}; -use fluent_uri::pct_enc::{EString, encoder::IData}; +use fluent_uri::{ + Iri, + 
pct_enc::{EString, encoder::IData}, +}; use serde::{Deserialize, Serialize}; use thiserror::Error; use typed_path::Utf8UnixPathBuf; @@ -16,8 +20,8 @@ use zip::{self, result::ZipError}; use crate::model::InterchangeProjectUsage; -/// Project identifier IRI. Constructed by -// TODO: steps +/// Project identifier IRI. Always a valid IRI. +// TODO: construction steps #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] pub struct Identifier(String); @@ -43,6 +47,10 @@ impl Identifier { Self(make_identifier_iri(publisher, name)) } + pub fn from_iri(iri: &Iri) -> Identifier { + Self(iri.to_string()) + } + pub fn as_str(&self) -> &str { &self.0 } @@ -50,6 +58,16 @@ impl Identifier { pub fn into_string(self) -> String { self.0 } + + /// Construct `Identifier` from a String, assuming it's a valid IRI + pub fn from_iri_unchecked(iri: String) -> Identifier { + Self(iri) + } + + /// Construct `Identifier` from `&str`, assuming it's a valid IRI + pub fn from_iri_unchecked_str(iri: &str) -> Identifier { + Self(iri.to_owned()) + } } impl AsRef for Identifier { @@ -58,6 +76,27 @@ impl AsRef for Identifier { } } +impl Deref for Identifier { + type Target = str; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From> for Identifier { + fn from(value: Iri) -> Self { + Self(value.into_string()) + } +} + +impl From for Iri { + fn from(value: Identifier) -> Self { + // Identifier is always valid IRI + Iri::parse(value.0).unwrap() + } +} + // impl From for Identifier { // fn from(value: Usage) -> Self { // } diff --git a/core/src/resolve/combined.rs b/core/src/resolve/combined.rs index df985a65..65a7b894 100644 --- a/core/src/resolve/combined.rs +++ b/core/src/resolve/combined.rs @@ -234,6 +234,7 @@ impl< usage: &InterchangeProjectUsage, base_path: Option>, ) -> Result, Self::Error> { + let base_path = base_path.as_ref().map(|p| p.as_ref()); let mut at_least_one_supports = false; // If the file resolver does not outright reject the IRI type, 
@@ -241,12 +242,12 @@ impl< // TODO: autodetect git (and possibly other VCSs), and use appropriate (e.g. git) resolver for them. if let Some(file_resolver) = &self.file_resolver { match file_resolver - .resolve_read(usage) + .resolve_read(usage, base_path) .map_err(CombinedResolverError::File)? { - ResolutionOutcome::UnsupportedUsageType(msg) => { - log::debug!("file resolver rejected IRI `{uri}`: {msg}"); - } // Just continue + ResolutionOutcome::UnsupportedUsageType { usage, reason } => { + log::debug!("file resolver does not support usage type of {usage}: {reason}"); + } ResolutionOutcome::Resolved(r) => { //at_least_one_supports = true; return Ok(ResolutionOutcome::Resolved(CombinedIterator { @@ -254,11 +255,16 @@ impl< locals: IndexMap::new(), })); } - ResolutionOutcome::NotFound(msg) => { - return Ok(ResolutionOutcome::NotFound(format!( - "failed to resolve as file: {msg}" - ))); + ResolutionOutcome::NotFound(usage, reason) => { + at_least_one_supports = true; + log::debug!("file resolver did not find {usage}: {reason}"); + } + ResolutionOutcome::Unresolvable(msg) => { + // Even though usage got rejected, it is still considered by this resolver + at_least_one_supports = true; + log::debug!("file resolver rejected usage: {msg}") } + ResolutionOutcome::InvalidUsage(..) => unreachable!(), } } @@ -267,7 +273,7 @@ impl< if let Some(local_resolver) = &self.local_resolver { match local_resolver - .resolve_read(uri) + .resolve_read(usage, base_path) .map_err(CombinedResolverError::Local)? 
{ ResolutionOutcome::Resolved(projects) => { @@ -275,9 +281,7 @@ impl< for res in projects { match res { Err(err) => { - log::debug!( - "local resolver rejected project with IRI `{uri}`: {err}", - ); + log::debug!("local resolver rejected project {usage}: {err}",); } Ok(project) => match project.get_project() { Ok((Some(info), Some(meta))) => { @@ -285,25 +289,28 @@ impl< } Ok(_) => { log::debug!( - "local resolver rejected project with IRI `{uri}` due to missing project info/meta", + "local resolver rejected project {usage} due to missing project info/meta", ); } Err(err) => { - log::debug!( - "local resolver rejected project with IRI `{uri}`: {err}", - ); + log::debug!("local resolver rejected project {usage}: {err}"); } }, } } } - ResolutionOutcome::UnsupportedUsageType(msg) => { - log::debug!("local resolver rejected IRI `{uri}`: {msg}"); + ResolutionOutcome::UnsupportedUsageType { usage, reason } => { + log::debug!("local resolver does not support usage type of {usage}: {reason}"); } - ResolutionOutcome::NotFound(msg) => { + ResolutionOutcome::NotFound(usage, reason) => { at_least_one_supports = true; - log::debug!("local resolver unable to resolve IRI `{uri}`: {msg}"); + log::debug!("local resolver did not find {usage}: {reason}"); } + ResolutionOutcome::Unresolvable(msg) => { + at_least_one_supports = true; + log::debug!("local resolver rejected usage: {msg}") + } + ResolutionOutcome::InvalidUsage(..) => unreachable!(), }; } @@ -313,15 +320,15 @@ impl< if let Some(remote_resolver) = &self.remote_resolver { // Skip over remote resolution if unresolvable or if only invalid projects are produced. match remote_resolver - .resolve_read(uri) + .resolve_read(usage, base_path) .map_err(CombinedResolverError::Remote)? 
{ - ResolutionOutcome::UnsupportedUsageType(msg) => { - log::debug!("remote resolver rejected IRI `{uri}`: {msg}"); + ResolutionOutcome::UnsupportedUsageType { usage, reason } => { + log::debug!("remote resolver rejected {usage}: {reason}"); } - ResolutionOutcome::NotFound(msg) => { + ResolutionOutcome::NotFound(usage, reason) => { at_least_one_supports = true; - log::debug!("remote resolver unable to resolve IRI `{uri}`: {msg}"); + log::debug!("remote resolver unable to resolve {usage}: {reason}"); } ResolutionOutcome::Resolved(remote_projects) => { at_least_one_supports = true; @@ -332,7 +339,7 @@ impl< match remote_projects.peek() { Some(Err(err)) => { log::debug!( - "remote resolver skipping project for IRI `{uri}` due to: {err}" + "remote resolver skipping project for {usage} due to: {err}" ); remote_projects.next(); } @@ -354,13 +361,13 @@ impl< } Ok(_) => { log::debug!( - "remote resolver skipping project for IRI `{uri}` due to missing info/meta" + "remote resolver skipping project for {usage} due to missing info/meta" ); remote_projects.next(); } Err(err) => { log::debug!( - "remote resolver skipping project for IRI `{uri}`: {err}" + "remote resolver skipping project for {usage}: {err}" ); remote_projects.next(); } @@ -368,20 +375,25 @@ impl< } None => { log::debug!( - "remote resolver unable to find valid project for IRI `{uri}`" + "remote resolver unable to find valid project for {usage}" ); break; } } } } + ResolutionOutcome::Unresolvable(msg) => { + at_least_one_supports = true; + log::debug!("remote resolver rejected usage: {msg}") + } + ResolutionOutcome::InvalidUsage(..) => unreachable!(), } } // Finally try the centralised registry if neither file/remote gave anything useful if let Some(index_resolver) = &self.index_resolver { match index_resolver - .resolve_read(uri) + .resolve_read(usage, base_path) .map_err(CombinedResolverError::Registry)? 
{ ResolutionOutcome::Resolved(x) => { @@ -390,24 +402,33 @@ impl< locals, })); } - ResolutionOutcome::UnsupportedUsageType(msg) => { - log::debug!("registry resolver rejected IRI `{uri}` due to: {msg}"); + ResolutionOutcome::UnsupportedUsageType { usage, reason } => { + log::debug!( + "registry resolver does not support usage type of {usage}: {reason}" + ); } - ResolutionOutcome::NotFound(msg) => { + ResolutionOutcome::NotFound(usage, reason) => { at_least_one_supports = true; - log::debug!("registry resolver unable to resolve IRI `{uri}`: {msg}"); + log::debug!("registry resolver unable to resolve {usage}: {reason}"); } + ResolutionOutcome::Unresolvable(msg) => { + at_least_one_supports = true; + log::debug!("registry resolver rejected usage: {msg}") + } + ResolutionOutcome::InvalidUsage(..) => unreachable!(), }; } // As a last resort, use only locally cached projects, if any were found if !at_least_one_supports { - Ok(ResolutionOutcome::UnsupportedUsageType( - "no resolver accepted the IRI".to_owned(), - )) + Ok(ResolutionOutcome::UnsupportedUsageType { + usage: usage.to_owned(), + reason: String::from("no resolver supports the project type"), + }) } else if locals.is_empty() { Ok(ResolutionOutcome::NotFound( - "no resolver was able to resolve the IRI".to_owned(), + usage.to_owned(), + String::from("no resolver was able to resolve the project"), )) } else { Ok(ResolutionOutcome::Resolved(CombinedIterator { diff --git a/core/src/resolve/env.rs b/core/src/resolve/env.rs index 69b2a65a..e291a622 100644 --- a/core/src/resolve/env.rs +++ b/core/src/resolve/env.rs @@ -7,6 +7,7 @@ use camino::Utf8Path; use crate::{ env::{ReadEnvironment, ReadEnvironmentAsync}, model::InterchangeProjectUsage, + project::utils::Identifier, resolve::{ResolutionOutcome, ResolveRead, ResolveReadAsync}, }; @@ -25,22 +26,23 @@ impl ResolveRead for EnvResolver { fn resolve_read( &self, usage: &InterchangeProjectUsage, - base_path: Option>, + _base_path: Option>, ) -> Result, Self::Error> { - 
let versions = self.env.versions(uri)?; + let identifier = Identifier::from_interchange_usage(usage); + let versions = self.env.versions(&identifier)?; let projects: Self::ResolvedStorages = versions .into_iter() .map( |version| -> Result { - self.env.get_project(uri.clone(), version?) + self.env.get_project(&identifier, version?) }, ) .collect(); if projects.is_empty() { Ok(ResolutionOutcome::NotFound( usage.to_owned(), - String::from("no versions of `{uri}` found in environment"), + String::from("no versions of the project found in environment"), )) } else { Ok(ResolutionOutcome::Resolved(projects)) @@ -65,21 +67,24 @@ impl ResolveReadAsync for EnvResolver { async fn resolve_read_async( &self, usage: &InterchangeProjectUsage, - base_path: Option>, + _base_path: Option>, ) -> Result, Self::Error> { use futures::StreamExt as _; + let identifier = Identifier::from_interchange_usage(usage); - let versions: Vec> = self.env.versions_async(uri).await?.collect().await; + let versions: Vec> = + self.env.versions_async(&identifier).await?.collect().await; if versions.is_empty() { - return Ok(ResolutionOutcome::NotFound(format!( - "no versions of `{uri}` found in environment" - ))); + return Ok(ResolutionOutcome::NotFound( + usage.to_owned(), + String::from("no versions of the project found in environment"), + )); } let projects = futures::future::join_all( versions .into_iter() - .map(|version| async { self.env.get_project_async(uri.clone(), version?).await }), + .map(|version| async { self.env.get_project_async(&identifier, version?).await }), ) .await; diff --git a/core/src/resolve/file.rs b/core/src/resolve/file.rs index d259b61c..1964b0ce 100644 --- a/core/src/resolve/file.rs +++ b/core/src/resolve/file.rs @@ -15,7 +15,7 @@ use thiserror::Error; use crate::{ context::ProjectContext, lock::Source, - model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw, InterchangeProjectUsage}, project::{ 
self, ProjectRead, local_kpar::{LocalKParError, LocalKParProject}, @@ -88,9 +88,8 @@ impl FileResolver { if let Some(root_part) = &self.relative_path_root { root_part.join(&path) } else { - return Ok(ResolutionOutcome::UnsupportedUsageType(format!( - "cannot resolve relative file without a specified root directory: {}", - path + return Ok(ResolutionOutcome::Unresolvable(format!( + "cannot resolve relative path `{path}` without a specified root directory" ))); } } else { @@ -125,13 +124,48 @@ impl FileResolver { fn resolve_general( &self, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + // TODO: maybe pack this into the path usage (of course ignore it for serde)? + base_path: Option>, ) -> Result, FileResolverError> { - match try_file_uri_to_path(uri)? { - Some(path) => self.resolve_platform_path(path), - None => Ok(ResolutionOutcome::UnsupportedUsageType(format!( - "`{uri}` is not a file URL", - ))), + match usage { + InterchangeProjectUsage::Resource { + resource: url, + // TODO: don't ignore, check that the project version satisfies this + version_constraint: _, + } + | InterchangeProjectUsage::Url { + url, + publisher: _, + name: _, + } => match try_file_uri_to_path(url)? 
{ + Some(path) => self.resolve_platform_path(path), + None => Ok(ResolutionOutcome::UnsupportedUsageType { + usage: usage.to_owned(), + reason: String::from("resource is not a file URL"), + }), + }, + // TODO: we must check somewhere that publisher/name match actual + InterchangeProjectUsage::Path { + path, + publisher: _, + name: _, + } => { + if path.is_absolute() { + self.resolve_platform_path(path.into()) + } else if let Some(base) = base_path { + let abs_path = base.as_ref().join(path.as_str()); + self.resolve_platform_path(abs_path) + } else { + Ok(ResolutionOutcome::Unresolvable(format!( + "cannot resolve relative path usage without a base path" + ))) + } + } + _ => Ok(ResolutionOutcome::UnsupportedUsageType { + usage: usage.to_owned(), + reason: String::from("not a path usage"), + }), } } } @@ -281,7 +315,7 @@ impl ResolveRead for FileResolver { usage: &InterchangeProjectUsage, base_path: Option>, ) -> Result, Self::Error> { - Ok(match self.resolve_general(uri)? { + Ok(match self.resolve_general(usage, base_path)? { ResolutionOutcome::Resolved(path) => ResolutionOutcome::Resolved(vec![ Ok(FileResolverProject::LocalSrcProject(LocalSrcProject { nominal_path: None, @@ -291,10 +325,14 @@ impl ResolveRead for FileResolver { LocalKParProject::new_guess_root(path)?, )), ]), - ResolutionOutcome::UnsupportedUsageType(msg) => { - ResolutionOutcome::UnsupportedUsageType(msg) + ResolutionOutcome::UnsupportedUsageType { usage, reason } => { + ResolutionOutcome::UnsupportedUsageType { usage, reason } + } + ResolutionOutcome::NotFound(usage, reason) => { + ResolutionOutcome::NotFound(usage, reason) } - ResolutionOutcome::NotFound(msg) => ResolutionOutcome::NotFound(msg), + ResolutionOutcome::Unresolvable(msg) => ResolutionOutcome::Unresolvable(msg), + ResolutionOutcome::InvalidUsage(..) 
=> unreachable!(), }) } } diff --git a/core/src/resolve/gix_git.rs b/core/src/resolve/gix_git.rs index d03fc167..055c457b 100644 --- a/core/src/resolve/gix_git.rs +++ b/core/src/resolve/gix_git.rs @@ -1,10 +1,14 @@ +use std::convert::Infallible; + use camino::Utf8Path; use fluent_uri::component::Scheme; use thiserror::Error; use crate::{ - model::InterchangeProjectUsageRaw, - project::gix_git_download::{GixDownloadedError, GixDownloadedProject}, + model::{InterchangeProjectUsage, InterchangeProjectUsageRaw}, + project::gix_git_download::{ + GixDownloadedError, GixDownloadedProject, GixDownloadedProjectExact, + }, resolve::{ ResolutionOutcome, ResolveRead, file::SCHEME_FILE, @@ -30,7 +34,7 @@ pub const SCHEME_GIT_HTTPS: &Scheme = Scheme::new_or_panic("git+https"); impl ResolveRead for GitResolver { type Error = GitResolverError; - type ProjectStorage = GixDownloadedProject; + type ProjectStorage = GixDownloadedProjectExact; type ResolvedStorages = std::iter::Once>; @@ -39,45 +43,100 @@ impl ResolveRead for GitResolver { usage: &InterchangeProjectUsage, base_path: Option>, ) -> Result, Self::Error> { - let scheme = uri.scheme(); - - if ![ - SCHEME_HTTP, - SCHEME_HTTPS, - SCHEME_FILE, - SCHEME_SSH, - SCHEME_GIT_HTTP, - SCHEME_GIT_HTTPS, - SCHEME_GIT_FILE, - SCHEME_GIT_SSH, - ] - .contains(&scheme) - { - return Ok(ResolutionOutcome::UnsupportedUsageType(format!( - "url scheme `{}` of IRI `{}` is not known to be git-compatible", - scheme, - uri.as_str() - ))); - } - - Ok(ResolutionOutcome::Resolved(std::iter::once( - // TODO: use trim_prefix() once it's stable - GixDownloadedProject::new(uri.as_str().strip_prefix("git+").unwrap_or(uri.as_str())) - .map_err(|e| e.into()), - ))) + // TODO: should URL usages be supported for git? 
+ let outcome = match usage { + InterchangeProjectUsage::Resource { + resource, + version_constraint, + } => { + let scheme = resource.scheme(); + + if ![ + SCHEME_HTTP, + SCHEME_HTTPS, + SCHEME_FILE, + SCHEME_SSH, + SCHEME_GIT_HTTP, + SCHEME_GIT_HTTPS, + SCHEME_GIT_FILE, + SCHEME_GIT_SSH, + ] + .contains(&scheme) + { + return Ok(ResolutionOutcome::UnsupportedUsageType { + usage: usage.to_owned(), + reason: format!("url scheme `{scheme}` is not known to be git-compatible"), + }); + } + + ResolutionOutcome::Resolved(std::iter::once( + // TODO: use trim_prefix() once it's stable + GixDownloadedProjectExact::new_download_find( + resource + .as_str() + .strip_prefix("git+") + .unwrap_or(resource.as_str()), + None, + None::<(&str, &str)>, + ) + .map_err(|e| e.into()), + )) + } + InterchangeProjectUsage::Git { + git: iri, + id, + publisher, + name, + } => { + let scheme = iri.scheme(); + + if ![ + SCHEME_HTTP, + SCHEME_HTTPS, + SCHEME_FILE, + SCHEME_SSH, + SCHEME_GIT_HTTP, + SCHEME_GIT_HTTPS, + SCHEME_GIT_FILE, + SCHEME_GIT_SSH, + ] + .contains(&scheme) + { + return Ok(ResolutionOutcome::UnsupportedUsageType { + usage: usage.to_owned(), + reason: format!("url scheme `{scheme}` is not known to be git-compatible"), + }); + } + + ResolutionOutcome::Resolved(std::iter::once( + // TODO: use trim_prefix() once it's stable + GixDownloadedProjectExact::new_download_find( + iri.as_str().strip_prefix("git+").unwrap_or(iri.as_str()), + Some(id), + Some((publisher, name)), + ) + .map_err(|e| e.into()), + )) + } + _ => ResolutionOutcome::UnsupportedUsageType { + usage: usage.to_owned(), + reason: String::from("not a url/resource usage"), + }, + }; + Ok(outcome) } - fn resolve_read_raw>( - &self, - usage: &InterchangeProjectUsageRaw, - base_path: Option>, - ) -> Result, Self::Error> { - if let Some(stripped_uri) = uri.as_ref().strip_prefix("git+") { - self.default_resolve_read_raw(stripped_uri) - } else { - self.default_resolve_read_raw(uri) - } - } + // fn resolve_read_raw( + // 
&self, + // usage: &InterchangeProjectUsageRaw, + // base_path: Option>, + // ) -> Result, Self::Error> { + // if let Some(stripped_uri) = uri.as_ref().strip_prefix("git+") { + // self.default_resolve_read_raw(stripped_uri) + // } else { + // self.default_resolve_read_raw(uri) + // } + // } } #[cfg(test)] diff --git a/core/src/resolve/memory.rs b/core/src/resolve/memory.rs index e95e8472..fd95c671 100644 --- a/core/src/resolve/memory.rs +++ b/core/src/resolve/memory.rs @@ -3,10 +3,12 @@ use std::{collections::HashMap, convert::Infallible}; +use camino::Utf8Path; use fluent_uri::{Iri, component::Scheme}; use crate::{ - project::ProjectRead, + model::InterchangeProjectUsage, + project::{ProjectRead, utils::Identifier}, resolve::{ResolutionOutcome, ResolveRead}, }; @@ -46,6 +48,7 @@ impl From, Vec)>> pub trait IRIPredicate { fn accept_iri(&self, iri: &Iri) -> bool; + // TODO: be more efficient, don't clone fn accept_iri_raw(&self, iri: &str) -> bool { match Iri::parse(iri.to_string()) { Ok(iri) => self.accept_iri(&iri), @@ -86,17 +89,32 @@ impl ResolveRead fn resolve_read( &self, usage: &InterchangeProjectUsage, - base_path: Option>, + _base_path: Option>, ) -> Result, Self::Error> { - if !self.iri_predicate.accept_iri(uri) { - return Ok(ResolutionOutcome::UnsupportedUsageType(format!( - "invalid IRI `{uri}` for this memory resolver" - ))); - } - - Ok(match self.projects.get(uri) { + let identifier = match usage { + InterchangeProjectUsage::Resource { + resource, + version_constraint: _, + } => { + // TODO: should publisher/name identifiers be filtered? + if !self.iri_predicate.accept_iri(resource) { + return Ok(ResolutionOutcome::Unresolvable(format!( + "invalid IRI `{resource}` for this memory resolver" + ))); + } + Identifier::from_iri(resource) + } + _ => Identifier::from_interchange_usage(usage), + }; + + // TODO: be more efficient, avoid reparsing IRI. Maybe make `Identifier` contain `Iri`? 
+ let iri: Iri = identifier.into(); + Ok(match self.projects.get(&iri) { Some(xs) => ResolutionOutcome::Resolved(xs.iter().map(|x| Ok(x.clone())).collect()), - None => ResolutionOutcome::NotFound(uri.to_string()), + None => ResolutionOutcome::NotFound( + usage.to_owned(), + String::from("memory resolver does not contain this project"), + ), }) } } diff --git a/core/src/resolve/mod.rs b/core/src/resolve/mod.rs index ac8e3347..7e472332 100644 --- a/core/src/resolve/mod.rs +++ b/core/src/resolve/mod.rs @@ -74,7 +74,7 @@ impl ResolutionOutcome { pub trait ResolveRead { type Error: ErrorBound; - type ProjectStorage: ProjectRead; + type ProjectStorage: ProjectRead; // + Clone; type ResolvedStorages: IntoIterator>; // TODO: move path-specific docs to FileResolver diff --git a/core/src/resolve/reqwest_http.rs b/core/src/resolve/reqwest_http.rs index 18ff7876..7887e443 100644 --- a/core/src/resolve/reqwest_http.rs +++ b/core/src/resolve/reqwest_http.rs @@ -3,6 +3,7 @@ use std::{convert::Infallible, io, pin::Pin, sync::Arc}; +use camino::Utf8Path; use fluent_uri::component::Scheme; use futures::AsyncRead; use thiserror::Error; @@ -11,7 +12,7 @@ use crate::{ auth::HTTPAuthentication, context::ProjectContext, lock::Source, - model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, + model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw, InterchangeProjectUsage}, project::{ ProjectRead, ProjectReadAsync, reqwest_kpar_download::ReqwestKparDownloadedProject, reqwest_src::ReqwestSrcProjectAsync, @@ -266,28 +267,42 @@ impl ResolveReadAsync for HTTPResolverAsync async fn resolve_read_async( &self, usage: &InterchangeProjectUsage, - base_path: Option>, + _base_path: Option>, ) -> Result, Self::Error> { // Try to resolve as a HTTP src project. 
- Ok( - if uri.scheme() == SCHEME_HTTP || uri.scheme() == SCHEME_HTTPS { - if let Ok(url) = reqwest::Url::parse(uri.as_str()) { - ResolutionOutcome::Resolved(futures::stream::iter(HTTPProjects { - client: self.client.clone(), - url, - src_done: false, - kpar_done: false, - lax: self.lax, - auth_policy: self.auth_policy.clone(), - // prefer_ranged: self.prefer_ranged, - })) + let outcome = match usage { + InterchangeProjectUsage::Resource { resource: iri, .. } + | InterchangeProjectUsage::Url { url: iri, .. } => { + if iri.scheme() == SCHEME_HTTP || iri.scheme() == SCHEME_HTTPS { + if let Ok(url) = reqwest::Url::parse(iri.as_str()) { + ResolutionOutcome::Resolved(futures::stream::iter(HTTPProjects { + client: self.client.clone(), + url, + src_done: false, + kpar_done: false, + lax: self.lax, + auth_policy: self.auth_policy.clone(), + // prefer_ranged: self.prefer_ranged, + })) + } else { + ResolutionOutcome::UnsupportedUsageType { + usage: usage.to_owned(), + reason: String::from("invalid http(s) URL"), + } + } } else { - ResolutionOutcome::UnsupportedUsageType("invalid http(s) URL".to_string()) + ResolutionOutcome::UnsupportedUsageType { + usage: usage.to_owned(), + reason: String::from("not an http(s) URL"), + } } - } else { - ResolutionOutcome::UnsupportedUsageType("not an http(s) URL".to_string()) + } + _ => ResolutionOutcome::UnsupportedUsageType { + usage: usage.to_owned(), + reason: String::from("not a url/resource usage"), }, - ) + }; + Ok(outcome) } } @@ -299,6 +314,7 @@ mod tests { use crate::{ auth::Unauthenticated, + model::InterchangeProjectUsageRaw, project::ProjectRead, resolve::{ ResolutionOutcome, ResolveRead, ResolveReadAsync, net_utils::create_reqwest_client, @@ -339,8 +355,13 @@ mod tests { .unwrap(), )); - let ResolutionOutcome::Resolved(projects) = - resolver.resolve_read_raw(format!("http://{}/foo/", host))? 
+ let ResolutionOutcome::Resolved(projects) = resolver.resolve_read_raw( + &InterchangeProjectUsageRaw::Resource { + resource: format!("http://{}/foo/", host), + version_constraint: None, + }, + None, + )? else { panic!() }; @@ -386,7 +407,14 @@ mod tests { "http://www.example.invalid/foo" }; - let ResolutionOutcome::Resolved(projects) = resolver.resolve_read_raw(url)? else { + let ResolutionOutcome::Resolved(projects) = resolver.resolve_read_raw( + &InterchangeProjectUsageRaw::Resource { + resource: url.to_owned(), + version_constraint: None, + }, + None, + )? + else { panic!() }; let projects: Vec> = diff --git a/core/src/resolve/typed_resolver.rs b/core/src/resolve/typed_resolver.rs index 57fc124a..ec25a8c6 100644 --- a/core/src/resolve/typed_resolver.rs +++ b/core/src/resolve/typed_resolver.rs @@ -1,3 +1,4 @@ +use camino::Utf8Path; use url::Url; use crate::{ @@ -87,6 +88,7 @@ impl TypedResolver { pub fn resolve( &self, usage: &InterchangeProjectUsage, + base_path: Option>, ) -> Result, impl ErrorBound> { match usage { InterchangeProjectUsage::Resource { diff --git a/core/src/solve/pubgrub.rs b/core/src/solve/pubgrub.rs index 88797e27..d4bd8584 100644 --- a/core/src/solve/pubgrub.rs +++ b/core/src/solve/pubgrub.rs @@ -1,7 +1,7 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 -use fluent_uri::Iri; +use camino::{Utf8Path, Utf8PathBuf}; use pubgrub::{DefaultStringReporter, DependencyProvider, Reporter, VersionSet}; use std::{ @@ -14,27 +14,24 @@ use std::{ use thiserror::Error; use crate::{ - model::{ - InterchangeProjectInfo, InterchangeProjectMetadataRaw, InterchangeProjectUsage, - InterchangeProjectUsageG, - }, - project::ProjectRead, - resolve::ResolveRead, + model::{InterchangeProjectInfo, InterchangeProjectMetadataRaw, InterchangeProjectUsage}, + project::{ProjectRead, utils::Identifier}, + resolve::{ResolutionOutcome, ResolveRead}, }; #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum 
DependencyIdentifier { /// Dependencies that are to be resolved. - Requested(Vec), + Requested(Vec, Option), /// Found dependencies. Note that this does not mean that the /// required version was found, just that the IRI was resolved. - Remote(fluent_uri::Iri), + Remote(InterchangeProjectUsage, Option), } impl Display for DependencyIdentifier { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - DependencyIdentifier::Requested(_requested) => { + DependencyIdentifier::Requested(_requested, _base_path) => { write!(f, "requested project(s)") // if requested.len() == 1 { // let req = &requested[0]; @@ -63,7 +60,7 @@ impl Display for DependencyIdentifier { // write!(f, "]") } - DependencyIdentifier::Remote(iri) => write!(f, "{}", iri), + DependencyIdentifier::Remote(iri, ..) => write!(f, "{}", iri), } } } @@ -180,7 +177,7 @@ impl VersionSet for DiscreteHashSet { } type ResolvedCandidates = HashMap< - fluent_uri::Iri, + Identifier, Vec<( InterchangeProjectInfo, InterchangeProjectMetadataRaw, @@ -195,13 +192,23 @@ pub struct ProjectSolver { resolver: R, } +// easyfind4 +// Resolving path deps: +// - absolute paths: no special treatment, will be resolved for all local/remote projects +// - relative paths: +// - if dep of local, resolve, taking as base the path of the using project +// - if dep of git, resolve, taking as base the path of the locally cloned repo, +// BUT RESTRICTING IT TO THE REPO (using file resolver's limiting functionality) + /// Returned Vec will have `len >= 1` fn resolve_candidates( resolver: &R, - uri: &fluent_uri::Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, cache: &mut ResolvedCandidates, ) -> Result, InternalSolverError> { - let entry = cache.entry(uri.clone()); + let identifier = Identifier::from_interchange_usage(&usage); + let entry = cache.entry(identifier); match entry { Entry::Occupied(occupied_entry) => Ok(occupied_entry @@ -213,23 +220,21 @@ fn resolve_candidates( let mut found = vec![]; match resolver 
- .resolve_read(uri) + .resolve_read(usage, base_path) .map_err(InternalSolverError::Resolution)? { - crate::resolve::ResolutionOutcome::UnsupportedUsageType(msg) => { - return Err(InternalSolverError::UnsupportedIriType(format!( - "unsupported IRI type of `{uri}`: {msg}" - ))); + ResolutionOutcome::UnsupportedUsageType { usage, reason } => { + return Err(InternalSolverError::UnsupportedUsageType { usage, reason }); } - crate::resolve::ResolutionOutcome::NotFound(msg) => { - return Err(InternalSolverError::NotFound(uri.as_str().into(), msg)); + ResolutionOutcome::NotFound(usage, reason) => { + return Err(InternalSolverError::NotFound(usage, reason)); } - crate::resolve::ResolutionOutcome::Resolved(alternatives) => { + ResolutionOutcome::Resolved(alternatives) => { for alternative in alternatives { let project = match alternative { Ok(project) => project, Err(e) => { - log::debug!("candidate project for `{uri}` is error: {e}"); + log::debug!("candidate project for {usage} is error: {e}"); continue; } }; @@ -238,13 +243,13 @@ fn resolve_candidates( Ok((Some(info), Some(meta))) => (info, meta), Ok(incomplete) => { log::debug!( - "candidate project for `{uri}` failed to get info or meta: {incomplete:?}" + "candidate project for {usage} failed to get info or meta: {incomplete:?}" ); continue; } Err(e) => { log::debug!( - "candidate project for `{uri}` failed to get info and meta: {e}" + "candidate project for {usage} failed to get info and meta: {e}" ); continue; } @@ -253,7 +258,7 @@ fn resolve_candidates( let validated_info: InterchangeProjectInfo = match info.try_into() { Ok(i) => i, Err(e) => { - log::debug!("candidate project for `{uri}` has invalid info: {e}"); + log::debug!("candidate project for {usage} has invalid info: {e}"); continue; } }; @@ -261,9 +266,13 @@ fn resolve_candidates( found.push((validated_info, meta, project)); } if found.is_empty() { - return Err(InternalSolverError::NoValidCandidates(uri.as_str().into())); + return 
Err(InternalSolverError::NoValidCandidates(usage.to_owned())); } } + ResolutionOutcome::Unresolvable(msg) => { + return Err(InternalSolverError::Unresolvable(msg)); + } + ResolutionOutcome::InvalidUsage(..) => unreachable!(), } let result: Vec<(InterchangeProjectInfo, InterchangeProjectMetadataRaw)> = found @@ -280,8 +289,8 @@ fn resolve_candidates( fn compute_deps( resolver: &R, - url_resolver: // TODO: URL may be file:, in that case we need FileResolver usages: &Vec, + base_path: Option>, cache: &mut ResolvedCandidates, ) -> Result< pubgrub::Dependencies, @@ -290,94 +299,52 @@ fn compute_deps( let mut depmap: HashMap = pubgrub::Map::default(); for usage in usages { - match usage { - InterchangeProjectUsageG::Resource { - resource, - version_constraint, - } => { - if let Some(constraint) = version_constraint { - let mut valid_candidates = HashSet::new(); - - let mut found_versions = Vec::new(); - for (i, (candidate_info, _)) in resolve_candidates(resolver, resource, cache)? - .iter() - .enumerate() - { - found_versions.push(candidate_info.version.clone()); - if constraint.matches(&candidate_info.version) { - valid_candidates.insert(i); - } - } - if valid_candidates.is_empty() { - let mut versions = String::new(); - // `found_versions` must contain at least one element - write!(versions, "`{}`", found_versions[0]).unwrap(); - for v in &found_versions[1..] 
{ - write!(versions, ", `{}`", v).unwrap(); - } - return Err(InternalSolverError::VersionNotAvailable(format!( - "project `{}`\n\ - was found, but the requested version constraint `{}`\n\ - was not satisfied by any of the found versions:\n\ - {}", - resource, constraint, versions - ))); - } + let mut valid_candidates = HashSet::new(); - depmap.insert( - DependencyIdentifier::Remote(resource.clone()), - DiscreteHashSet::Finite(valid_candidates), - ); - } else { - // Check that the project can be found - resolve_candidates(resolver, resource, cache)?; - // TODO: reenable this when it's fixed to give better error messages - // https://github.com/pubgrub-rs/pubgrub/pull/216 - // match resolve_candidates(resolver, &usage.resource, cache) { - // Ok(_) => (), - // Err(err) => return Ok(pubgrub::Dependencies::Unavailable(err.to_string())), - // }; - - depmap.insert( - DependencyIdentifier::Remote(resource.clone()), - DiscreteHashSet::empty().complement(), - ); - } - } - InterchangeProjectUsageG::Url { - url, - publisher, - name, - } => { - // TODO: use concrete resolver for DEREFERENCEABLE URL, it must also check that publisher/name match - todo!() - } - InterchangeProjectUsageG::Path { - path, - publisher, - name, - } => { - // TODO: use concrete resolver for RELATIVE PATH, it must also check that publisher/name match - todo!() - } - InterchangeProjectUsageG::Git { - git, - id, - publisher, - name, - } => { - // TODO: use concrete resolver for GIT, it has to find the project in the repo - todo!() + // let id = Identifier::from_interchange_usage(usage); + // let mut new_base_path = None; + let mut found_versions = Vec::new(); + for (i, (candidate_info, _)) in resolve_candidates(resolver, usage, base_path, cache)? 
+ .iter() + .enumerate() + { + found_versions.push(candidate_info.version.clone()); + if usage.version_satisfies_req(&candidate_info.version) { + valid_candidates.insert(i); + // Project must exist in cache if it's resolved + // let pr = &cache.get(&id).unwrap()[i].2; + // FIXME: this is an ugly hack that may resolve deps incorrectly if + // the candidate that we actually use to resolve deps is different from the + // first one that returns a base path + // if new_base_path.is_none() + // && let Some(bp) = pr.base_path_for_usage_resolver() + // { + // new_base_path = Some(bp) + // } } - InterchangeProjectUsageG::Index { - publisher, - name, - version_constraint, - } => { - // TODO: use concrete resolver for INDEX - todo!() + } + if valid_candidates.is_empty() { + let mut versions = String::new(); + // `found_versions` must contain at least one element + write!(versions, "`{}`", found_versions[0]).unwrap(); + for v in &found_versions[1..] { + write!(versions, ", `{}`", v).unwrap(); } + return Err(InternalSolverError::VersionNotAvailable(format!( + "project {usage}\n\ + was found, but the requested version constraint\n\ + was not satisfied by any of the found versions:\n\ + {versions}", + ))); } + + let sources = depmap.insert( + DependencyIdentifier::Remote( + usage.to_owned(), + base_path.map(|p| p.as_ref().to_owned()), + ), + DiscreteHashSet::Finite(valid_candidates), + ); } Ok(pubgrub::Dependencies::Available(depmap)) @@ -420,20 +387,21 @@ impl Display for SolverError { pubgrub::PubGrubError::ErrorRetrievingDependencies { package, source, .. } => match package { - DependencyIdentifier::Requested(_) => { + DependencyIdentifier::Requested(..) => { write!(f, "failed to retrieve project(s): {source}") } - DependencyIdentifier::Remote(iri) => { - write!(f, "failed to retrieve usages of `{iri}`: {source}") + DependencyIdentifier::Remote(usage, ..) => { + // TODO: better error message here, publisher/name should be sufficient? 
+ write!(f, "failed to retrieve usages of {usage}: {source}") } }, pubgrub::PubGrubError::ErrorChoosingVersion { package, source } => match package { - DependencyIdentifier::Requested(_) => { + DependencyIdentifier::Requested(..) => { // `fn choose_version()` is infallible in this path unreachable!(); } - DependencyIdentifier::Remote(iri) => { - write!(f, "unable to select version of `{iri}`: {source}") + DependencyIdentifier::Remote(usage, _) => { + write!(f, "unable to select version of {usage}: {source}") } }, pubgrub::PubGrubError::ErrorInShouldCancel(_) => { @@ -454,21 +422,27 @@ pub enum InternalSolverError { // InvalidProject, /// Project not found by current resolver /// Value is the formatted error message - #[error("project with IRI `{0}` not found: {1}")] - NotFound(Box, String), + #[error("project {0} not found: {1}")] + NotFound(InterchangeProjectUsage, String), /// Project candidates were found, but none of them were /// valid. /// Value is the formatted error message - #[error("no valid candidates found for project `{0}`")] - NoValidCandidates(Box), + #[error("no valid candidates found for project {0}")] + NoValidCandidates(InterchangeProjectUsage), /// Project not found by current resolver /// Value is the formatted error message - #[error("IRI is of type not supported by this resolver: {0}")] - UnsupportedIriType(String), + #[error("usage {usage} is of type not supported by this resolver: {reason}")] + UnsupportedUsageType { + usage: InterchangeProjectUsage, + reason: String, + }, /// Project is found, but the requested version is not /// Value is the formatted error message #[error("requested version unavailable: {0}")] VersionNotAvailable(String), + /// Resolution failed due to an invalid usage that is in principle supported + #[error("usage is not resolvable: {0}")] + Unresolvable(String), } impl ProjectSolver { @@ -521,21 +495,22 @@ impl DependencyProvider for ProjectSolver } DiscreteHashSet::CoFinite(hash_set) => { match package { - 
DependencyIdentifier::Requested(_) => { + DependencyIdentifier::Requested(..) => { log::debug!("unknown version for request"); Ok(None) } - DependencyIdentifier::Remote(iri) => { + DependencyIdentifier::Remote(usage, base_path) => { let candidate_versions = resolve_candidates( &self.resolver, - iri, + usage, + base_path.as_ref(), &mut self.resolved_candidates.borrow_mut(), )?; let mut versions_indexes: Vec<(usize, semver::Version)> = candidate_versions .into_iter() .enumerate() - // Versions are usually returned in ascending order. + // Versions are usually returned in ascending order from registry/env. // Since we need them in descending order, sort will need // to perform less work if the iterator is reversed .rev() @@ -549,15 +524,13 @@ impl DependencyProvider for ProjectSolver for (i, v) in versions_indexes.iter() { if !hash_set.contains(i) { found = Some(*i); - log::debug!("chose version for `{}`: {}", iri.as_str(), v); + log::debug!("chose version for {usage}: {v}"); break; } } if found.is_none() { log::debug!( - "no allowed versions for `{}`, considered: {:?}", - iri.as_str(), - versions_indexes + "no allowed versions for {usage}, considered: {versions_indexes:?}" ); } @@ -574,32 +547,45 @@ impl DependencyProvider for ProjectSolver version: &Self::V, ) -> Result, Self::Err> { match package { - DependencyIdentifier::Requested(usages) => compute_deps( + DependencyIdentifier::Requested(usages, base_path) => compute_deps( &self.resolver, usages, + base_path.as_ref(), &mut self.resolved_candidates.borrow_mut(), ), - DependencyIdentifier::Remote(iri) => { - let info = { + DependencyIdentifier::Remote(usage, base_path) => { + let identifier = Identifier::from_interchange_usage(usage); + let (info, usage_resolve_path) = { let candidates = resolve_candidates( &self.resolver, - iri, + usage, + base_path.as_ref(), &mut self.resolved_candidates.borrow_mut(), )?; if *version >= candidates.len() { return Ok(pubgrub::Dependencies::Unavailable(format!( - "cannot resolve IRI 
`{}` to valid project", - iri + "cannot resolve {usage} to valid project" ))); } else { - candidates[*version].0.clone() + // TODO: find a better solution than abusing cache like this + let cached_project_candidates = + &self.resolved_candidates.borrow()[&identifier]; + ( + candidates[*version].0.clone(), + cached_project_candidates[*version] + .2 + .base_path_for_usage_resolver() + .to_owned() + .map(|p| p.to_owned()), + ) } }; compute_deps( &self.resolver, &info.usage, + usage_resolve_path, &mut self.resolved_candidates.borrow_mut(), ) } @@ -608,7 +594,7 @@ impl DependencyProvider for ProjectSolver } type Solution = HashMap< - Iri, + Identifier, ( InterchangeProjectInfo, InterchangeProjectMetadataRaw, @@ -618,11 +604,12 @@ type Solution = HashMap< pub fn solve( requested: Vec, + base_path: Option, resolver: R, ) -> Result, SolverError> { let solver = ProjectSolver::new(resolver); - let package = DependencyIdentifier::Requested(requested); + let package = DependencyIdentifier::Requested(requested, base_path); let version: usize = 0; @@ -631,7 +618,7 @@ pub fn solve( let mut map = solver.resolved_candidates.replace(HashMap::default()); let mut result: HashMap< - fluent_uri::Iri, + Identifier, ( InterchangeProjectInfo, InterchangeProjectMetadataRaw, @@ -641,10 +628,11 @@ pub fn solve( > = HashMap::default(); for (k, idx) in solution { - if let DependencyIdentifier::Remote(uri) = k { - let mut extracted = map.remove(&uri).expect("internal solver error"); + if let DependencyIdentifier::Remote(usage, _base_path) = k { + let identifier = Identifier::from_interchange_usage(&usage); + let mut extracted = map.remove(&identifier).expect("internal solver error"); - result.insert(uri, extracted.swap_remove(idx)); + result.insert(identifier, extracted.swap_remove(idx)); } } diff --git a/macros/src/lib.rs b/macros/src/lib.rs index 7bcdd7e9..3d0a03e9 100644 --- a/macros/src/lib.rs +++ b/macros/src/lib.rs @@ -144,6 +144,16 @@ pub fn project_read_derive(input: TokenStream) -> 
TokenStream { .get_project() .map_err(#error_ident::#variant_ident) }, + // project_root_match + quote! { + #enum_ident::#variant_ident(project) => project + .project_root() + }, + // base_path_for_usage_resolver_match + quote! { + #enum_ident::#variant_ident(project) => project + .base_path_for_usage_resolver() + }, // read_source_match quote! { #enum_ident::#variant_ident(project) => project @@ -167,6 +177,7 @@ pub fn project_read_derive(input: TokenStream) -> TokenStream { } }; + #[expect(clippy::type_complexity)] let ( variant_list, error_variants, @@ -176,6 +187,8 @@ pub fn project_read_derive(input: TokenStream) -> TokenStream { source_reader_match, source_reader_args, get_project_match, + project_root_match, + base_path_for_usage_resolver_match, read_source_match, sources_match, ): ( @@ -189,6 +202,8 @@ pub fn project_read_derive(input: TokenStream) -> TokenStream { Vec<_>, Vec<_>, Vec<_>, + Vec<_>, + Vec<_>, ) = variant_parts.iter().cloned().multiunzip(); let expanded = quote! { @@ -237,6 +252,18 @@ pub fn project_read_derive(input: TokenStream) -> TokenStream { } } + fn project_root(&self) -> ::std::option::Option<&::camino::Utf8Path> { + match self { + #( #project_root_match ),* + } + } + + fn base_path_for_usage_resolver(&self) -> ::std::option::Option<&::camino::Utf8Path> { + match self { + #( #base_path_for_usage_resolver_match ),* + } + } + type SourceReader<'a> = #source_reader_ident< #( #source_reader_args ),* diff --git a/sysand/src/cli.rs b/sysand/src/cli.rs index b6544a59..5e3df03e 100644 --- a/sysand/src/cli.rs +++ b/sysand/src/cli.rs @@ -11,7 +11,10 @@ use camino::Utf8PathBuf; use clap::{ValueEnum, builder::StyledStr, crate_authors}; use fluent_uri::Iri; use semver::VersionReq; -use sysand_core::build::KparCompressionMethod; +use sysand_core::{ + build::KparCompressionMethod, + model::{GitId, InterchangeProjectUsage}, +}; use crate::env_vars; @@ -264,6 +267,20 @@ pub enum ExpCommand { }, /// Remove a usage Remove { publisher: String, name: 
String }, + /// Clone a project + Clone { + #[command(subcommand)] + locator: ExpCloneLocatorArgs, + /// Path to clone the project into. If already exists, must + /// be an empty directory. Defaults to current directory + #[arg(long, short, default_value = None, verbatim_doc_comment)] + target: Option, + /// Don't resolve or install dependencies + #[arg(long)] + no_deps: bool, + #[command(flatten)] + resolution_opts: ResolutionOptions, + }, } #[derive(clap::Subcommand, Debug, Clone)] @@ -334,6 +351,117 @@ pub struct ExpGitOptions { pub branch: Option, } +// TODO: don't require publisher/name +#[derive(clap::Subcommand, Debug, Clone)] +#[group(id = "expadd", required = true, multiple = false)] +pub enum ExpCloneLocatorArgs { + /// Clone a project from HTTP(S) URL + Url { + /// Publisher of the project + publisher: String, + /// Name of the project + name: String, + /// URL of the project. Can point to a KPAR or a project directory + url: Iri, + }, + // TODO: does it make sense to allow kpar or src? + /// Clone a project from a local path + #[clap(verbatim_doc_comment)] + Path { + /// Publisher of the project + publisher: String, + /// Name of the project + name: String, + /// Path to the project. Can be relative or absolute, and can point + /// to either a KPAR or a project directory + #[clap(verbatim_doc_comment)] + path: Utf8PathBuf, + }, + /// Clone a project from an index + Index { + /// Publisher of the project + publisher: String, + /// Name of the project + name: String, + /// Version constraint + // TODO: make this optional and default to latest stable version, like Cargo + version_constraint: VersionReq, + }, + /// Clone a project from a git repository. + #[clap(verbatim_doc_comment)] + Git { + /// Publisher of the project + publisher: String, + /// Name of the project. Publisher and name + /// identify the project anywhere within the repository + #[clap(verbatim_doc_comment)] + name: String, + /// URL of the repository. 
If none of the `rev`/`tag`/`branch` are given, + /// latest rev of the default branch will be used. + #[clap(value_name = "URL", verbatim_doc_comment)] + git: Iri, + #[command(flatten)] + options: ExpGitOptions, + }, +} + +impl From for InterchangeProjectUsage { + fn from(value: ExpCloneLocatorArgs) -> Self { + match value { + ExpCloneLocatorArgs::Url { + publisher, + name, + url, + } => InterchangeProjectUsage::Url { + url, + publisher, + name, + }, + ExpCloneLocatorArgs::Path { + publisher, + name, + path, + } => InterchangeProjectUsage::Path { + path: path.into(), + publisher, + name, + }, + ExpCloneLocatorArgs::Index { + publisher, + name, + version_constraint, + } => InterchangeProjectUsage::Index { + publisher, + name, + version_constraint, + }, + ExpCloneLocatorArgs::Git { + publisher, + name, + git, + options, + } => { + let ExpGitOptions { rev, tag, branch } = options; + let id = if let Some(rev) = rev { + GitId::Rev(rev) + } else if let Some(tag) = tag { + GitId::Tag(tag) + } else if let Some(branch) = branch { + GitId::Branch(branch) + } else { + unreachable!() + }; + InterchangeProjectUsage::Git { + git, + id, + publisher, + name, + } + } + } + } +} + #[derive(clap::Args, Debug, Clone)] #[group(required = true, multiple = false)] pub struct AddProjectLocatorArgs { @@ -385,8 +513,8 @@ pub struct CloneProjectLocatorArgs { /// IRI/URI/URL identifying the project to be cloned #[arg(short = 'i', long, visible_alias = "uri", visible_alias = "url")] pub iri: Option>, - /// Path to clone the project from. If version is also - /// given, verifies that the project has the given version + /// Path to clone the project from. Can be relative or absolute. 
If version + /// is also given, verifies that the project has the given version // TODO: allow somehow requiring to use git here #[arg( long, diff --git a/sysand/src/commands/add.rs b/sysand/src/commands/add.rs index 87229648..b5ca335c 100644 --- a/sysand/src/commands/add.rs +++ b/sysand/src/commands/add.rs @@ -17,7 +17,7 @@ use sysand_core::{ }, context::ProjectContext, lock::Source, - model::{GitId, InterchangeProjectUsageRaw}, + model::{GitId, InterchangeProjectUsageG, InterchangeProjectUsageRaw}, project::{ ProjectRead, utils::{relativize_path, wrapfs}, @@ -51,7 +51,7 @@ pub fn command_add( let iri = iri.as_ref(); let mut current_project = ctx .current_project - .clone() + // .clone() .ok_or(CliError::MissingProjectCurrentDir)?; #[allow(clippy::manual_map)] // For readability and compactness @@ -93,7 +93,10 @@ pub fn command_add( runtime.clone(), auth_policy.clone(), ); - let outcome = std_resolver.resolve_read_raw(&url)?; + let outcome = std_resolver.resolve_read( + &InterchangeProjectUsageG::from_iri(url), + Some(current_project.root_path()), + )?; let mut source = None; match outcome { ResolutionOutcome::Resolved(alternatives) => { @@ -111,9 +114,15 @@ pub fn command_add( } } } - ResolutionOutcome::UnsupportedUsageType(e) => bail!("unsupported URL `{url}`:\n{e}"), - ResolutionOutcome::NotFound(e) => { - bail!("failed to resolve URL `{url}`:\n{e}") + ResolutionOutcome::UnsupportedUsageType { usage, reason } => { + bail!("unsupported URL {usage}:\n{reason}") + } + ResolutionOutcome::NotFound(usage, reason) => { + bail!("failed to resolve URL {usage}:\n{reason}") + } + ResolutionOutcome::InvalidUsage(..) 
=> unreachable!(), + ResolutionOutcome::Unresolvable(msg) => { + bail!("usage is not resolvable: {msg}") } } if source.is_none() { @@ -338,126 +347,7 @@ pub fn command_add_experimental( .clone() .ok_or(CliError::MissingProjectCurrentDir)?; - // #[allow(clippy::manual_map)] // For readability and compactness - // let source = if let Some(path) = source_opts.from_path { - // let metadata = wrapfs::metadata(&path)?; - // if metadata.is_dir() { - // Some(Source::LocalSrc { - // src_path: get_relative(path, current_project.root_path())? - // .as_str() - // .into(), - // }) - // } else if metadata.is_file() { - // Some(Source::LocalKpar { - // kpar_path: get_relative(path, current_project.root_path())? - // .as_str() - // .into(), - // }) - // } else { - // bail!("path `{path}` is neither a directory nor a file"); - // } - // } else if let Some(url) = source_opts.from_url { - // let ResolutionOptions { - // index, - // default_index, - // no_index, - // include_std: _, - // } = resolution_opts.clone(); - - // let index_urls = if no_index { - // None - // } else { - // Some(config.index_urls(index, vec![DEFAULT_INDEX_URL.to_string()], default_index)?) 
- // }; - // let std_resolver = standard_resolver( - // None, - // None, - // Some(client.clone()), - // index_urls, - // runtime.clone(), - // auth_policy.clone(), - // ); - // let outcome = std_resolver.resolve_read_raw(&url)?; - // let mut source = None; - // match outcome { - // ResolutionOutcome::Resolved(alternatives) => { - // for candidate in alternatives { - // match candidate { - // Ok(project) => { - // source = project.sources(&ctx)?.first().cloned(); - // if source.is_some() { - // break; - // } - // } - // Err(err) => { - // log::debug!("skipping candidate project: {err}"); - // } - // } - // } - // } - // ResolutionOutcome::UnsupportedIRIType(e) => bail!("unsupported URL `{url}`:\n{e}"), - // ResolutionOutcome::Unresolvable(e) => { - // bail!("failed to resolve URL `{url}`:\n{e}") - // } - // } - // if source.is_none() { - // bail!("unable to find project at URL `{url}`") - // } - // source - // } else if let Some(editable) = source_opts.as_editable { - // Some(Source::Editable { - // editable: get_relative(editable, current_project.root_path())? - // .as_str() - // .into(), - // }) - // } else if let Some(src_path) = source_opts.as_local_src { - // Some(Source::LocalSrc { - // src_path: get_relative(src_path, current_project.root_path())? - // .as_str() - // .into(), - // }) - // } else if let Some(kpar_path) = source_opts.as_local_kpar { - // Some(Source::LocalKpar { - // kpar_path: get_relative(kpar_path, current_project.root_path())? 
- // .as_str() - // .into(), - // }) - // } else if let Some(remote_src) = source_opts.as_remote_src { - // Some(Source::RemoteSrc { - // remote_src: remote_src.into_string(), - // }) - // } else if let Some(remote_kpar) = source_opts.as_remote_kpar { - // Some(Source::RemoteKpar { - // remote_kpar: remote_kpar.into_string(), - // remote_kpar_size: None, - // }) - // // TODO: make all --as-* use new-style usages unconditionally, otherwise will need two impl for them - // } else if let Some(remote_git) = source_opts.as_remote_git { - // Some(Source::RemoteGit { - // remote_git: remote_git.into_string(), - // rev: todo!(), - // path: todo!(), - // }) - // } else { - // None - // }; - - // if let Some(source) = source { - // let config_path = config_file - // .map(Utf8PathBuf::from) - // .or((!no_config).then(|| current_project.root_path().join(CONFIG_FILE))); - - // if let Some(path) = config_path { - // add_project_source_to_config(&path, iri, &source)?; - // } else { - // log::warn!("project source for `{iri}` not added to any config file"); - // } - - // config.projects.push(ConfigProject { - // identifiers: vec![iri.to_owned()], - // sources: vec![source], - // }); - // } + // TODO: support source overrides let usage = match locator { ExpAddProjectLocatorArgs::Url { diff --git a/sysand/src/commands/clone.rs b/sysand/src/commands/clone.rs index 8f792bde..2f814df2 100644 --- a/sysand/src/commands/clone.rs +++ b/sysand/src/commands/clone.rs @@ -11,7 +11,13 @@ use sysand_core::{ config::Config, context::ProjectContext, env::utils::clone_project, - project::{ProjectRead, editable::EditableProject, local_src::LocalSrcProject, utils::wrapfs}, + model::InterchangeProjectUsage, + project::{ + ProjectRead, + editable::EditableProject, + local_src::LocalSrcProject, + utils::{Identifier, wrapfs}, + }, resolve::{ ResolutionOutcome, ResolveRead, memory::{AcceptAll, MemoryResolver}, @@ -22,7 +28,7 @@ use sysand_core::{ use crate::{ CliError, DEFAULT_INDEX_URL, - 
cli::{CloneProjectLocatorArgs, ResolutionOptions}, + cli::{CloneProjectLocatorArgs, ExpCloneLocatorArgs, ResolutionOptions}, commands::sync::command_sync, get_or_create_env, }; @@ -163,6 +169,204 @@ pub fn command_clone( Ok(()) } +/// Clones project from `locator` to `target` directory. +#[allow(clippy::too_many_arguments)] +pub fn command_clone_experimental( + locator: ExpCloneLocatorArgs, + // TODO: is version useful? + // version: Option, + target: Option, + ctx: ProjectContext, + no_deps: bool, + resolution_opts: ResolutionOptions, + config: &Config, + client: reqwest_middleware::ClientWithMiddleware, + runtime: Arc, + auth_policy: Arc, +) -> Result<()> { + let usage: InterchangeProjectUsage = locator.into(); + let target: Utf8PathBuf = target.unwrap_or_else(|| ".".into()); + let project_path = { + // Canonicalization is performed only for better error messages + let canonical = wrapfs::absolute(&target)?; + match fs::read_dir(&target) { + Ok(mut dir_it) => { + if dir_it.next().is_some() { + bail!("target directory not empty: `{}`", canonical) + } + } + Err(e) => match e.kind() { + ErrorKind::NotFound => { + wrapfs::create_dir_all(&canonical)?; + } + ErrorKind::NotADirectory => { + bail!("target path `{}` is not a directory", canonical) + } + e => { + bail!("failed to get metadata for `{}`: {}", canonical, e); + } + }, + } + canonical + }; + + let (include_std, locator, local_project, std_resolver) = match { + let ResolutionOptions { + index, + default_index, + no_index, + include_std, + } = resolution_opts; + if let Some(existing_project) = &ctx.current_project { + log::warn!( + "found an existing project in one of target path's parent\n\ + {:>8} directories `{}`", + ' ', + existing_project.root_path() + ); + } + if let Some(existing_workspace) = &ctx.current_workspace { + log::warn!( + "found an existing workspace in one of target path's parent\n\ + {:>8} directories `{}`", + ' ', + existing_workspace.root_path() + ); + } + let index_urls = if no_index { + 
None
+    } else {
+        Some(config.index_urls(index, vec![DEFAULT_INDEX_URL.to_string()], default_index)?)
+    };
+    // let CloneProjectLocatorArgs {
+    //     auto_location,
+    //     iri,
+    //     path,
+    // } = locator;
+    // let locator = if let Some(auto_location) = auto_location {
+    //     match fluent_uri::Iri::parse(auto_location) {
+    //         Ok(iri) => ProjectLocator::Iri(iri),
+    //         Err((_e, path)) => ProjectLocator::Path(path.into()),
+    //     }
+    // } else if let Some(path) = path {
+    //     ProjectLocator::Path(path)
+    // } else if let Some(iri) = iri {
+    //     ProjectLocator::Iri(iri)
+    // } else {
+    //     unreachable!()
+    // };
+    let cloning = "Cloning";
+    let cloned = "Cloned";
+    let header = sysand_core::style::get_style_config().header;
+
+    let mut local_project = LocalSrcProject {
+        nominal_path: None,
+        project_path,
+    };
+
+    let std_resolver = standard_resolver(
+        None,
+        None,
+        Some(client.clone()),
+        index_urls,
+        runtime.clone(),
+        auth_policy.clone(),
+    );
+    log::info!(
+        "{header}{cloning:>12}{header:#} project {usage} to\n\
+         {:>12} `{}`",
+        ' ',
+        local_project.project_path,
+    );
+    let (_version, storage) =
+        get_project_version(&usage, Some(&ctx.current_directory), None, &std_resolver)?; // None: latest version, until a version flag exists
+    let (info, _meta) = clone_project(&storage, &mut local_project, true)?;
+    log::info!(
+        "{header}{cloned:>12}{header:#} `{}` {}",
+        info.name,
+        info.version
+    );
+
+    Ok((include_std, (), local_project, std_resolver)) // `locator` was moved into `usage` above and is unused after the match
+    } {
+        Ok(ret) => ret,
+        Err(e) => {
+            // Clean up the target dir. This is safe, since we ensured
+            // that the dir is empty before touching it
+            clean_dir(&target);
+            return Err(e);
+        }
+    };
+
+    // Update project context with the new cloned project
+    // TODO: Consider under which circumstances (if any)
+    // the workspace should carry over.
+ let ctx = ProjectContext { + current_workspace: None, + current_project: Some(local_project.clone()), + current_directory: ctx.current_directory, + }; + + if !no_deps { + let provided_iris = if !include_std { + crate::known_std_libs() + } else { + HashMap::default() + }; + let mut memory_projects = HashMap::default(); + for (k, v) in provided_iris.iter() { + memory_projects.insert(fluent_uri::Iri::parse(k.clone()).unwrap(), v.to_vec()); + } + + let resolver = PriorityResolver::new( + MemoryResolver { + iri_predicate: AcceptAll {}, + projects: memory_projects, + }, + std_resolver, + ); + let project = EditableProject::new(".".into(), local_project); + let identifiers = vec![Identifier::from_interchange_usage(&usage)]; + let LockOutcome { + lock, + dependencies: _dependencies, + } = sysand_core::commands::lock::do_lock_projects( + [(identifiers, &project)], + resolver, + &provided_iris, + &ctx, + )?; + // Warn if we have any std lib dependencies + if !provided_iris.is_empty() + && lock + .projects + .iter() + .any(|x| x.identifiers.iter().any(|y| provided_iris.contains_key(y))) + { + crate::logger::warn_std_deps(); + } + let lock = lock.canonicalize(); + wrapfs::write( + project.inner().project_path.join(DEFAULT_LOCKFILE_NAME), + lock.to_string(), + )?; + + let mut env = get_or_create_env(&project.inner().project_path)?; + command_sync( + &lock, + &project.inner().project_path, + &mut env, + client, + &provided_iris, + runtime, + auth_policy, + &ctx, + )?; + } + + Ok(()) +} + #[expect(clippy::too_many_arguments)] fn obtain_project( locator: CloneProjectLocatorArgs, @@ -245,6 +449,7 @@ fn obtain_project( auth_policy.clone(), ); match &locator { + // TODO: leave this as-is for now, add `sysand experimental clone` ProjectLocator::Iri(iri) => { log::info!( "{header}{cloning:>12}{header:#} project with IRI `{}` to\n\ @@ -253,7 +458,15 @@ fn obtain_project( ' ', local_project.project_path, ); - let (_version, storage) = get_project_version(iri, version, &std_resolver)?; 
+ let (_version, storage) = get_project_version( + &InterchangeProjectUsage::Resource { + resource: iri.to_owned(), + version_constraint: None, + }, + None, + version, + &std_resolver, + )?; let (info, _meta) = clone_project(&storage, &mut local_project, true)?; log::info!( "{header}{cloned:>12}{header:#} `{}` {}", @@ -264,7 +477,7 @@ fn obtain_project( ProjectLocator::Path(path) => { let remote_project = LocalSrcProject { nominal_path: None, - project_path: path.into(), + project_path: wrapfs::canonicalize(path)?, }; if let Some(version) = version { let project_version = remote_project @@ -296,15 +509,17 @@ fn obtain_project( Ok((include_std, locator, local_project, std_resolver)) } -/// Obtains a project identified by `iri` via `resolver`. If +/// Obtains a project identified by `usage` via `resolver`. If /// version is given, obtains exactly that version. If not, /// obtains the latest version (including prerelease versions) pub fn get_project_version( - iri: &Iri, + usage: &InterchangeProjectUsage, + base_path: Option>, + // TODO: get rid of `version` version: Option, resolver: &R, ) -> Result<(semver::Version, R::ProjectStorage), anyhow::Error> { - match resolver.resolve_read(iri)? { + match resolver.resolve_read(usage, base_path)? 
{
        ResolutionOutcome::Resolved(alternatives) => {
            // If no version is supplied, choose the highest
            // Else, choose version that is supplied
@@ -385,8 +600,8 @@ pub fn get_project_version(
            iri.scheme(),
            iri
        ),
-        ResolutionOutcome::NotFound(e) => {
-            bail!("failed to resolve project `{iri}`: {e}")
+        ResolutionOutcome::NotFound(usage, reason) => {
+            bail!("project {usage} not found: {reason}")
        }
    }
}
diff --git a/sysand/src/commands/env.rs b/sysand/src/commands/env.rs
index de580af5..4e454aea 100644
--- a/sysand/src/commands/env.rs
+++ b/sysand/src/commands/env.rs
@@ -44,11 +44,14 @@ pub fn command_env>(path: P) -> Result(
-    iri: Iri,
-    version: Option,
+    usage: InterchangeProjectUsage,
+    // TODO: take versionreq in cli and package it into usage
+    // version: Option,
     install_opts: InstallOptions,
     resolution_opts: ResolutionOptions,
     config: &Config,
@@ -59,6 +62,7 @@ pub fn command_env_install(
     ctx: ProjectContext,
 ) -> Result<()> {
     let project_root = project_root.unwrap_or(wrapfs::current_dir()?);
+    let base_path = Some(Utf8Path::new("."));
     let mut env = crate::get_or_create_env(project_root.as_path())?;
     let InstallOptions {
         allow_overwrite,
@@ -72,7 +76,6 @@ pub fn command_env_install(
         include_std,
     } = resolution_opts;
-    // TODO: should probably first check that current project exists
     let provided_iris = if !include_std {
         let sysml_std = crate::known_std_libs();
         if sysml_std.contains_key(iri.as_ref()) {
@@ -126,7 +129,7 @@ pub fn command_env_install(
     // mechanisms depending on no_deps
     if no_deps {
         let (version, storage) =
-            crate::commands::clone::get_project_version(&iri, version, &resolver)?;
+            crate::commands::clone::get_project_version(&usage, base_path, version, &resolver)?;
         sysand_core::commands::env::do_env_install_project(
             &iri,
             &storage,
@@ -136,17 +139,18 @@ pub fn command_env_install(
         )?;
         add_single_env_project(iri, version.to_string(), env)?;
     } else {
-        let usages = vec![InterchangeProjectUsage::Resource {
-            resource: fluent_uri::Iri::from_str(iri.as_ref())?,
-            
version_constraint: version.map(|v| semver::VersionReq::parse(&v)).transpose()?, - }]; + // let usages = [InterchangeProjectUsage::Resource { + // resource: fluent_uri::Iri::from_str(iri.as_ref())?, + // version_constraint: version.map(|v| semver::VersionReq::parse(&v)).transpose()?, + // }]; let LockOutcome { lock, dependencies: _dependencies, } = sysand_core::commands::lock::do_lock_extend( Lock::default(), - usages, + [usage], + base_path.map(|p| p.to_owned()), resolver, &provided_iris, &ctx, @@ -178,6 +182,7 @@ pub fn command_env_install( } // TODO: Collect common arguments +/// Install project from `path` as `iri` #[allow(clippy::too_many_arguments)] pub fn command_env_install_path( iri: Iri, diff --git a/sysand/src/commands/sync.rs b/sysand/src/commands/sync.rs index 969725dc..064ad0d6 100644 --- a/sysand/src/commands/sync.rs +++ b/sysand/src/commands/sync.rs @@ -14,7 +14,7 @@ use sysand_core::{ lock::Lock, project::{ AsSyncProjectTokio, ProjectReadAsync, - gix_git_download::{GixDownloadedError, GixDownloadedProject}, + gix_git_download::{GixDownloadedError, GixDownloadedProject, GixDownloadedProjectExact}, local_kpar::LocalKParProject, local_src::LocalSrcProject, memory::InMemoryProject, @@ -64,8 +64,8 @@ pub fn command_sync, Policy: HTTPAuthentication>( ) }, ), - Some(|remote_git: String| -> Result { - GixDownloadedProject::new(remote_git) + Some(|remote_git: &str, rev:String, path: Option| -> Result { + GixDownloadedProjectExact::new_download(remote_git,rev,path) }), provided_iris, )?; diff --git a/sysand/src/lib.rs b/sysand/src/lib.rs index 6b27abe6..f3fc6af6 100644 --- a/sysand/src/lib.rs +++ b/sysand/src/lib.rs @@ -49,6 +49,7 @@ use crate::{ commands::{ add::{command_add, command_add_experimental}, build::{command_build_for_project, command_build_for_workspace}, + clone::command_clone_experimental, env::{ command_env, command_env_install, command_env_install_path, command_env_list, command_env_uninstall, @@ -765,6 +766,22 @@ pub fn run_cli(args: 
cli::Args) -> Result<()> { args.global_opts.config_file, args.global_opts.no_config, ), + ExpCommand::Clone { + locator, + target, + no_deps, + resolution_opts, + } => command_clone_experimental( + locator, + target, + ctx, + no_deps, + resolution_opts, + &config, + client, + runtime, + auth_policy, + ), } } }