From d9b34470c86fce70bb59316b1ada7ef0b82b557b Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Tue, 10 Mar 2026 09:24:01 +0100 Subject: [PATCH 01/15] chore: update workspace dependencies for lockfile-v7 branch --- Cargo.lock | 68 +++++++++++++------------------------- Cargo.toml | 32 +++++++++++++++--- crates/pixi_api/Cargo.toml | 9 ----- crates/pixi_cli/Cargo.toml | 2 -- 4 files changed, 50 insertions(+), 61 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0678af98d0..37a1cf2b38 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1695,8 +1695,7 @@ dependencies = [ [[package]] name = "coalesced_map" version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cf5a7a58a9d5b914bddb0a3a2bd920af2be897114dc8128af022af81fc43b8b" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "dashmap", "tokio", @@ -2743,8 +2742,7 @@ checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" [[package]] name = "file_url" version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81d37aab514a05a249a5b15408dc74d716f5745a2c5daf22e40a245ffd38fa84" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "itertools 0.14.0", "percent-encoding", @@ -5410,8 +5408,7 @@ checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42" [[package]] name = "path_resolver" version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59953de32542217edd1cc5fd52ac7ca2de57d7613a3a3e569370e5ea09231df3" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "fs-err", @@ -5673,7 +5670,6 @@ dependencies = [ "pypi_modifiers", "rattler_conda_types", "rattler_lock", - 
"rattler_repodata_gateway", "regex", "same-file", "serde", @@ -7423,8 +7419,7 @@ dependencies = [ [[package]] name = "rattler" version = "0.40.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "880f81c4820c3da2d1aa2755f3b216f5581074271f38188f630948d2c12e8a69" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "anyhow", "clap", @@ -7738,8 +7733,7 @@ dependencies = [ [[package]] name = "rattler_cache" version = "0.6.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc379317f53ef1264b9a002a58dfe44285e777483018e04265a7f459e4688471" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "anyhow", @@ -7771,8 +7765,7 @@ dependencies = [ [[package]] name = "rattler_conda_types" version = "0.44.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d81cb5a02ab501c600cdd4aa16280609ed06058c72e5a59ca659c15fa321b368" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "chrono", @@ -7814,8 +7807,7 @@ dependencies = [ [[package]] name = "rattler_config" version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59db0f9b45e3c36ea3494af9c782d33d27f854c7acb0616d0deba199a04516b3" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "console 0.16.3", "fs-err", @@ -7832,8 +7824,7 @@ dependencies = [ [[package]] name = "rattler_digest" version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa6239d5be357419ba579b1cda7fe0e140a22134ebc999adb62b818989fbc7c7" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" 
dependencies = [ "blake2", "digest", @@ -7910,8 +7901,7 @@ dependencies = [ [[package]] name = "rattler_lock" version = "0.27.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d9d4fb55e1e0d752501f1d7d7c861fe2118b1e9a757478f99fc2dff96a1063d" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "chrono", @@ -7931,13 +7921,13 @@ dependencies = [ "thiserror 2.0.18", "typed-path", "url", + "xxhash-rust", ] [[package]] name = "rattler_macros" version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18d0d45ce3ae00333421569d2fafa4b877708a901c3cb217f8d4acfab4328df0" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "quote", "syn", @@ -7946,8 +7936,7 @@ dependencies = [ [[package]] name = "rattler_menuinst" version = "0.2.51" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b5355503570bafe289d127b0f29039708d2b44e7a203af4cfe82f354114b8b" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "chrono", "configparser", @@ -7977,8 +7966,7 @@ dependencies = [ [[package]] name = "rattler_networking" version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be8a045454dd88bbeb7e1477a51aa6ea8a021626987d7f142bc2403ada17923" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "anyhow", "async-once-cell", @@ -8011,8 +7999,7 @@ dependencies = [ [[package]] name = "rattler_package_streaming" version = "0.24.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31eb86b7afd6d7016e6616a70f3d8f3d1d894354de9c49c6e24a1f508fc611f3" +source = 
"git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "astral-tokio-tar 0.6.0", "astral_async_zip", @@ -8064,8 +8051,7 @@ dependencies = [ [[package]] name = "rattler_pty" version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec435d69bcc064b5cb0f6a49d8cfc1dbed93f0ec233d4499156ae7c3bc7f90d7" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "libc", "nix 0.30.1", @@ -8076,8 +8062,7 @@ dependencies = [ [[package]] name = "rattler_redaction" version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "961121cad9792daafc176a7f4caafc0fc889fb17507c23a4c86e1777a8b5e179" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "reqwest 0.12.28", "reqwest-middleware", @@ -8087,8 +8072,7 @@ dependencies = [ [[package]] name = "rattler_repodata_gateway" version = "0.27.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f6b21fc76bf16aefd85d5f7db45c1c297c2f89663548e421d30c33da5496008" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "anyhow", @@ -8150,8 +8134,7 @@ dependencies = [ [[package]] name = "rattler_s3" version = "0.1.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb6e9f4cecd2e0c20888e861ce7821a869db95e30886ca157aea9641582c14af" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "aws-config", "aws-credential-types", @@ -8167,8 +8150,7 @@ dependencies = [ [[package]] name = "rattler_shell" version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1b086267abcb1707b839b2e1e227363490f4ee326f4d75a50543a7310cb90f0d" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "anyhow", "enum_dispatch", @@ -8188,8 +8170,7 @@ dependencies = [ [[package]] name = "rattler_solve" version = "5.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bbee4677ebf3d04eec66e8858f7a45ed538b16a39aadaa49db1526325839aa3" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "chrono", "futures", @@ -8207,8 +8188,7 @@ dependencies = [ [[package]] name = "rattler_upload" version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a70331457f64c15836cb54eb16b7f129d3097181b3836ca8b37fd56c49c93f3" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "base64 0.22.1", "clap", @@ -8244,8 +8224,7 @@ dependencies = [ [[package]] name = "rattler_virtual_packages" version = "2.3.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3817df418c36b7b9ca3732ab35f2aba0c9a6349c5edfc5048711e5a96d70550" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "archspec", "libloading", @@ -9913,8 +9892,7 @@ dependencies = [ [[package]] name = "simple_spawn_blocking" version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55c0b0b683828aa9d4f5c0e59b0c856a12c30a65b5f1ca4292664734d76fa9c2" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "tokio", ] diff --git a/Cargo.toml b/Cargo.toml index 97b372fd9a..98781749d3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,12 +1,11 @@ [workspace] -default-members = ["crates/pixi"] 
+default-members = [ + "crates/pixi", +] exclude = [ # Only pulled in when enabling certain features. We do not want to include # these crates when running workspace wide commands. "crates/pixi_allocator", - # Debug output directories that don't contain Cargo.toml - "crates/pixi-build-cmake", - "crates/pixi-build-python", # Standalone projects that should not be part of the workspace "docs", "examples", @@ -88,7 +87,7 @@ ordermap = "1.0.0" parking_lot = "0.12.4" pathdiff = "0.2.3" pep440_rs = "0.7.3" -pep508_rs = "0.9.2" +pep508_rs = { version = "0.9.2", features = ["non-pep508-extensions"] } percent-encoding = "2.3.1" pin-project-lite = "0.2.16" pixi_allocator = { path = "crates/pixi_allocator" } @@ -257,6 +256,29 @@ reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "7650ed76215a962a96d94a79be71c27bffde7ab2" } version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "d8efd77673c9a90792da9da31b6c0da7ea8a324b" } +coalesced_map = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +file_url = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_cache = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_conda_types = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_config = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_digest = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_lock = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_menuinst = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_networking = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } 
+rattler_package_streaming = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_repodata_gateway = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_shell = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_solve = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_upload = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_redaction = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_virtual_packages = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +rattler_s3 = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +simple_spawn_blocking = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } + +[patch."https://github.com/prefix-dev/rattler-build"] +#rattler-build = { path = "/var/home/tobias/src/rattler-build" } + [profile.ci] codegen-units = 16 inherits = "release" diff --git a/crates/pixi_api/Cargo.toml b/crates/pixi_api/Cargo.toml index 6f3a5939a3..c9d961d7f3 100644 --- a/crates/pixi_api/Cargo.toml +++ b/crates/pixi_api/Cargo.toml @@ -8,11 +8,6 @@ readme.workspace = true repository.workspace = true version = "0.1.0" -[features] -default = [] -native-tls = ["rattler_repodata_gateway/native-tls"] -rustls-tls = ["rattler_repodata_gateway/rustls-tls"] - [dependencies] console = { workspace = true } dunce = { workspace = true } @@ -34,10 +29,6 @@ pixi_uv_conversions = { workspace = true } pypi_modifiers = { workspace = true } rattler_conda_types = { workspace = true } rattler_lock = { workspace = true } -rattler_repodata_gateway = { workspace = true, features = [ - "sparse", - "gateway", -] } regex = { workspace = true } same-file = { workspace = true } serde = { workspace = true, features = ["derive"] } diff --git a/crates/pixi_cli/Cargo.toml b/crates/pixi_cli/Cargo.toml index 
308fac2c11..3c79eab7d3 100644 --- a/crates/pixi_cli/Cargo.toml +++ b/crates/pixi_cli/Cargo.toml @@ -110,7 +110,6 @@ pixi_pty = { path = "../pixi_pty" } [features] default = [] native-tls = [ - "pixi_api/native-tls", "pixi_auth/native-tls", "pixi_utils/native-tls", "rattler/native-tls", @@ -120,7 +119,6 @@ native-tls = [ # run tests connecting to remote Internet services online_tests = [] rustls-tls = [ - "pixi_api/rustls-tls", "pixi_auth/rustls-tls", "pixi_utils/rustls-tls", "rattler/rustls-tls", From d022c6019a553781f5790b7fff8a18e9f4007a6e Mon Sep 17 00:00:00 2001 From: Tobias Hunger Date: Fri, 27 Feb 2026 13:24:21 +0000 Subject: [PATCH 02/15] feat: list data for pypi packages from cached index data When using `pixi list`, use data from the cached index data that we have stored. Fall back to leaving the data blank if nothing is cached. We do run into the blank data case occasionally: * Some repositories are to not get cached and uv accepts that * Some are not "simple" The `pytorch` index falls into both of those categories. Fixes: #5114 WIP: rattler-overrides, ABANDON THIS chore: Remove jlap support Rattler removed this, so it needs to go here as well. feat: Relative path support for pypi deps in Lockfile This PR replaces #4788! Make pixi handle relative paths to pypi dependencies in the LockFile. This helps when checking in pixi.lock files that reference local python packages. Instead of ending up with a machine specific path in the lock file we have relative path that should work for all developers. This depends on the rattler part which is here: conda/rattler#1760 -- as seen in the feature/lockfile-v7 branch in the `conda/rattler` repository! 
The PR consists of several changes: - Update to the rattler_lock API changes made in feat: Relative path support in LockFile conda/rattler#1760 - Make sure the non-pep508-extensions are enabled in pixi as well Convert between uv relative paths and pixi relative paths - It adapts pyproject.toml parsing to preserve relative path in python requirements and in the `tool.pixi.pypi-dependencies` section The last is the big improvement over #4788. Relates to: #4680 chore: Handle rattler removing the editable flag from PyPiSourced chore: update to CondaSourceData having lost its input field chore: update to new errors being raised by rattler chore: Add initial platform support to pixi Simplistic approach: It just creates platforms based on conda's `Platform` without any virtual packages. Handle pypi-prerelease-mode no longer being an Option chore: Update to pypi environment not being in the lockfile anymore chore: Handle optional version in pypi source dependencies We do not want to store the version number in case it is dynamic. 
chore: Implement support for index_url in pypi packages --- Cargo.lock | 2 + .../pixi/tests/integration_rust/add_tests.rs | 21 +- .../pixi/tests/integration_rust/common/mod.rs | 61 +-- .../pixi/tests/integration_rust/pypi_tests.rs | 489 +++++++++++++++++- .../integration_rust/solve_group_tests.rs | 37 +- .../tests/integration_rust/update_tests.rs | 7 +- crates/pixi_api/src/workspace/init/mod.rs | 2 +- crates/pixi_cli/Cargo.toml | 2 + crates/pixi_cli/src/build.rs | 2 +- crates/pixi_cli/src/tree.rs | 9 +- crates/pixi_cli/src/update.rs | 3 +- .../workspace/export/conda_explicit_spec.rs | 16 +- .../src/build/build_cache.rs | 2 +- .../src/cache/source_metadata.rs | 2 +- .../src/source_build/mod.rs | 2 +- .../src/source_build_cache_status/mod.rs | 2 +- .../src/source_metadata/mod.rs | 14 +- .../tests/integration/main.rs | 4 +- crates/pixi_config/src/lib.rs | 26 - ..._config__tests__config_merge_multiple.snap | 3 - crates/pixi_config/tests/config/config_2.toml | 1 - crates/pixi_core/src/activation.rs | 3 +- crates/pixi_core/src/environment/mod.rs | 12 +- .../pixi_core/src/lock_file/install_subset.rs | 4 +- crates/pixi_core/src/lock_file/mod.rs | 9 +- crates/pixi_core/src/lock_file/outdated.rs | 9 +- .../src/lock_file/records_by_name.rs | 12 +- .../pixi_core/src/lock_file/resolve/pypi.rs | 81 +-- .../src/lock_file/satisfiability/mod.rs | 152 +++--- crates/pixi_core/src/lock_file/update.rs | 92 ++-- crates/pixi_core/src/lock_file/utils.rs | 21 +- .../src/lock_file/virtual_packages.rs | 14 +- crates/pixi_core/src/workspace/mod.rs | 2 +- .../pixi_core/src/workspace/workspace_mut.rs | 14 +- crates/pixi_diff/src/lib.rs | 64 ++- crates/pixi_global/src/install.rs | 24 +- crates/pixi_install_pypi/src/conversions.rs | 25 +- crates/pixi_install_pypi/src/lib.rs | 15 +- crates/pixi_install_pypi/src/plan/models.rs | 6 +- crates/pixi_install_pypi/src/plan/planner.rs | 3 +- .../src/plan/required_dists.rs | 8 +- .../src/plan/test/harness.rs | 52 +- 
crates/pixi_install_pypi/src/plan/test/mod.rs | 2 +- .../pixi_install_pypi/src/plan/validation.rs | 61 ++- crates/pixi_manifest/src/discovery.rs | 12 +- crates/pixi_manifest/src/feature.rs | 11 +- .../pixi_manifest/src/manifests/workspace.rs | 28 +- crates/pixi_manifest/src/pyproject.rs | 25 +- crates/pixi_manifest/src/target.rs | 20 +- crates/pixi_manifest/src/toml/manifest.rs | 24 +- crates/pixi_manifest/src/toml/package.rs | 58 ++- crates/pixi_manifest/src/toml/pyproject.rs | 171 +++--- crates/pixi_manifest/src/toml/workspace.rs | 32 +- crates/pixi_manifest/src/utils/test_utils.rs | 6 +- crates/pixi_pypi_spec/src/lib.rs | 34 +- crates/pixi_pypi_spec/src/pep508.rs | 9 +- crates/pixi_pypi_spec/src/toml.rs | 33 +- crates/pixi_record/src/lib.rs | 6 + ...d__tests__roundtrip_conda_source_data.snap | 67 +-- crates/pixi_record/src/source_record.rs | 76 +-- crates/pixi_spec/src/lib.rs | 3 +- crates/pixi_uv_context/src/lib.rs | 4 +- .../pixi_uv_conversions/src/requirements.rs | 12 +- .../pixi_config_tomls/main_config.toml | 1 - 64 files changed, 1345 insertions(+), 679 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 37a1cf2b38..77f9a68077 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6006,6 +6006,7 @@ dependencies = [ "miette-arborium", "pathdiff", "pep508_rs", + "percent-encoding", "pixi_api", "pixi_auth", "pixi_build_frontend", @@ -6059,6 +6060,7 @@ dependencies = [ "uv-configuration", "uv-pep508", "uv-pypi-types", + "uv-redacted", "uv-requirements-txt", "which", "zip 2.4.2", diff --git a/crates/pixi/tests/integration_rust/add_tests.rs b/crates/pixi/tests/integration_rust/add_tests.rs index 669b0d7d7a..0afa9e2640 100644 --- a/crates/pixi/tests/integration_rust/add_tests.rs +++ b/crates/pixi/tests/integration_rust/add_tests.rs @@ -810,10 +810,11 @@ preview = ['pixi-build'] .unwrap(); let lock = pixi.lock_file().await.unwrap(); + let p = lock.platform(&Platform::Win64.to_string()).unwrap(); let git_package = lock .default_environment() .unwrap() - 
.packages(Platform::Win64) + .packages(p) .unwrap() .find(|p| p.as_conda().unwrap().location().as_str().contains("git+")); @@ -864,10 +865,11 @@ preview = ['pixi-build'] .unwrap(); let lock = pixi.lock_file().await.unwrap(); + let p = lock.platform(&Platform::Linux64.to_string()).unwrap(); let git_package = lock .default_environment() .unwrap() - .packages(Platform::Linux64) + .packages(p) .unwrap() .find(|p| p.as_conda().unwrap().location().as_str().contains("git+")); @@ -923,10 +925,11 @@ preview = ['pixi-build']"#, // Check the lock file let lock = pixi.lock_file().await.unwrap(); + let p = lock.platform(&Platform::Linux64.to_string()).unwrap(); let git_package = lock .default_environment() .unwrap() - .packages(Platform::Linux64) + .packages(p) .unwrap() .find(|p| p.as_conda().unwrap().location().as_str().contains("git+")); @@ -979,10 +982,11 @@ preview = ['pixi-build']"#, // Check the lock file let lock = pixi.lock_file().await.unwrap(); + let p = lock.platform(&Platform::Win64.to_string()).unwrap(); let git_package = lock .default_environment() .unwrap() - .packages(Platform::Win64) + .packages(p) .unwrap() .find(|p| p.as_conda().unwrap().location().as_str().contains("git+")); @@ -1082,13 +1086,16 @@ platforms = ["{platform}"] }); let lock_file = pixi.lock_file().await.unwrap(); + let p = lock_file + .platform(&Platform::current().to_string()) + .unwrap(); - let (boltons, _) = lock_file + let boltons = lock_file .default_environment() .unwrap() - .pypi_packages(Platform::current()) + .pypi_packages(p) .unwrap() - .find(|(p, _)| p.name.to_string() == "boltons") + .find(|p| p.name.to_string() == "boltons") .unwrap(); insta::with_settings!( {filters => vec![ diff --git a/crates/pixi/tests/integration_rust/common/mod.rs b/crates/pixi/tests/integration_rust/common/mod.rs index e69beae190..0067bffa5e 100644 --- a/crates/pixi/tests/integration_rust/common/mod.rs +++ b/crates/pixi/tests/integration_rust/common/mod.rs @@ -144,14 +144,6 @@ pub trait LockFileExt { 
platform: Platform, package: &str, ) -> Option>; - - /// Check if a PyPI package is marked as editable in the lock file - fn is_pypi_package_editable( - &self, - environment: &str, - platform: Platform, - package: &str, - ) -> Option; } impl LockFileExt for LockFile { @@ -159,8 +151,11 @@ impl LockFileExt for LockFile { let Some(env) = self.environment(environment) else { return false; }; + let Some(p) = self.platform(&platform.to_string()) else { + return false; + }; - env.packages(platform) + env.packages(p) .into_iter() .flatten() .filter_map(LockedPackageRef::as_conda) @@ -170,12 +165,15 @@ impl LockFileExt for LockFile { let Some(env) = self.environment(environment) else { return false; }; + let Some(p) = self.platform(&platform.to_string()) else { + return false; + }; - env.packages(platform) + env.packages(p) .into_iter() .flatten() .filter_map(LockedPackageRef::as_pypi) - .any(|(data, _)| data.name.as_ref() == name) + .any(|data| data.name.as_ref() == name) } fn contains_match_spec( @@ -188,8 +186,11 @@ impl LockFileExt for LockFile { let Some(env) = self.environment(environment) else { return false; }; + let Some(p) = self.platform(&platform.to_string()) else { + return false; + }; - env.packages(platform) + env.packages(p) .into_iter() .flatten() .filter_map(LockedPackageRef::as_conda) @@ -206,12 +207,15 @@ impl LockFileExt for LockFile { eprintln!("environment not found: {environment}"); return false; }; + let Some(p) = self.platform(&platform.to_string()) else { + return false; + }; - env.packages(platform) + env.packages(p) .into_iter() .flatten() .filter_map(LockedPackageRef::as_pypi) - .any(move |(data, _)| data.satisfies(&requirement)) + .any(move |data| data.satisfies(&requirement)) } fn get_pypi_package_version( @@ -220,13 +224,13 @@ impl LockFileExt for LockFile { platform: Platform, package: &str, ) -> Option { + let p = self.platform(&platform.to_string())?; self.environment(environment) .and_then(|env| { - 
env.pypi_packages(platform).and_then(|mut packages| { - packages.find(|(data, _)| data.name.as_ref() == package) - }) + env.pypi_packages(p) + .and_then(|mut packages| packages.find(|data| data.name.as_ref() == package)) }) - .map(|(data, _)| data.version.to_string()) + .map(|data| data.version_string()) } fn get_pypi_package( @@ -235,8 +239,9 @@ impl LockFileExt for LockFile { platform: Platform, package: &str, ) -> Option> { + let p = self.platform(&platform.to_string())?; self.environment(environment).and_then(|env| { - env.packages(platform) + env.packages(p) .and_then(|mut packages| packages.find(|p| p.name() == package)) }) } @@ -247,28 +252,14 @@ impl LockFileExt for LockFile { platform: Platform, package: &str, ) -> Option { + let p = self.platform(&platform.to_string())?; self.environment(environment) .and_then(|env| { - env.packages(platform) + env.packages(p) .and_then(|mut packages| packages.find(|p| p.name() == package)) }) .map(|p| p.location().clone()) } - - fn is_pypi_package_editable( - &self, - environment: &str, - platform: Platform, - package: &str, - ) -> Option { - self.environment(environment) - .and_then(|env| { - env.pypi_packages(platform).and_then(|mut packages| { - packages.find(|(data, _)| data.name.as_ref() == package) - }) - }) - .map(|(data, _)| data.editable) - } } impl PixiControl { diff --git a/crates/pixi/tests/integration_rust/pypi_tests.rs b/crates/pixi/tests/integration_rust/pypi_tests.rs index ea297cd1d2..757148c232 100644 --- a/crates/pixi/tests/integration_rust/pypi_tests.rs +++ b/crates/pixi/tests/integration_rust/pypi_tests.rs @@ -150,6 +150,263 @@ test = {{features = ["test"]}} ); } +fn write_subproject(pixi: &PixiControl, name: &str, version: &str) -> std::io::Result<()> { + fs_err::create_dir(pixi.workspace_path().join(name))?; + let mut file = File::create(pixi.workspace_path().join(format!("{name}/pyproject.toml")))?; // Creates or overwrites the file + file.write_all( + format!( + r#"[build-system] +requires = 
["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "{name}" +version = "{version}" + "# + ) + .as_bytes(), + ) +} + +#[tokio::test] +#[cfg_attr(not(feature = "online_tests"), ignore)] +async fn pyproject_relative_path_dependencies() { + setup_tracing(); + + let simple = PyPIDatabase::new() + .with(PyPIPackage::new("mine", "1.0.0")) + .with(PyPIPackage::new("also_mine", "1.0.0")) + .into_simple_index() + .unwrap(); + + let platform = Platform::current(); + let platform_str = platform.to_string(); + + let index_url = simple.index_url(); + + let pyproject = format!( + r#" +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "relative-path-dependencies" +version = "0.9.9" +dependencies = [ + "mine @ ./mine" +] + +[tool.pixi.workspace] +channels = ["conda-forge"] +platforms = ["{platform_str}"] +conda-pypi-map = {{}} + +[tool.pixi.dependencies] +python = "==3.11.0" + +[tool.pixi.pypi-dependencies] +also_mine = {{ path = "./also_mine" }} + +[tool.pixi.pypi-options] +index-url = "{index_url}" +"#, + ); + + let pixi = PixiControl::from_pyproject_manifest(&pyproject).unwrap(); + write_subproject(&pixi, "mine", "0.1.0").unwrap(); + write_subproject(&pixi, "also_mine", "2.1.0").unwrap(); + + println!("Calling update_lock_file now\n"); + let lock = pixi.update_lock_file().await.unwrap(); + + match lock.get_pypi_package("default", platform, "mine").unwrap() { + rattler_lock::LockedPackageRef::Conda(_) => { + panic!("Got a Conda package when I expected a pypi one") + } + rattler_lock::LockedPackageRef::Pypi(pkg) => { + assert_eq!(pkg.name.as_dist_info_name(), "mine"); + assert_eq!(pkg.location.given(), Some("./mine")); + assert!( + pkg.index_url.is_none(), + "path-based source package should not have index_url, got: {:?}", + pkg.index_url + ); + } + } + match lock + .get_pypi_package("default", platform, "also-mine") + .unwrap() + { + rattler_lock::LockedPackageRef::Conda(_) => { + panic!("Got a Conda 
package when I expected a pypi one") + } + rattler_lock::LockedPackageRef::Pypi(pkg) => { + assert_eq!(pkg.name.as_dist_info_name(), "also_mine"); + assert_eq!(pkg.location.given(), Some("./also_mine")); + assert!( + pkg.index_url.is_none(), + "path-based source package should not have index_url, got: {:?}", + pkg.index_url + ); + } + } +} + +#[tokio::test] +#[cfg_attr(not(feature = "online_tests"), ignore)] +async fn pyproject_dynamic_version_source_dependency() { + setup_tracing(); + + let platform = Platform::current(); + let platform_str = platform.to_string(); + + let pyproject = format!( + r#" +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "main-package" +version = "1.0.0" + +[tool.pixi.workspace] +channels = ["conda-forge"] +platforms = ["{platform_str}"] +conda-pypi-map = {{}} + +[tool.pixi.dependencies] +python = "==3.11.0" + +[tool.pixi.pypi-dependencies] +dynamic-dep = {{ path = "./dynamic-dep" }} +"#, + ); + + let pixi = PixiControl::from_pyproject_manifest(&pyproject).unwrap(); + + // Create a source dependency with a dynamic version + fs_err::create_dir(pixi.workspace_path().join("dynamic-dep")).unwrap(); + fs_err::write( + pixi.workspace_path().join("dynamic-dep/pyproject.toml"), + r#"[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "dynamic-dep" +dynamic = ["version"] +"#, + ) + .unwrap(); + + // Create a minimal setup.py that provides the dynamic version + fs_err::write( + pixi.workspace_path().join("dynamic-dep/setup.py"), + r#"from setuptools import setup +setup(version="42.23.12") +"#, + ) + .unwrap(); + + let lock = pixi.update_lock_file().await.unwrap(); + + // The lock file should contain the dynamic-dep package + let pkg = lock + .get_pypi_package("default", platform, "dynamic-dep") + .expect("dynamic-dep should be in the lock file"); + + match pkg { + rattler_lock::LockedPackageRef::Pypi(data) => { + eprintln!("dynamic-dep 
version in lock file: {:?}", data.version); + // A source dependency with dynamic version should have no version in the lock file + assert!( + data.version.is_none(), + "expected no version for dynamic source dependency, got {:?}", + data.version + ); + assert!( + data.index_url.is_none(), + "path-based source package should not have index_url, got: {:?}", + data.index_url + ); + } + _ => panic!("expected a pypi package"), + } + + // Round-trip: serialize and parse the lock file, then verify the version is still None + let lock_str = lock.render_to_string().unwrap(); + let lock2 = rattler_lock::LockFile::from_str_with_base_directory(&lock_str, None).unwrap(); + match lock2 + .get_pypi_package("default", platform, "dynamic-dep") + .expect("dynamic-dep should survive round-trip") + { + rattler_lock::LockedPackageRef::Pypi(data) => { + assert!( + data.version.is_none(), + "version should be None after round-trip, got {:?}", + data.version + ); + assert!( + data.index_url.is_none(), + "index_url should be None after round-trip, got: {:?}", + data.index_url + ); + } + _ => panic!("expected a pypi package"), + } + + // Write the round-tripped lock file back, then add a new pypi dependency + // to force a full re-resolve while the lock file with None version is on disk. 
+ let workspace = pixi.workspace().unwrap(); + lock2.to_path(&workspace.lock_file_path()).unwrap(); + + // Create a second source dependency to force the lock file to be stale + fs_err::create_dir(pixi.workspace_path().join("another-dep")).unwrap(); + fs_err::write( + pixi.workspace_path().join("another-dep/pyproject.toml"), + r#"[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "another-dep" +version = "1.0.0" +"#, + ) + .unwrap(); + fs_err::write( + pixi.workspace_path().join("another-dep/setup.py"), + "from setuptools import setup\nsetup()\n", + ) + .unwrap(); + + pixi.add_pypi("another-dep @ ./another-dep").await.unwrap(); + + match pixi + .lock_file() + .await + .unwrap() + .get_pypi_package("default", platform, "dynamic-dep") + .expect("dynamic-dep should survive re-resolve") + { + rattler_lock::LockedPackageRef::Pypi(data) => { + assert!( + data.version.is_none(), + "version should be None after re-resolve, got {:?}", + data.version + ); + assert!( + data.index_url.is_none(), + "index_url should be None after re-resolve, got: {:?}", + data.index_url + ); + } + _ => panic!("expected a pypi package"), + } +} + #[tokio::test] async fn pyproject_environment_markers_resolved() { setup_tracing(); @@ -1417,6 +1674,21 @@ version = "0.1.0" // First, update the lock file (this won't have editable field since we don't record it) let lock = pixi.update_lock_file().await.unwrap(); + // Path-based source package should not have index_url + match lock + .get_pypi_package("default", platform, "editable-test") + .expect("editable-test should be in the lock file") + { + rattler_lock::LockedPackageRef::Pypi(data) => { + assert!( + data.index_url.is_none(), + "path-based source package should not have index_url, got: {:?}", + data.index_url + ); + } + _ => panic!("expected a pypi package"), + } + // Manually modify the lock file to add editable: true, simulating an old lock file let lock_file_str = 
lock.render_to_string().unwrap(); @@ -1432,21 +1704,13 @@ version = "0.1.0" ); // Parse and write the modified lock file back - let modified_lockfile = LockFile::from_str(&modified_lock_file_str).unwrap(); + let modified_lockfile = + LockFile::from_str_with_base_directory(&modified_lock_file_str, None).unwrap(); let workspace = pixi.workspace().unwrap(); modified_lockfile .to_path(&workspace.lock_file_path()) .unwrap(); - // Verify the lock file now has editable: true - let lock_after_modification = pixi.lock_file().await.unwrap(); - assert!( - lock_after_modification - .is_pypi_package_editable("default", platform, "editable-test") - .unwrap_or(false), - "Lock file should have editable: true after manual modification" - ); - // Now install with --locked (uses the modified lock file without re-resolving) // The fix should ensure that the package is installed as NON-editable // because the manifest doesn't specify editable = true @@ -1460,3 +1724,208 @@ version = "0.1.0" "Package should NOT be installed as editable when manifest doesn't specify editable = true (even if lock file has editable: true)" ); } + +/// Test that packages from different indexes get distinct `index_url` values +/// recorded in the lock file. 
+#[tokio::test] +async fn test_index_url_in_lock_file() { + setup_tracing(); + + let platform = Platform::current(); + + // Create local conda channel with Python + let mut package_db = MockRepoData::default(); + package_db.add_package( + Package::build("python", "3.12.0") + .with_subdir(platform) + .finish(), + ); + let channel = package_db.into_channel().await.unwrap(); + + // Default index with "rsa" + let default_index = PyPIDatabase::new() + .with(PyPIPackage::new("rsa", "4.9.1")) + .into_simple_index() + .unwrap(); + + // Custom index with "torch" + let custom_index = PyPIDatabase::new() + .with(PyPIPackage::new("torch", "2.0.0")) + .into_simple_index() + .unwrap(); + + let pixi = PixiControl::from_manifest(&format!( + r#" + [workspace] + name = "index-url-test" + platforms = ["{platform}"] + channels = ["{channel_url}"] + conda-pypi-map = {{}} + + [dependencies] + python = "==3.12.0" + + [pypi-dependencies] + rsa = "*" + torch = {{ version = "*", index = "{custom_index_url}" }} + + [pypi-options] + index-url = "{default_index_url}" + "#, + platform = platform, + channel_url = channel.url(), + default_index_url = default_index.index_url(), + custom_index_url = custom_index.index_url(), + )) + .unwrap(); + + let lock_file = pixi.update_lock_file().await.unwrap(); + + let p = lock_file + .platform(&platform.to_string()) + .expect("platform should exist"); + let env = lock_file + .environment("default") + .expect("default environment should exist"); + + // torch should have index_url set to the custom index + let torch = env + .pypi_packages(p) + .expect("should have pypi packages") + .find(|data| data.name.as_ref() == "torch") + .expect("torch should be in pypi packages"); + assert_eq!( + torch.index_url.as_ref().map(|u| u.as_str()), + Some(custom_index.index_url().as_str()), + "torch should have index_url set to the custom index" + ); + + // rsa should have the default index URL, not the custom one + let rsa = env + .pypi_packages(p) + .expect("should have 
pypi packages") + .find(|data| data.name.as_ref() == "rsa") + .expect("rsa should be in pypi packages"); + assert_eq!( + rsa.index_url.as_ref().map(|u| u.as_str()), + Some(default_index.index_url().as_str()), + "rsa should have the default index URL" + ); +} + +/// Test that the default PyPI index URL is elided from the serialized lock file +/// while custom index URLs are preserved. Rattler handles the elision; pixi +/// always passes through the index URL. +/// +/// Requires network access for real PyPI resolution. +#[tokio::test] +#[cfg_attr(not(feature = "online_tests"), ignore)] +async fn test_index_url_omitted_for_default_pypi() { + setup_tracing(); + + // pytorch only has wheels for linux-64, so target that platform. + let platform = Platform::current(); + let platforms = match platform { + Platform::Linux64 => "\"linux-64\"".to_string(), + _ => format!("\"{platform}\", \"linux-64\""), + }; + + // Create local conda channel with Python for all relevant platforms + let mut package_db = MockRepoData::default(); + package_db.add_package( + Package::build("python", "3.12.0") + .with_subdir(Platform::Linux64) + .finish(), + ); + if platform != Platform::Linux64 { + package_db.add_package( + Package::build("python", "3.12.0") + .with_subdir(platform) + .finish(), + ); + } + let channel = package_db.into_channel().await.unwrap(); + + let pixi = PixiControl::from_manifest(&format!( + r#" + [workspace] + name = "index-url-pypi-test" + platforms = [{platforms}] + channels = ["{channel_url}"] + conda-pypi-map = {{}} + + [dependencies] + python = "==3.12.0" + + [target.linux-64.pypi-dependencies] + rsa = ">=4.9.1, <5" + torch = {{ version = "*", index = "https://download.pytorch.org/whl/cu124" }} + "#, + channel_url = channel.url(), + )) + .unwrap(); + + let lock_file = pixi.update_lock_file().await.unwrap(); + + let p = lock_file + .platform("linux-64") + .expect("linux-64 platform should exist"); + let env = lock_file + .environment("default") + .expect("default 
environment should exist"); + + // torch should have index_url set to the pytorch index + let torch = env + .pypi_packages(p) + .expect("should have pypi packages") + .find(|data| data.name.as_ref() == "torch") + .expect("torch should be in pypi packages"); + assert!( + torch + .index_url + .as_ref() + .expect("torch should have index_url") + .as_str() + .contains("download.pytorch.org"), + "torch index_url should point to pytorch: {:?}", + torch.index_url + ); + + // rsa comes from real PyPI — index_url is set but rattler elides it + // during serialization + let rsa = env + .pypi_packages(p) + .expect("should have pypi packages") + .find(|data| data.name.as_ref() == "rsa") + .expect("rsa should be in pypi packages"); + assert!( + rsa.index_url + .as_ref() + .expect("rsa should have index_url") + .as_str() + .contains("pypi.org"), + "rsa index_url should point to pypi.org: {:?}", + rsa.index_url + ); + + // Verify the serialized lock file: pytorch index URL should appear, + // pypi.org should be elided by rattler + let lock_file_content = lock_file.render_to_string().unwrap(); + assert!( + lock_file_content.contains("download.pytorch.org"), + "serialized lock file should contain the pytorch index URL" + ); + assert!( + !lock_file_content.contains("index_url: https://pypi.org"), + "serialized lock file should not contain index_url for the default PyPI index" + ); + + // Round-trip: parse and re-serialize, the output should be identical + let lock_file_rt = + rattler_lock::LockFile::from_str_with_base_directory(&lock_file_content, None).unwrap(); + assert_eq!( + lock_file_content, + lock_file_rt.render_to_string().unwrap(), + "lock file content should be identical after round-trip" + ); +} diff --git a/crates/pixi/tests/integration_rust/solve_group_tests.rs b/crates/pixi/tests/integration_rust/solve_group_tests.rs index d79a05205d..d3f438771d 100644 --- a/crates/pixi/tests/integration_rust/solve_group_tests.rs +++ 
b/crates/pixi/tests/integration_rust/solve_group_tests.rs @@ -205,10 +205,13 @@ async fn test_purl_are_added_for_pypi() { let lock_file = pixi.update_lock_file().await.unwrap(); // Check if boltons has a purl + let p = lock_file + .platform(&Platform::current().to_string()) + .unwrap(); lock_file .default_environment() .unwrap() - .packages(Platform::current()) + .packages(p) .unwrap() .for_each(|dep| { if dep.as_conda().unwrap().record().name == PackageName::from_str("boltons").unwrap() { @@ -225,10 +228,13 @@ async fn test_purl_are_added_for_pypi() { let lock_file = pixi.update_lock_file().await.unwrap(); // Check if boltons has a purl + let p = lock_file + .platform(&Platform::current().to_string()) + .unwrap(); lock_file .default_environment() .unwrap() - .packages(Platform::current()) + .packages(p) .unwrap() .for_each(|dep| { if dep.as_conda().unwrap().record().name == PackageName::from_str("boltons").unwrap() { @@ -940,8 +946,9 @@ async fn test_custom_mapping_ignores_backwards_compatibility() { // Get the lock file let lock = pixi.lock_file().await.unwrap(); + let p = lock.platform(&Platform::Linux64.to_string()).unwrap(); let environment = lock.environment(DEFAULT_ENVIRONMENT_NAME).unwrap(); - let conda_packages = environment.conda_packages(Platform::Linux64).unwrap(); + let conda_packages = environment.conda_packages(p).unwrap(); // Collect conda packages to a vector so we can iterate over them let conda_packages: Vec<_> = conda_packages.collect(); @@ -1064,30 +1071,6 @@ version = "0.1.0" lock_file.contains_pypi_package("dev", platform, "my-local-pkg"), "dev environment should contain my-local-pkg" ); - - // With the new architecture, the lock file always stores editable=false - // The actual editability is determined from the manifest at install time - let prod_editable = lock_file - .is_pypi_package_editable("prod", platform, "my-local-pkg") - .expect("should find my-local-pkg in prod"); - let dev_editable = lock_file - .is_pypi_package_editable("dev", 
platform, "my-local-pkg") - .expect("should find my-local-pkg in dev"); - - // Both should have editable=false in the lock file - // The actual editability is applied at install time based on the manifest - assert!( - !prod_editable, - "prod environment should have my-local-pkg with editable=false in lock file, but got editable={prod_editable}", - ); - assert!( - !dev_editable, - "dev environment should have my-local-pkg with editable=false in lock file, but got editable={dev_editable}", - ); - - // The key benefit of this architecture is that changing editability in the manifest - // does NOT require re-locking - only re-installing. Both environments share the same - // lock file entry but can have different editability at install time. } #[tokio::test] diff --git a/crates/pixi/tests/integration_rust/update_tests.rs b/crates/pixi/tests/integration_rust/update_tests.rs index 73df6aa379..a48eafe6de 100644 --- a/crates/pixi/tests/integration_rust/update_tests.rs +++ b/crates/pixi/tests/integration_rust/update_tests.rs @@ -1,5 +1,3 @@ -use std::str::FromStr; - use pixi_consts::consts; use rattler_conda_types::Platform; use rattler_lock::LockFile; @@ -207,7 +205,7 @@ async fn test_update_conda_package_doesnt_update_git_pypi() { ) .unwrap(); - let pkg_version = pkg.as_pypi().unwrap().0.version.to_string(); + let pkg_version = pkg.as_pypi().unwrap().version_string(); let mut lock_file_str = lock.render_to_string().unwrap(); @@ -215,7 +213,6 @@ async fn test_update_conda_package_doesnt_update_git_pypi() { let fragment = pkg .as_pypi() .unwrap() - .0 .location .as_url() .unwrap() @@ -227,7 +224,7 @@ async fn test_update_conda_package_doesnt_update_git_pypi() { lock_file_str = lock_file_str.replace(&pkg_version, "0.1.0"); - let lockfile = LockFile::from_str(&lock_file_str).unwrap(); + let lockfile = LockFile::from_str_with_base_directory(&lock_file_str, None).unwrap(); lockfile.to_path(&workspace.lock_file_path()).unwrap(); diff --git 
a/crates/pixi_api/src/workspace/init/mod.rs b/crates/pixi_api/src/workspace/init/mod.rs index 341ae1201f..224c5ee0f3 100644 --- a/crates/pixi_api/src/workspace/init/mod.rs +++ b/crates/pixi_api/src/workspace/init/mod.rs @@ -157,7 +157,7 @@ pub async fn init(interface: &I, options: InitOptions) -> miette:: Some(name) => (name.to_string(), false), None => (default_name.clone(), true), }; - let environments = pyproject.environments_from_groups().into_diagnostic()?; + let environments = pyproject.environments_from_groups(&dir).into_diagnostic()?; let rv = env .render_named_str( consts::PYPROJECT_MANIFEST, diff --git a/crates/pixi_cli/Cargo.toml b/crates/pixi_cli/Cargo.toml index 3c79eab7d3..a7801e1180 100644 --- a/crates/pixi_cli/Cargo.toml +++ b/crates/pixi_cli/Cargo.toml @@ -41,6 +41,7 @@ miette = { workspace = true, features = ["fancy-no-backtrace"] } miette-arborium = { version = "2.5.0", features = ["lang-toml"] } pathdiff = { workspace = true } pep508_rs = { workspace = true } +percent-encoding = { workspace = true } pixi_api = { workspace = true } pixi_auth = { workspace = true } pixi_build_frontend = { workspace = true } @@ -96,6 +97,7 @@ uv-client = { workspace = true } uv-configuration = { workspace = true } uv-pep508 = { workspace = true } uv-pypi-types = { workspace = true } +uv-redacted = { workspace = true } uv-requirements-txt = { workspace = true } which = { workspace = true } zip = { workspace = true, features = ["deflate", "time"] } diff --git a/crates/pixi_cli/src/build.rs b/crates/pixi_cli/src/build.rs index b6a5a174ae..15b83c175d 100644 --- a/crates/pixi_cli/src/build.rs +++ b/crates/pixi_cli/src/build.rs @@ -318,7 +318,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { variant_configuration: Some(variant_configuration.clone()), variant_files: Some(variant_files.clone()), // Fresh builds don't have pre-existing variants to match against - variants: None, + variants: Default::default(), enabled_protocols: Default::default(), 
work_directory: None, clean: args.clean, diff --git a/crates/pixi_cli/src/tree.rs b/crates/pixi_cli/src/tree.rs index 7a2ed0acce..fd15cc8380 100644 --- a/crates/pixi_cli/src/tree.rs +++ b/crates/pixi_cli/src/tree.rs @@ -87,7 +87,10 @@ pub async fn execute(args: Args) -> miette::Result<()> { let platform = args.platform.unwrap_or_else(|| environment.best_platform()); let locked_deps = lock_file .environment(environment.name().as_str()) - .and_then(|env| env.packages(platform).map(Vec::from_iter)) + .and_then(|env| { + let p = lock_file.platform(&platform.to_string())?; + env.packages(p).map(Vec::from_iter) + }) .unwrap_or_default(); let dep_map = generate_dependency_map(&locked_deps); @@ -137,7 +140,7 @@ pub(crate) fn extract_package_info( dependencies, source: PackageSource::Conda, }) - } else if let Some((pypi_package_data, _pypi_env_data)) = package.as_pypi() { + } else if let Some(pypi_package_data) = package.as_pypi() { let name = pypi_package_data.name.as_dist_info_name().into_owned(); let dependencies = pypi_package_data .requires_dist @@ -181,7 +184,7 @@ pub fn generate_dependency_map(locked_deps: &[LockedPackageRef<'_>]) -> HashMap< LockedPackageRef::Conda(conda_data) => { conda_data.record().version.to_string() } - LockedPackageRef::Pypi(pypi_data, _) => pypi_data.version.to_string(), + LockedPackageRef::Pypi(pypi_data) => pypi_data.version_string(), }, dependencies: package_info .dependencies diff --git a/crates/pixi_cli/src/update.rs b/crates/pixi_cli/src/update.rs index 678e55e70c..75e1421e05 100644 --- a/crates/pixi_cli/src/update.rs +++ b/crates/pixi_cli/src/update.rs @@ -227,7 +227,8 @@ fn ensure_package_exists( let similar_names = environments .iter() .flat_map(|env| env.packages_by_platform()) - .filter_map(|(p, packages)| { + .filter_map(|(lock_p, packages)| { + let p = lock_p.subdir(); if let Some(platforms) = &specs.platforms && !platforms.contains(&p) { diff --git a/crates/pixi_cli/src/workspace/export/conda_explicit_spec.rs 
b/crates/pixi_cli/src/workspace/export/conda_explicit_spec.rs index 4a0577f4e9..700231db51 100644 --- a/crates/pixi_cli/src/workspace/export/conda_explicit_spec.rs +++ b/crates/pixi_cli/src/workspace/export/conda_explicit_spec.rs @@ -105,7 +105,14 @@ fn render_env_platform( platform: &Platform, ignore_pypi_errors: bool, ) -> miette::Result<()> { - let packages = env.packages(*platform).ok_or(miette::miette!( + let lock_platform = env + .lock_file() + .platform(&platform.to_string()) + .ok_or(miette::miette!( + "platform '{platform}' not found for env {}", + env_name, + ))?; + let packages = env.packages(lock_platform).ok_or(miette::miette!( "platform '{platform}' not found for env {}", env_name, ))?; @@ -124,7 +131,7 @@ fn render_env_platform( a spec file containing only the binary conda dependencies from the lockfile." ); } - LockedPackageRef::Pypi(pypi, _) => { + LockedPackageRef::Pypi(pypi) => { if ignore_pypi_errors { tracing::warn!( "ignoring PyPI package {} since PyPI packages are not supported", @@ -198,7 +205,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { let mut env_platform = Vec::new(); for (env_name, env) in environments { - let available_platforms: HashSet = HashSet::from_iter(env.platforms()); + let available_platforms: HashSet = env.platforms().map(|p| p.subdir()).collect(); if let Some(ref platforms) = args.platform { for plat in platforms { @@ -252,7 +259,8 @@ mod tests { let output_dir = tempdir().unwrap(); for (env_name, env) in lockfile.environments() { - for platform in env.platforms() { + for lock_platform in env.platforms() { + let platform = lock_platform.subdir(); // example contains pypi dependencies so should fail if `ignore_pypi_errors` is // false. 
assert!( diff --git a/crates/pixi_command_dispatcher/src/build/build_cache.rs b/crates/pixi_command_dispatcher/src/build/build_cache.rs index 98fc697f71..20e58215db 100644 --- a/crates/pixi_command_dispatcher/src/build/build_cache.rs +++ b/crates/pixi_command_dispatcher/src/build/build_cache.rs @@ -68,7 +68,7 @@ pub struct BuildInput { /// The specific variant values for this build. Different variants result /// in different cache keys to ensure they are cached separately. - pub variants: Option>, + pub variants: BTreeMap, } impl BuildInput { diff --git a/crates/pixi_command_dispatcher/src/cache/source_metadata.rs b/crates/pixi_command_dispatcher/src/cache/source_metadata.rs index 02218273a4..3540ea177d 100644 --- a/crates/pixi_command_dispatcher/src/cache/source_metadata.rs +++ b/crates/pixi_command_dispatcher/src/cache/source_metadata.rs @@ -144,7 +144,7 @@ pub struct CachedSourceRecord { pub package_record: PackageRecord, /// The variants that uniquely identify the way this package was built. - pub variants: Option>, + pub variants: BTreeMap, /// Specifies which packages are expected to be installed as source packages /// and from which location. diff --git a/crates/pixi_command_dispatcher/src/source_build/mod.rs b/crates/pixi_command_dispatcher/src/source_build/mod.rs index a827a6727d..4274e276ac 100644 --- a/crates/pixi_command_dispatcher/src/source_build/mod.rs +++ b/crates/pixi_command_dispatcher/src/source_build/mod.rs @@ -85,7 +85,7 @@ pub struct SourceBuildSpec { /// If provided, output matching uses (name, subdir, variants) instead of /// (name, version, build, subdir). The variants must be a subset of the /// output's variants. - pub variants: Option>, + pub variants: BTreeMap, /// The directory where to place the built package. 
pub output_directory: Option, diff --git a/crates/pixi_command_dispatcher/src/source_build_cache_status/mod.rs b/crates/pixi_command_dispatcher/src/source_build_cache_status/mod.rs index 91a5d0733b..ed0b04b592 100644 --- a/crates/pixi_command_dispatcher/src/source_build_cache_status/mod.rs +++ b/crates/pixi_command_dispatcher/src/source_build_cache_status/mod.rs @@ -49,7 +49,7 @@ pub struct SourceBuildCacheStatusSpec { /// The specific variant values for this build. Different variants result /// in different cache keys to ensure they are cached separately. - pub variants: Option>, + pub variants: BTreeMap, } #[derive(Debug)] diff --git a/crates/pixi_command_dispatcher/src/source_metadata/mod.rs b/crates/pixi_command_dispatcher/src/source_metadata/mod.rs index f01397569d..6c35ce58ff 100644 --- a/crates/pixi_command_dispatcher/src/source_metadata/mod.rs +++ b/crates/pixi_command_dispatcher/src/source_metadata/mod.rs @@ -476,14 +476,12 @@ impl SourceMetadataSpec { .into_iter() .map(|(name, source)| (name.as_source().to_string(), source)) .collect(), - variants: Some( - output - .metadata - .variant - .iter() - .map(|(k, v)| (k.clone(), pixi_record::VariantValue::from(v.clone()))) - .collect(), - ), + variants: output + .metadata + .variant + .iter() + .map(|(k, v)| (k.clone(), pixi_record::VariantValue::from(v.clone()))) + .collect(), }) } diff --git a/crates/pixi_command_dispatcher/tests/integration/main.rs b/crates/pixi_command_dispatcher/tests/integration/main.rs index f088cdd6ae..f46fbf5220 100644 --- a/crates/pixi_command_dispatcher/tests/integration/main.rs +++ b/crates/pixi_command_dispatcher/tests/integration/main.rs @@ -2,7 +2,7 @@ mod event_reporter; mod event_tree; use std::{ - collections::{HashMap, HashSet}, + collections::{BTreeMap, HashMap, HashSet}, path::{Path, PathBuf}, // ptr, str::FromStr, @@ -647,7 +647,7 @@ async fn source_build_cache_status_clear_works() { build_environment: build_env, channel_config: default_channel_config(), enabled_protocols: 
Default::default(), - variants: None, + variants: BTreeMap::new(), }; let first = dispatcher diff --git a/crates/pixi_config/src/lib.rs b/crates/pixi_config/src/lib.rs index 6017557a65..a7ab85e6b7 100644 --- a/crates/pixi_config/src/lib.rs +++ b/crates/pixi_config/src/lib.rs @@ -285,10 +285,6 @@ impl ConfigCliActivation { #[derive(Clone, Default, Debug, Deserialize, Serialize, PartialEq, Eq)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] pub struct RepodataChannelConfig { - /// Disable JLAP compression for repodata. - #[serde(alias = "disable_jlap")] // BREAK: remove to stop supporting snake_case alias - #[serde(skip_serializing_if = "Option::is_none")] - pub disable_jlap: Option, /// Disable bzip2 compression for repodata. #[serde(alias = "disable_bzip2")] // BREAK: remove to stop supporting snake_case alias #[serde(skip_serializing_if = "Option::is_none")] @@ -312,7 +308,6 @@ impl RepodataChannelConfig { pub fn merge(&self, other: Self) -> Self { Self { - disable_jlap: self.disable_jlap.or(other.disable_jlap), disable_zstd: self.disable_zstd.or(other.disable_zstd), disable_bzip2: self.disable_bzip2.or(other.disable_bzip2), disable_sharded: self.disable_sharded.or(other.disable_sharded), @@ -1390,7 +1385,6 @@ impl Config { "pypi-config.keyring-provider", "repodata-config", "repodata-config.disable-bzip2", - "repodata-config.disable-jlap", "repodata-config.disable-sharded", "repodata-config.disable-zstd", "run-post-link-scripts", @@ -1666,10 +1660,6 @@ impl Config { let subkey = key.strip_prefix("repodata-config.").unwrap(); match subkey { - "disable-jlap" => { - self.repodata_config.default.disable_jlap = - value.map(|v| v.parse()).transpose().into_diagnostic()?; - } "disable-bzip2" => { self.repodata_config.default.disable_bzip2 = value.map(|v| v.parse()).transpose().into_diagnostic()?; @@ -2306,7 +2296,6 @@ UNUSED = "unused" repodata_config: RepodataConfig { default: RepodataChannelConfig { disable_bzip2: Some(true), - disable_jlap: Some(true), 
disable_sharded: Some(true), disable_zstd: Some(true), }, @@ -2444,7 +2433,6 @@ UNUSED = "unused" "https://prefix.dev/conda-forge" ] [repodata_config] - disable_jlap = true disable_bzip2 = true disable_zstd = true "#; @@ -2466,7 +2454,6 @@ UNUSED = "unused" Some(&vec![Url::parse("https://prefix.dev/conda-forge").unwrap()]) ); let repodata_config = config.repodata_config; - assert_eq!(repodata_config.default.disable_jlap, Some(true)); assert_eq!(repodata_config.default.disable_bzip2, Some(true)); assert_eq!(repodata_config.default.disable_zstd, Some(true)); assert_eq!(repodata_config.default.disable_sharded, None); @@ -2481,7 +2468,6 @@ UNUSED = "unused" "https://prefix.dev/conda-forge" ] [repodata-config] - disable-jlap = true disable-bzip2 = true disable-zstd = true disable-sharded = true @@ -2545,12 +2531,6 @@ UNUSED = "unused" Some(&vec![Url::parse("https://prefix.dev/conda-forge").unwrap()]) ); - config - .set("repodata-config.disable-jlap", Some("true".to_string())) - .unwrap(); - let repodata_config = config.repodata_config(); - assert_eq!(repodata_config.default.disable_jlap, Some(true)); - config .set( "pypi-config.index-url", @@ -2870,25 +2850,21 @@ UNUSED = "unused" fn test_repodata_config() { let toml = r#" [repodata-config] - disable-jlap = true disable-bzip2 = true disable-zstd = true disable-sharded = true [repodata-config."https://prefix.dev/conda-forge"] - disable-jlap = false disable-bzip2 = false disable-zstd = false disable-sharded = false [repodata-config."https://conda.anaconda.org/conda-forge"] - disable-jlap = false disable-bzip2 = false disable-zstd = false "#; let (config, _) = Config::from_toml(toml, None).unwrap(); let repodata_config = config.repodata_config(); - assert_eq!(repodata_config.default.disable_jlap, Some(true)); assert_eq!(repodata_config.default.disable_bzip2, Some(true)); assert_eq!(repodata_config.default.disable_zstd, Some(true)); assert_eq!(repodata_config.default.disable_sharded, Some(true)); @@ -2899,7 +2875,6 @@ 
UNUSED = "unused" let prefix_config = per_channel .get(&Url::from_str("https://prefix.dev/conda-forge").unwrap()) .unwrap(); - assert_eq!(prefix_config.disable_jlap, Some(false)); assert_eq!(prefix_config.disable_bzip2, Some(false)); assert_eq!(prefix_config.disable_zstd, Some(false)); assert_eq!(prefix_config.disable_sharded, Some(false)); @@ -2907,7 +2882,6 @@ UNUSED = "unused" let anaconda_config = per_channel .get(&Url::from_str("https://conda.anaconda.org/conda-forge").unwrap()) .unwrap(); - assert_eq!(anaconda_config.disable_jlap, Some(false)); assert_eq!(anaconda_config.disable_bzip2, Some(false)); assert_eq!(anaconda_config.disable_zstd, Some(false)); assert_eq!(anaconda_config.disable_sharded, None); diff --git a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap index 063bcde6de..c16519292a 100644 --- a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap +++ b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap @@ -77,9 +77,6 @@ Config { }, repodata_config: RepodataConfig { default: RepodataChannelConfig { - disable_jlap: Some( - true, - ), disable_bzip2: None, disable_zstd: Some( true, diff --git a/crates/pixi_config/tests/config/config_2.toml b/crates/pixi_config/tests/config/config_2.toml index 934ed4bd2c..94fd7d8dc8 100644 --- a/crates/pixi_config/tests/config/config_2.toml +++ b/crates/pixi_config/tests/config/config_2.toml @@ -2,7 +2,6 @@ change_ps1 = true tls_no_verify = false [repodata_config] -disable_jlap = true disable_zstd = true [mirrors] diff --git a/crates/pixi_core/src/activation.rs b/crates/pixi_core/src/activation.rs index dfda191ef2..7eb97f2827 100644 --- a/crates/pixi_core/src/activation.rs +++ b/crates/pixi_core/src/activation.rs @@ -491,7 +491,6 @@ pub(crate) async fn initialize_env_variables( mod tests { use super::*; use std::path::Path; - use std::str::FromStr; 
#[test] fn test_metadata_env() { @@ -705,7 +704,7 @@ packages: "#, platform = Platform::current() ); - let lock_file = LockFile::from_str(mock_lock).unwrap(); + let lock_file = LockFile::from_str_with_base_directory(mock_lock, None).unwrap(); let env = run_activation( &default_env, &CurrentEnvVarBehavior::Include, diff --git a/crates/pixi_core/src/environment/mod.rs b/crates/pixi_core/src/environment/mod.rs index 7cb2f69c7d..9b94babd27 100644 --- a/crates/pixi_core/src/environment/mod.rs +++ b/crates/pixi_core/src/environment/mod.rs @@ -147,7 +147,9 @@ impl EnvironmentHash { // Hash the packages let mut urls = Vec::new(); if let Some(env) = lock_file.environment(run_environment.name().as_str()) - && let Some(packages) = env.packages(run_environment.best_platform()) + && let Some(lock_platform) = + lock_file.platform(&run_environment.best_platform().to_string()) + && let Some(packages) = env.packages(lock_platform) { for package in packages { urls.push(package.location().to_string()) @@ -178,7 +180,8 @@ impl LockedEnvironmentHash { // Intentionally ignore `skipped` here: the quick-validate cache is only // used during runs, and should not vary based on transient install // filters. 
- if let Some(packages) = environment.packages(platform) { + let lock_platform = environment.lock_file().platform(&platform.to_string()); + if let Some(packages) = lock_platform.and_then(|p| environment.packages(p)) { for package in packages { // Always has the url or path package.location().to_owned().to_string().hash(&mut hasher); @@ -192,10 +195,7 @@ impl LockedEnvironmentHash { md5.hash(&mut hasher); } } - LockedPackageRef::Pypi(pack, env) => { - pack.editable.hash(&mut hasher); - env.extras.hash(&mut hasher); - } + LockedPackageRef::Pypi(_) => {} } } } diff --git a/crates/pixi_core/src/lock_file/install_subset.rs b/crates/pixi_core/src/lock_file/install_subset.rs index 002a47363e..b2e3b42745 100644 --- a/crates/pixi_core/src/lock_file/install_subset.rs +++ b/crates/pixi_core/src/lock_file/install_subset.rs @@ -63,7 +63,7 @@ impl<'a> From> for PackageNode { }) .collect() } - LockedPackageRef::Pypi(pypi_data, _env_data) => { + LockedPackageRef::Pypi(pypi_data) => { // For PyPI, use the requirement directly to get the name pypi_data .requires_dist @@ -78,7 +78,7 @@ impl<'a> From> for PackageNode { dependencies: dependency_names, source: match package_ref { LockedPackageRef::Conda(_) => PackageSource::Conda, - LockedPackageRef::Pypi(_, _) => PackageSource::Pypi, + LockedPackageRef::Pypi(_) => PackageSource::Pypi, }, } } diff --git a/crates/pixi_core/src/lock_file/mod.rs b/crates/pixi_core/src/lock_file/mod.rs index 1cae2988c0..a86092820c 100644 --- a/crates/pixi_core/src/lock_file/mod.rs +++ b/crates/pixi_core/src/lock_file/mod.rs @@ -14,7 +14,8 @@ pub use install_subset::{FilteredPackages, InstallSubset}; pub use package_identifier::PypiPackageIdentifier; use pixi_record::PixiRecord; pub use pixi_uv_context::UvResolutionContext; -use rattler_lock::{PypiPackageData, PypiPackageEnvironmentData}; +use rattler_lock::PypiPackageData; +pub use rattler_lock::Verbatim; pub use records_by_name::{PixiRecordsByName, PypiRecordsByName}; pub use resolve::pypi::resolve_pypi; 
pub use satisfiability::{ @@ -33,11 +34,7 @@ pub use utils::IoConcurrencyLimit; pub type LockedCondaPackages = Vec; /// A list of Pypi packages that are locked for a specific platform. -pub type LockedPypiPackages = Vec; - -/// A single Pypi record that contains both the package data and the environment -/// data. In Pixi we basically always need both. -pub type PypiRecord = (PypiPackageData, PypiPackageEnvironmentData); +pub type LockedPypiPackages = Vec; #[cfg(test)] mod tests { diff --git a/crates/pixi_core/src/lock_file/outdated.rs b/crates/pixi_core/src/lock_file/outdated.rs index 4b17ff5369..14e8e0228c 100644 --- a/crates/pixi_core/src/lock_file/outdated.rs +++ b/crates/pixi_core/src/lock_file/outdated.rs @@ -426,7 +426,12 @@ fn find_inconsistent_solve_groups<'p>( continue; }; - for package in locked_env.packages(platform).into_iter().flatten() { + let lock_platform = locked_env.lock_file().platform(&platform.to_string()); + for package in lock_platform + .and_then(|p| locked_env.packages(p)) + .into_iter() + .flatten() + { match package { LockedPackageRef::Conda(pkg) => { match conda_packages_by_name.get(&pkg.record().name) { @@ -440,7 +445,7 @@ fn find_inconsistent_solve_groups<'p>( _ => {} } } - LockedPackageRef::Pypi(pkg, _) => match pypi_packages_by_name.get(&pkg.name) { + LockedPackageRef::Pypi(pkg) => match pypi_packages_by_name.get(&pkg.name) { None => { pypi_packages_by_name.insert(pkg.name.clone(), pkg.location.clone()); } diff --git a/crates/pixi_core/src/lock_file/records_by_name.rs b/crates/pixi_core/src/lock_file/records_by_name.rs index cbd0c0cd41..93447482b8 100644 --- a/crates/pixi_core/src/lock_file/records_by_name.rs +++ b/crates/pixi_core/src/lock_file/records_by_name.rs @@ -1,5 +1,5 @@ use super::package_identifier::ConversionError; -use crate::lock_file::{PypiPackageIdentifier, PypiRecord}; +use crate::lock_file::{PypiPackageData, PypiPackageIdentifier}; use pixi_record::PixiRecord; use pixi_uv_conversions::to_uv_normalize; use 
pypi_modifiers::pypi_tags::is_python_record; @@ -8,7 +8,7 @@ use std::collections::HashMap; use std::collections::hash_map::Entry; use std::hash::Hash; -pub type PypiRecordsByName = DependencyRecordsByName; +pub type PypiRecordsByName = DependencyRecordsByName; pub type PixiRecordsByName = DependencyRecordsByName; /// A trait required from the dependencies stored in DependencyRecordsByName @@ -24,15 +24,17 @@ pub trait HasNameVersion { fn version(&self) -> &Self::V; } -impl HasNameVersion for PypiRecord { +impl HasNameVersion for PypiPackageData { type N = pep508_rs::PackageName; type V = pep440_rs::Version; fn name(&self) -> &pep508_rs::PackageName { - &self.0.name + &self.name } fn version(&self) -> &Self::V { - &self.0.version + self.version + .as_ref() + .expect("pypi record dedup requires a version") } } diff --git a/crates/pixi_core/src/lock_file/resolve/pypi.rs b/crates/pixi_core/src/lock_file/resolve/pypi.rs index c01c52636c..a345d163b1 100644 --- a/crates/pixi_core/src/lock_file/resolve/pypi.rs +++ b/crates/pixi_core/src/lock_file/resolve/pypi.rs @@ -38,9 +38,7 @@ use pypi_modifiers::{ pypi_tags::{get_pypi_tags, is_python_record}, }; use rattler_digest::{Md5, Sha256, parse_digest_from_hex}; -use rattler_lock::{ - PackageHashes, PypiPackageData, PypiPackageEnvironmentData, PypiSourceTreeHashable, UrlOrPath, -}; +use rattler_lock::{PackageHashes, PypiPackageData, PypiSourceTreeHashable, UrlOrPath, Verbatim}; use typed_path::Utf8TypedPathBuf; use url::Url; use uv_cache_key::RepositoryUrl; @@ -66,7 +64,6 @@ use crate::{ environment::CondaPrefixUpdated, lock_file::{ CondaPrefixUpdater, LockedPypiPackages, PixiRecordsByName, PypiPackageIdentifier, - PypiRecord, records_by_name::HasNameVersion, resolve::{ build_dispatch::{ @@ -288,7 +285,7 @@ pub async fn resolve_pypi( dependencies: IndexMap>, system_requirements: SystemRequirements, locked_pixi_records: &[PixiRecord], - locked_pypi_packages: &[PypiRecord], + locked_pypi_packages: &[PypiPackageData], platform: 
rattler_conda_types::Platform, pb: &ProgressBar, project_root: &Path, @@ -367,7 +364,7 @@ pub async fn resolve_pypi( // Pre-populate the git resolver with locked git references. // This ensures that when uv resolves git dependencies, it will find the cached commit // and not panic in `url_to_precise` function. - for (package_data, _) in locked_pypi_packages { + for package_data in locked_pypi_packages { if let Some(location) = package_data.location.as_url() && LockedGitUrl::is_locked_git_url(location) { @@ -459,6 +456,7 @@ pub async fn resolve_pypi( &index_locations, index_strategy, Some(&marker_environment), + Connectivity::Online, ); let build_options = pypi_options_to_build_options( @@ -617,15 +615,15 @@ pub async fn resolve_pypi( let preferences = locked_pypi_packages .iter() .map(|record| { - let (package_data, _) = record; + let Some(version) = &record.version else { + return Ok(None); + }; let requirement = uv_pep508::Requirement { - name: to_uv_normalize(&package_data.name)?, + name: to_uv_normalize(&record.name)?, extras: Vec::new().into(), version_or_url: Some(uv_pep508::VersionOrUrl::VersionSpecifier( uv_pep440::VersionSpecifiers::from( - uv_pep440::VersionSpecifier::equals_version(to_uv_version( - &package_data.version, - )?), + uv_pep440::VersionSpecifier::equals_version(to_uv_version(version)?), ), )), marker: uv_pep508::MarkerTree::TRUE, @@ -636,7 +634,7 @@ pub async fn resolve_pypi( // because they are resolved based on the reference (branch/tag/rev) in the manifest. // This matches how uv handles git dependencies - it doesn't try to pin them via preferences. // The git resolver cache (pre-populated above) ensures the locked commit is preferred. 
- if let Some(location) = package_data.location.as_url() + if let Some(location) = record.location.as_url() && LockedGitUrl::is_locked_git_url(location) { // Skip git packages - they'll be resolved based on manifest reference @@ -943,7 +941,7 @@ async fn lock_pypi_packages( concurrent_downloads: usize, abs_project_root: &Path, original_git_references: &HashMap, -) -> miette::Result> { +) -> miette::Result> { let mut locked_packages = LockedPypiPackages::with_capacity(resolution.len()); let database = DistributionDatabase::new(registry_client, pixi_build_dispatch, concurrent_downloads); @@ -961,7 +959,7 @@ async fn lock_pypi_packages( ResolvedDist::Installable { dist, .. } => match &**dist { Dist::Built(dist) => { - let (location, hash) = match &dist { + let (location, hash, index_url) = match &dist { BuiltDist::Registry(dist) => { let best_wheel = dist.best_wheel(); let hash = parse_hashes_from_hash_vec(&dist.best_wheel().file.hashes) @@ -974,7 +972,7 @@ async fn lock_pypi_packages( ) .into_diagnostic() .context("cannot convert registry dist")?; - (url_or_path, hash) + (url_or_path, hash, Some((*best_wheel.index).clone())) } BuiltDist::DirectUrl(dist) => { let url = dist.url.to_url(); @@ -982,7 +980,7 @@ async fn lock_pypi_packages( .into_diagnostic() .context("cannot create direct url")?; - (UrlOrPath::Url(direct_url), None) + (UrlOrPath::Url(direct_url), None, None) } BuiltDist::Path(dist) => ( UrlOrPath::Path( @@ -994,6 +992,7 @@ async fn lock_pypi_packages( .into_diagnostic()?, ), None, + None, ), }; @@ -1006,9 +1005,11 @@ async fn lock_pypi_packages( name: pep508_rs::PackageName::new(metadata.name.to_string()) .into_diagnostic() .context("cannot convert name")?, - version: pep440_rs::Version::from_str(&metadata.version.to_string()) - .into_diagnostic() - .context("cannot convert version")?, + version: Some( + pep440_rs::Version::from_str(&metadata.version.to_string()) + .into_diagnostic() + .context("cannot convert version")?, + ), requires_python: metadata 
.requires_python .map(|r| to_version_specifiers(&r)) @@ -1018,9 +1019,9 @@ async fn lock_pypi_packages( metadata.requires_dist.iter(), ) .into_diagnostic()?, - editable: false, - location, + location: Verbatim::new(location), hash, + index_url, } } Dist::Source(source) => { @@ -1046,20 +1047,20 @@ async fn lock_pypi_packages( // Use the precise url if we got it back // otherwise try to construct it from the source - let (location, hash, editable) = match source { + let (location, hash, index_url) = match source { SourceDist::Registry(reg) => { let url_or_path = get_url_or_path(®.index, ®.file.url, abs_project_root) .into_diagnostic() .context("cannot convert registry sdist")?; - (url_or_path, hash, false) + (Verbatim::new(url_or_path), hash, Some((*reg.index).clone())) } SourceDist::DirectUrl(direct) => { let url = direct.url.to_url(); let direct_url = Url::parse(&format!("direct+{url}")) .into_diagnostic() .context("could not create direct-url")?; - (direct_url.into(), hash, false) + (Verbatim::new(direct_url.into()), hash, None) } SourceDist::Git(git) => { // Look up the original git reference from the manifest dependencies @@ -1072,9 +1073,11 @@ async fn lock_pypi_packages( let pinned_git_spec = into_pinned_git_spec(git.clone(), original_reference); ( - pinned_git_spec.into_locked_git_url().to_url().into(), + Verbatim::new( + pinned_git_spec.into_locked_git_url().to_url().into(), + ), hash, - false, + None, ) } SourceDist::Path(path) => { @@ -1102,7 +1105,7 @@ async fn lock_pypi_packages( // instead of from the source path to copy the path that was passed in // from the requirement. let url_or_path = UrlOrPath::Path(install_path); - (url_or_path, hash, false) + (Verbatim::new(url_or_path), hash, None) } SourceDist::Directory(dir) => { // Compute the hash of the package based on the source tree. @@ -1125,17 +1128,28 @@ async fn lock_pypi_packages( // Create the url for the lock file. 
This is based on the passed in URL // instead of from the source path to copy the path that was passed in // from the requirement. - let url_or_path = UrlOrPath::Path(install_path); + let url_or_path = if let Some(given) = dir.url.given() { + Verbatim::new_with_given( + UrlOrPath::Path(install_path), + given.to_string(), + ) + } else { + Verbatim::new(UrlOrPath::Path(install_path)) + }; // Always set editable to false in lock file. // Editability is looked up from manifest at install time. - (url_or_path, hash, false) + (url_or_path, hash, None) } }; PypiPackageData { name: to_normalize(&metadata.name).into_diagnostic()?, - version: pep440_rs::Version::from_str(&metadata.version.to_string()) - .into_diagnostic()?, + version: (!metadata.dynamic) + .then(|| { + pep440_rs::Version::from_str(&metadata.version.to_string()) + .into_diagnostic() + }) + .transpose()?, requires_python: metadata .requires_python .map(|r| to_version_specifiers(&r)) @@ -1145,14 +1159,13 @@ async fn lock_pypi_packages( requires_dist: to_requirements(metadata.requires_dist.iter()) .into_diagnostic()?, hash, - editable, + index_url, } } }, }; - // TODO: Store extras in the lock-file - locked_packages.push((pypi_package_data, PypiPackageEnvironmentData::default())); + locked_packages.push(pypi_package_data); } Ok(locked_packages) diff --git a/crates/pixi_core/src/lock_file/satisfiability/mod.rs b/crates/pixi_core/src/lock_file/satisfiability/mod.rs index 12cdbe11f6..ec1685f0a1 100644 --- a/crates/pixi_core/src/lock_file/satisfiability/mod.rs +++ b/crates/pixi_core/src/lock_file/satisfiability/mod.rs @@ -23,6 +23,7 @@ use pixi_manifest::{ FeaturesExt, pypi::pypi_options::{NoBuild, PrereleaseMode}, }; +use pixi_pypi_spec::PixiPypiSource; use pixi_record::{ DevSourceRecord, LockedGitUrl, ParseLockFileError, PinnedBuildSourceSpec, PinnedSourceSpec, PixiRecord, SourceMismatchError, SourceRecord, VariantValue, @@ -52,9 +53,7 @@ use uv_distribution_types::{RequirementSource, RequiresPython}; use 
uv_git_types::GitReference; use uv_pypi_types::ParsedUrlError; -use super::{ - PixiRecordsByName, PypiRecord, PypiRecordsByName, package_identifier::ConversionError, -}; +use super::{PixiRecordsByName, PypiRecordsByName, package_identifier::ConversionError}; use crate::workspace::{ Environment, HasWorkspaceRef, errors::VariantsError, grouped_environment::GroupedEnvironment, }; @@ -547,10 +546,13 @@ pub fn verify_environment_satisfiability( } let platforms = environment.platforms(); - let locked_platforms = locked_environment.platforms().collect::>(); + let locked_platforms = locked_environment + .platforms() + .map(|p| p.subdir()) + .collect::>(); let additional_platforms = locked_platforms .difference(&platforms) - .map(|p| p.to_owned()) + .copied() .collect::>(); if !additional_platforms.is_empty() { return Err(EnvironmentUnsat::AdditionalPlatformsInLockFile( @@ -563,7 +565,8 @@ pub fn verify_environment_satisfiability( // 2. Check if we have a no-build option set, that we only have binary packages, // or an editable source // 3. 
Check that wheel tags still are possible with current system requirements - if !environment.pypi_dependencies(None).is_empty() { + let pypi_dependencies = environment.pypi_dependencies(None); + if !pypi_dependencies.is_empty() { let group_pypi_options = grouped_env.pypi_options(); let indexes = rattler_lock::PypiIndexes::from(group_pypi_options.clone()); @@ -574,9 +577,14 @@ pub fn verify_environment_satisfiability( let pypi_wheel_tags_check = PypiWheelTagsCheck::new(environment, &locked_environment); // Actually check all pypi packages in one iteration - for (platform, package_it) in locked_environment.pypi_packages_by_platform() { - for (package_data, _) in package_it { - no_build_check.check(package_data)?; + for (lock_platform, package_it) in locked_environment.pypi_packages_by_platform() { + let platform = lock_platform.subdir(); + for package_data in package_it { + let pypi_source = pypi_dependencies + .get(&package_data.name) + .and_then(|specs| specs.last()) + .map(|spec| &spec.source); + no_build_check.check(package_data, pypi_source)?; pypi_wheel_tags_check.check(platform, package_data)?; } } @@ -606,7 +614,6 @@ pub fn verify_environment_satisfiability( let locked_prerelease_mode = locked_environment .solve_options() .pypi_prerelease_mode - .unwrap_or_default() .into(); let expected_prerelease_mode = grouped_env .pypi_options() @@ -646,7 +653,10 @@ impl PypiWheelTagsCheck { let system_requirements = environment.system_requirements(); locked_environment .packages_by_platform() - .flat_map(|(platform, packages)| packages.map(move |package| (platform, package))) + .flat_map(|(lock_platform, packages)| { + let platform = lock_platform.subdir(); + packages.map(move |package| (platform, package)) + }) .filter_map(|(platform, package)| match package { LockedPackageRef::Conda(rattler_lock::CondaPackageData::Binary(package)) => { Some((platform, package)) @@ -728,7 +738,11 @@ impl PypiNoBuildCheck { Self { check } } - pub fn check(&self, package_data: 
&PypiPackageData) -> Result<(), EnvironmentUnsat> { + pub fn check( + &self, + package_data: &PypiPackageData, + source: Option<&PixiPypiSource>, + ) -> Result<(), EnvironmentUnsat> { let Some(check) = &self.check else { return Ok(()); }; @@ -747,7 +761,7 @@ impl PypiNoBuildCheck { DistExtension::from_path(Path::new(path)) } - let extension = match &package_data.location { + let extension = match &*package_data.location { // Get the extension from the url UrlOrPath::Url(url) => { if url.scheme().starts_with("git+") { @@ -762,7 +776,15 @@ impl PypiNoBuildCheck { // Editables are allowed with no-build // Check this before is_dir() because the path may be relative // and not resolve correctly from the current working directory - if package_data.editable { + let is_editable = source + .map(|source| match source { + PixiPypiSource::Path { path: _, editable } => { + editable.unwrap_or_default() + } + _ => false, + }) + .unwrap_or_default(); + if is_editable { return Ok(()); } let path = Path::new(path.as_str()); @@ -854,8 +876,15 @@ pub async fn verify_platform_satisfiability( ) -> Result> { // Convert the lock file into a list of conda and pypi packages let mut pixi_records: Vec = Vec::new(); - let mut pypi_packages: Vec = Vec::new(); - for package in locked_environment.packages(platform).into_iter().flatten() { + let mut pypi_packages: Vec = Vec::new(); + let lock_platform = locked_environment + .lock_file() + .platform(&platform.to_string()); + for package in lock_platform + .and_then(|p| locked_environment.packages(p)) + .into_iter() + .flatten() + { match package { LockedPackageRef::Conda(conda) => { let url = conda.location().clone(); @@ -864,8 +893,8 @@ pub async fn verify_platform_satisfiability( .map_err(|e| PlatformUnsat::CorruptedEntry(url.to_string(), e))?, ); } - LockedPackageRef::Pypi(pypi, env) => { - pypi_packages.push((pypi.clone(), env.clone())); + LockedPackageRef::Pypi(pypi) => { + pypi_packages.push(pypi.clone()); } } } @@ -902,7 +931,7 @@ pub async 
fn verify_platform_satisfiability( Ok(pypi_packages) => pypi_packages, Err(duplicate) => { return Err(Box::new(PlatformUnsat::DuplicateEntry( - duplicate.0.name.to_string(), + duplicate.name.to_string(), ))); } }; @@ -961,7 +990,7 @@ pub(crate) fn pypi_satisfies_editable( "editable requirement cannot be from registry, url, git or path (non-directory)" ) } - RequirementSource::Directory { install_path, .. } => match &locked_data.location { + RequirementSource::Directory { install_path, .. } => match &*locked_data.location { // If we have an url requirement locked, but the editable is requested, this does not // satisfy UrlOrPath::Url(url) => Err(Box::new(PlatformUnsat::EditablePackageIsUrl( @@ -1016,7 +1045,7 @@ pub(crate) fn pypi_satisfies_requirement( RequirementSource::Registry { specifier, .. } => { // In the old way we always satisfy based on version so let's keep it similar // here - let version_string = locked_data.version.to_string(); + let version_string = locked_data.version_string(); if specifier.contains( &uv_pep440::Version::from_str(&version_string).expect("could not parse version"), ) { @@ -1031,7 +1060,7 @@ pub(crate) fn pypi_satisfies_requirement( } } RequirementSource::Url { url: spec_url, .. } => { - if let UrlOrPath::Url(locked_url) = &locked_data.location { + if let UrlOrPath::Url(locked_url) = &*locked_data.location { // Url may not start with git, and must start with direct+ if locked_url.as_str().starts_with("git+") || !locked_url.as_str().starts_with("direct+") @@ -1062,7 +1091,7 @@ pub(crate) fn pypi_satisfies_requirement( } => { let repository = git.repository(); let reference = git.reference(); - match &locked_data.location { + match &*locked_data.location { UrlOrPath::Url(url) => { if let Ok(pinned_git_spec) = LockedGitUrl::new(url.clone()).to_pinned_git_spec() { @@ -1156,9 +1185,11 @@ pub(crate) fn pypi_satisfies_requirement( } RequirementSource::Path { install_path, .. } | RequirementSource::Directory { install_path, .. 
} => { - if let UrlOrPath::Path(locked_path) = &locked_data.location { + if let UrlOrPath::Path(locked_path) = &*locked_data.location { + eprintln!("Path from lock: {locked_path:?}"); let install_path = Utf8TypedPathBuf::from(install_path.to_string_lossy().to_string()); + eprintln!("Path from install: {install_path:?}"); let project_root = Utf8TypedPathBuf::from(project_root.to_string_lossy().to_string()); // Join relative paths with the project root @@ -1167,6 +1198,7 @@ pub(crate) fn pypi_satisfies_requirement( } else { project_root.join(locked_path.to_path()).normalize() }; + eprintln!("Path from lock (always absolute): {locked_path:?}"); if locked_path.to_path() != install_path { return Err(PlatformUnsat::LockedPyPIPathMismatch { name: spec.name.clone().to_string(), @@ -1531,20 +1563,19 @@ fn package_records_are_equal(a: &PackageRecord, b: &PackageRecord) -> Result<(), } fn format_source_record(r: &SourceRecord) -> String { - let variants = r.variants.as_ref().map(|v| { - format!( - "[{}]", - v.iter() - .format_with(", ", |(k, v), f| f(&format_args!("{k}={v}"))) - ) - }); + let variants = format!( + "[{}]", + r.variants + .iter() + .format_with(", ", |(k, v), f| f(&format_args!("{k}={v}"))) + ); format!( "{}/{}={}={} {}", &r.package_record.subdir, r.package_record.name.as_source(), &r.package_record.version, &r.package_record.build, - variants.unwrap_or_default() + variants, ) } @@ -2093,18 +2124,16 @@ pub(crate) async fn verify_package_platform_satisfiability( if requirement.is_editable() { if let Err(err) = - pypi_satisfies_editable(&requirement, &record.0, project_root) + pypi_satisfies_editable(&requirement, &record, project_root) { delayed_pypi_error.get_or_insert(err); } FoundPackage::PyPi(PypiPackageIdx(idx), requirement.extras.to_vec()) } else { - if let Err(err) = pypi_satisfies_requirement( - &requirement, - &record.0, - project_root, - ) { + if let Err(err) = + pypi_satisfies_requirement(&requirement, &record, project_root) + { 
delayed_pypi_error.get_or_insert(err); } @@ -2198,7 +2227,7 @@ pub(crate) async fn verify_package_platform_satisfiability( if pypi_packages_visited.insert(idx) { // If this is path based package we need to check if the source tree hash still // matches. and if it is a directory - if let UrlOrPath::Path(path) = &record.0.location { + if let UrlOrPath::Path(path) = &*record.location { let absolute_path = if path.is_absolute() { Cow::Borrowed(Path::new(path.as_str())) } else { @@ -2209,13 +2238,13 @@ pub(crate) async fn verify_package_platform_satisfiability( match PypiSourceTreeHashable::from_directory(&absolute_path) .map(|hashable| hashable.hash()) { - Ok(hashable) if Some(&hashable) != record.0.hash.as_ref() => { + Ok(hashable) if Some(&hashable) != record.hash.as_ref() => { delayed_pypi_error.get_or_insert_with(|| { Box::new(PlatformUnsat::SourceTreeHashMismatch( - record.0.name.clone(), + record.name.clone(), SourceTreeHashMismatch { computed: hashable, - locked: record.0.hash.clone(), + locked: record.hash.clone(), }, )) }); @@ -2224,7 +2253,7 @@ pub(crate) async fn verify_package_platform_satisfiability( Err(err) => { delayed_pypi_error.get_or_insert_with(|| { Box::new(PlatformUnsat::FailedToDetermineSourceTreeHash( - record.0.name.clone(), + record.name.clone(), err, )) }); @@ -2234,7 +2263,7 @@ pub(crate) async fn verify_package_platform_satisfiability( } // Ensure that the record matches the currently selected interpreter. 
- if let Some(requires_python) = &record.0.requires_python { + if let Some(requires_python) = &record.requires_python { let uv_specifier_requires_python = to_uv_specifiers(requires_python) .expect("pep440 conversion should never fail"); @@ -2252,7 +2281,7 @@ pub(crate) async fn verify_package_platform_satisfiability( if !marker_requires_python.is_contained_by(&uv_specifier_requires_python) { delayed_pypi_error.get_or_insert_with(|| { Box::new(PlatformUnsat::PythonVersionMismatch( - record.0.name.clone(), + record.name.clone(), requires_python.clone(), marker_version.into(), )) @@ -2262,7 +2291,7 @@ pub(crate) async fn verify_package_platform_satisfiability( } // Add all the requirements of the package to the queue. - for requirement in &record.0.requires_dist { + for requirement in &record.requires_dist { let requirement = match pep508_requirement_to_uv_requirement(requirement.clone()) { Ok(requirement) => requirement, @@ -2286,7 +2315,7 @@ pub(crate) async fn verify_package_platform_satisfiability( pypi_queue.push(Dependency::PyPi( requirement.clone(), - record.0.name.as_ref().to_string().into(), + record.name.as_ref().to_string().into(), )); } } @@ -2625,7 +2654,7 @@ mod tests { use pixi_build_backend_passthrough::PassthroughBackend; use pixi_build_frontend::BackendOverride; use pixi_command_dispatcher::CacheDirs; - use rattler_lock::LockFile; + use rattler_lock::{LockFile, Verbatim}; use rstest::rstest; use tracing_test::traced_test; @@ -2839,14 +2868,14 @@ mod tests { // Mock locked data let locked_data = PypiPackageData { name: "mypkg".parse().unwrap(), - version: Version::from_str("0.1.0").unwrap(), + version: Some(Version::from_str("0.1.0").unwrap()), location: "git+https://github.com/mypkg@rev=29932f3915935d773dc8d52c292cadd81c81071d#29932f3915935d773dc8d52c292cadd81c81071d" .parse() .expect("failed to parse url"), hash: None, + index_url: None, requires_dist: vec![], requires_python: None, - editable: false, }; let spec = 
pep508_requirement_to_uv_requirement( pep508_rs::Requirement::from_str("mypkg @ git+https://github.com/mypkg@2993").unwrap(), ) .unwrap(); @@ -2859,14 +2888,14 @@ let locked_data = PypiPackageData { name: "mypkg".parse().unwrap(), - version: Version::from_str("0.1.0").unwrap(), + version: Some(Version::from_str("0.1.0").unwrap()), location: "git+https://github.com/mypkg.git?rev=29932f3915935d773dc8d52c292cadd81c81071d#29932f3915935d773dc8d52c292cadd81c81071d" .parse() .expect("failed to parse url"), hash: None, + index_url: None, requires_dist: vec![], requires_python: None, - editable: false, }; let spec = pep508_requirement_to_uv_requirement( pep508_rs::Requirement::from_str( @@ -2897,15 +2926,15 @@ // should satisfy let locked_data_default_branch = PypiPackageData { name: "mypkg".parse().unwrap(), - version: Version::from_str("0.1.0").unwrap(), + version: Some(Version::from_str("0.1.0").unwrap()), // No ?rev= query param, only the fragment with commit hash location: "git+https://github.com/mypkg.git#29932f3915935d773dc8d52c292cadd81c81071d" .parse() .expect("failed to parse url"), hash: None, + index_url: None, requires_dist: vec![], requires_python: None, - editable: false, }; pypi_satisfies_requirement( &spec_without_rev, @@ -2917,22 +2946,25 @@ // Currently this test is missing from `good_satisfiability`, so we test the // specific windows case here this should work an all supported platforms + // + // Do not use windows here: The path gets normalized to something unix-y, and + // the lockfile keeps the "pretty" path the user filled in at all times. So + // on windows the test fails. 
#[test] - fn test_windows_absolute_path_handling() { + fn test_unix_absolute_path_handling() { // Mock locked data let locked_data = PypiPackageData { name: "mypkg".parse().unwrap(), - version: Version::from_str("0.1.0").unwrap(), - location: UrlOrPath::Path("C:\\Users\\username\\mypkg.tar.gz".into()), + version: Some(Version::from_str("0.1.0").unwrap()), + location: Verbatim::new(UrlOrPath::Path("/home/username/mypkg.tar.gz".into())), hash: None, + index_url: None, requires_dist: vec![], requires_python: None, - editable: false, }; let spec = - pep508_rs::Requirement::from_str("mypkg @ file:///C:\\Users\\username\\mypkg.tar.gz") - .unwrap(); + pep508_rs::Requirement::from_str("mypkg @ file:///home/username/mypkg.tar.gz").unwrap(); let spec = pep508_requirement_to_uv_requirement(spec).unwrap(); diff --git a/crates/pixi_core/src/lock_file/update.rs b/crates/pixi_core/src/lock_file/update.rs index 2add29e899..6fbb1fc62e 100644 --- a/crates/pixi_core/src/lock_file/update.rs +++ b/crates/pixi_core/src/lock_file/update.rs @@ -61,7 +61,7 @@ use crate::{ read_environment_file, write_environment_file, }, lock_file::{ - self, PypiRecord, reporter::SolveProgressBar, + self, PypiPackageData, reporter::SolveProgressBar, virtual_packages::validate_system_meets_environment_requirements, }, workspace::{ @@ -614,7 +614,9 @@ impl<'p> LockFileDerivedData<'p> { // If we contain source packages from conda or PyPI we update the prefix by // default let contains_conda_source_pkgs = self.lock_file.environments().any(|(_, env)| { - env.conda_packages(Platform::current()) + self.lock_file + .platform(&Platform::current().to_string()) + .and_then(|p| env.conda_packages(p)) .is_some_and(|mut packages| { packages.any(|package| package.as_source().is_some()) }) @@ -680,7 +682,8 @@ impl<'p> LockFileDerivedData<'p> { &filter.skip_direct, &filter.target_packages, ); - let result = subset.filter(locked_env.packages(platform))?; + let lock_platform = self.lock_file.platform(&platform.to_string()); 
+ let result = subset.filter(lock_platform.and_then(|p| locked_env.packages(p)))?; let packages = result.install; let ignored = result.ignore; @@ -692,7 +695,7 @@ impl<'p> LockFileDerivedData<'p> { let (ignored_conda, ignored_pypi): (HashSet<_>, HashSet<_>) = ignored.into_iter().partition_map(|p| match p { LockedPackageRef::Conda(data) => Either::Left(data.record().name.clone()), - LockedPackageRef::Pypi(data, _) => Either::Right(data.name.clone()), + LockedPackageRef::Pypi(data) => Either::Right(data.name.clone()), }); let pixi_records = @@ -706,10 +709,13 @@ impl<'p> LockFileDerivedData<'p> { let pypi_records = pypi_packages .into_iter() .filter_map(LockedPackageRef::as_pypi) - .map(|(data, env_data)| { - let mut data = data.clone(); - data.editable = is_editable_from_manifest(&manifest_pypi_deps, &data.name); - (data, env_data.clone()) + .map(move |data| { + ( + data.clone(), + pixi_install_pypi::ManifestData { + editable: is_editable_from_manifest(&manifest_pypi_deps, &data.name), + }, + ) }) .collect::>(); @@ -884,7 +890,8 @@ impl<'p> LockFileDerivedData<'p> { // Get the locked environment from the lock-file. let locked_env = self.locked_env(environment)?; - let packages = locked_env.packages(platform); + let lock_platform = self.lock_file.platform(&platform.to_string()); + let packages = lock_platform.and_then(|p| locked_env.packages(p)); let packages = if let Some(iter) = packages { iter.collect_vec() } else { @@ -954,7 +961,10 @@ impl PackageFilterNames { &filter.skip_direct, &filter.target_packages, ); - let filtered = subset.filter(environment.packages(platform)).ok()?; + let lock_platform = environment.lock_file().platform(&platform.to_string()); + let filtered = subset + .filter(lock_platform.and_then(|p| environment.packages(p))) + .ok()?; // Map to names, dedupe and sort for stable output. 
let retained = filtered @@ -1349,7 +1359,8 @@ impl<'p> UpdateContextBuilder<'p> { .map(move |locked_env| { locked_env .conda_packages_by_platform() - .map(|(platform, records)| { + .map(|(lock_platform, records)| { + let platform = lock_platform.subdir(); records .cloned() .map(|data| { @@ -1379,12 +1390,12 @@ impl<'p> UpdateContextBuilder<'p> { env.clone(), locked_env .pypi_packages_by_platform() - .map(|(platform, records)| { + .map(|(lock_platform, records)| { ( - platform, - Arc::new(PypiRecordsByName::from_iter(records.map( - |(data, env_data)| (data.clone(), env_data.clone()), - ))), + lock_platform.subdir(), + Arc::new(PypiRecordsByName::from_iter( + records.map(|data| data.clone()), + )), ) }) .collect(), @@ -2026,7 +2037,22 @@ impl<'p> UpdateContext<'p> { } // Construct a new lock-file containing all the updated or old records. - let mut builder = LockFile::builder(); + // First, collect all platforms across all environments and register them. + let all_platforms: Vec = project + .environments() + .into_iter() + .flat_map(|env| env.platforms()) + .collect::>() + .into_iter() + .map(|p| rattler_lock::PlatformData { + name: rattler_lock::PlatformName::from(&p), + subdir: p, + virtual_packages: Vec::new(), + }) + .collect(); + let mut builder = LockFile::builder() + .with_platforms(all_platforms) + .expect("all platforms should be unique"); // Iterate over all environments and add their records to the lock-file. 
for environment in project.environments() { @@ -2059,29 +2085,29 @@ impl<'p> UpdateContext<'p> { .unwrap_or_default() .into(), exclude_newer: grouped_env.exclude_newer(), - pypi_prerelease_mode: Some(pypi_prerelease_mode.into()), + pypi_prerelease_mode: pypi_prerelease_mode.into(), }, ); let mut has_pypi_records = false; for platform in environment.platforms() { + let platform_str = platform.to_string(); if let Some(records) = self.take_latest_repodata_records(&environment, platform) { for record in records.into_inner() { - builder.add_conda_package( - &environment_name, - platform, - record.into_conda_package_data(project.root()), - ); + builder + .add_conda_package( + &environment_name, + &platform_str, + record.into_conda_package_data(project.root()), + ) + .expect("platform was registered"); } } if let Some(records) = self.take_latest_pypi_records(&environment, platform) { - for (pkg_data, pkg_env_data) in records.into_inner() { - builder.add_pypi_package( - &environment_name, - platform, - pkg_data, - pkg_env_data, - ); + for pkg_data in records.into_inner() { + builder + .add_pypi_package(&environment_name, &platform_str, pkg_data) + .expect("platform was registered"); has_pypi_records = true; } } @@ -2396,7 +2422,7 @@ async fn spawn_extract_environment_task( enum PackageRecord<'a> { Conda(&'a PixiRecord), - Pypi((&'a PypiRecord, Option)), + Pypi((&'a PypiPackageData, Option)), } // Determine the conda packages we need. @@ -2544,7 +2570,7 @@ async fn spawn_extract_environment_task( .into_diagnostic()? 
.unwrap_or_default(); - for req in record.0.requires_dist.iter() { + for req in record.requires_dist.iter() { // Evaluate the marker environment with the given extras if let Some(marker_env) = &marker_environment { // let marker_str = marker_env.to_string(); @@ -2577,7 +2603,7 @@ async fn spawn_extract_environment_task( } // Insert the record if it is not already present - pypi_records.entry(record.0.name.clone()).or_insert(record); + pypi_records.entry(record.name.clone()).or_insert(record); } } } diff --git a/crates/pixi_core/src/lock_file/utils.rs b/crates/pixi_core/src/lock_file/utils.rs index 76fc534b4d..31517bcba6 100644 --- a/crates/pixi_core/src/lock_file/utils.rs +++ b/crates/pixi_core/src/lock_file/utils.rs @@ -37,7 +37,18 @@ pub fn filter_lock_file< lock_file: &'lock LockFile, mut filter: F, ) -> LockFile { - let mut builder = LockFileBuilder::new(); + // Register all platforms from the original lock file. + let platforms: Vec = lock_file + .platforms() + .map(|p| rattler_lock::PlatformData { + name: p.name().clone(), + subdir: p.subdir(), + virtual_packages: p.virtual_packages().to_vec(), + }) + .collect(); + let mut builder = LockFileBuilder::new() + .with_platforms(platforms) + .expect("lock file platforms should be unique"); for (environment_name, environment) in lock_file.environments() { // Find the environment in the project @@ -58,10 +69,14 @@ pub fn filter_lock_file< builder.set_pypi_indexes(environment_name, indexes); // Copy all packages that don't need to be relaxed - for (platform, packages) in environment.packages_by_platform() { + for (lock_platform, packages) in environment.packages_by_platform() { + let platform = lock_platform.subdir(); + let platform_str = platform.to_string(); for package in packages { if filter(&project_env, platform, package) { - builder.add_package(environment_name, platform, package.into()); + builder + .add_package(environment_name, &platform_str, package.into()) + .expect("platform was registered"); } } } diff 
--git a/crates/pixi_core/src/lock_file/virtual_packages.rs b/crates/pixi_core/src/lock_file/virtual_packages.rs index df8d523256..dbfd2039dc 100644 --- a/crates/pixi_core/src/lock_file/virtual_packages.rs +++ b/crates/pixi_core/src/lock_file/virtual_packages.rs @@ -155,7 +155,8 @@ pub(crate) fn validate_system_meets_environment_requirements( )?; // Retrieve all conda packages for the specified platform (both binary and source). - let Some(conda_packages) = environment.conda_packages(platform) else { + let lock_platform = environment.lock_file().platform(&platform.to_string()); + let Some(conda_packages) = lock_platform.and_then(|p| environment.conda_packages(p)) else { // Early out if there are no packages, as we don't need to check for virtual packages return Ok(true); }; @@ -255,8 +256,8 @@ pub(crate) fn validate_system_meets_environment_requirements( } // Check if the wheel tags match the system virtual packages if there are any - if environment.has_pypi_packages(platform) - && let Some(pypi_packages) = environment.pypi_packages(platform) + if lock_platform.is_some_and(|p| environment.has_pypi_packages(p)) + && let Some(pypi_packages) = lock_platform.and_then(|p| environment.pypi_packages(p)) { // Get python record from conda packages let python_record = conda_records @@ -265,9 +266,7 @@ pub(crate) fn validate_system_meets_environment_requirements( .ok_or(MachineValidationError::NoPythonRecordFound(platform))?; // Check if all the wheel tags match the system virtual packages - let pypi_packages = pypi_packages - .map(|(pkg_data, _)| pkg_data.clone()) - .collect_vec(); + let pypi_packages = pypi_packages.map(|pkg_data| pkg_data.clone()).collect_vec(); let wheels = get_wheels_from_pypi_package_data(pypi_packages); @@ -305,8 +304,9 @@ mod test { let lockfile = LockFile::from_path(&lockfile_path).unwrap(); let platform = Platform::Linux64; let env = lockfile.default_environment().unwrap(); + let lock_platform = lockfile.platform(&platform.to_string()).unwrap(); let 
conda_data = env - .conda_repodata_records(platform) + .conda_repodata_records(lock_platform) .map_err(MachineValidationError::RepodataConversionError) .unwrap() .unwrap(); diff --git a/crates/pixi_core/src/workspace/mod.rs b/crates/pixi_core/src/workspace/mod.rs index 54d42203f1..733f0c9af1 100644 --- a/crates/pixi_core/src/workspace/mod.rs +++ b/crates/pixi_core/src/workspace/mod.rs @@ -764,7 +764,7 @@ impl Workspace { LockedPackageRef::Conda(package) => { !conda_packages.contains(&package.record().name) } - LockedPackageRef::Pypi(package, _env) => !pypi_packages.contains(&package.name), + LockedPackageRef::Pypi(package) => !pypi_packages.contains(&package.name), } } else { true diff --git a/crates/pixi_core/src/workspace/workspace_mut.rs b/crates/pixi_core/src/workspace/workspace_mut.rs index 5d2f8ec34b..f2c57dbf64 100644 --- a/crates/pixi_core/src/workspace/workspace_mut.rs +++ b/crates/pixi_core/src/workspace/workspace_mut.rs @@ -532,7 +532,8 @@ impl WorkspaceMut { // platforms .filter_map(|(env, platform)| { let locked_env = updated_lock_file.environment(&env)?; - locked_env.conda_repodata_records(platform).ok()? + let lock_platform = updated_lock_file.platform(&platform.to_string())?; + locked_env.conda_repodata_records(lock_platform).ok()? 
}) .flatten() .collect_vec(); @@ -617,7 +618,10 @@ impl WorkspaceMut { // Get all the conda and pypi records for the combination of environments and // platforms .iter() - .filter_map(|(env, platform)| env.pypi_packages(*platform)) + .filter_map(|(env, platform)| { + let lock_platform = env.lock_file().platform(&platform.to_string())?; + env.pypi_packages(lock_platform) + }) .flatten() .collect_vec(); @@ -632,9 +636,11 @@ impl WorkspaceMut { let version_constraint = pinning_strategy.determine_version_constraint( pypi_records .iter() - .filter_map(|(data, _)| { + .filter_map(|data| { if &data.name == name.as_normalized() { - Version::from_str(&data.version.to_string()).ok() + data.version + .as_ref() + .and_then(|v| Version::from_str(&v.to_string()).ok()) } else { None } diff --git a/crates/pixi_diff/src/lib.rs b/crates/pixi_diff/src/lib.rs index 5bbed7820b..7d09ba2d76 100644 --- a/crates/pixi_diff/src/lib.rs +++ b/crates/pixi_diff/src/lib.rs @@ -46,14 +46,18 @@ impl LockFileDiff { let mut environment_diff = IndexMap::new(); - for (platform, packages) in environment.packages_by_platform() { + for (lock_platform, packages) in environment.packages_by_platform() { + let platform = lock_platform.subdir(); // Determine the packages that were previously there. 
let (mut previous_conda_packages, mut previous_pypi_packages): ( HashMap<_, _>, HashMap<_, _>, ) = previous .as_ref() - .and_then(|e| e.packages(platform)) + .and_then(|e| { + let p = e.lock_file().platform(lock_platform.name())?; + e.packages(p) + }) .into_iter() .flatten() .partition_map(|p| match p { @@ -61,11 +65,8 @@ impl LockFileDiff { conda_package_data.record().name.clone(), conda_package_data, )), - LockedPackageRef::Pypi(pypi_package_data, pypi_env_data) => { - Either::Right(( - pypi_package_data.name.clone(), - (pypi_package_data, pypi_env_data), - )) + LockedPackageRef::Pypi(pypi_package_data) => { + Either::Right((pypi_package_data.name.clone(), pypi_package_data)) } }); @@ -87,19 +88,15 @@ impl LockFileDiff { _ => {} } } - LockedPackageRef::Pypi(data, env) => { + LockedPackageRef::Pypi(data) => { let name = &data.name; match previous_pypi_packages.remove(name) { - Some((previous_data, previous_env)) - if previous_data.location != data.location => - { - diff.changed.push(( - (previous_data.clone(), previous_env.clone()).into(), - (data.clone(), env.clone()).into(), - )); + Some(previous_data) if previous_data.location != data.location => { + diff.changed + .push((previous_data.clone().into(), data.clone().into())); } None => { - diff.added.push((data.clone(), env.clone()).into()); + diff.added.push(data.clone().into()); } _ => {} } @@ -111,22 +108,23 @@ impl LockFileDiff { for (_, p) in previous_conda_packages { diff.removed.push(p.clone().into()); } - for (_, (data, env)) in previous_pypi_packages { - diff.removed.push((data.clone(), env.clone()).into()); + for (_, data) in previous_pypi_packages { + diff.removed.push(data.clone().into()); } environment_diff.insert(platform, diff); } // Find platforms that were completely removed - for (platform, packages) in previous + for (lock_platform, packages) in previous .as_ref() .map(|e| e.packages_by_platform()) .into_iter() .flatten() - .filter(|(platform, _)| !environment_diff.contains_key(platform)) + 
.filter(|(p, _)| !environment_diff.contains_key(&p.subdir())) .collect_vec() { + let platform = lock_platform.subdir(); let mut diff = PackagesDiff::default(); for package in packages { diff.removed.push(package.into()); @@ -149,12 +147,12 @@ impl LockFileDiff { .collect_vec() { let mut environment_diff = IndexMap::new(); - for (platform, packages) in environment.packages_by_platform() { + for (lock_platform, packages) in environment.packages_by_platform() { let mut diff = PackagesDiff::default(); for package in packages { diff.removed.push(package.into()); } - environment_diff.insert(platform, diff); + environment_diff.insert(lock_platform.subdir(), diff); } result .environment @@ -257,7 +255,7 @@ impl LockFileDiff { LockedPackage::Conda(p) => { format!("{} {}", &p.record().version.as_str(), &p.record().build) } - LockedPackage::Pypi(p, _) => p.version.to_string(), + LockedPackage::Pypi(p) => p.version_string(), } } @@ -324,20 +322,16 @@ impl LockFileDiff { choose_style(current.build.as_str(), previous.build.as_str()), ) } - (LockedPackage::Pypi(previous, _), LockedPackage::Pypi(current, _)) => { + (LockedPackage::Pypi(previous), LockedPackage::Pypi(current)) => { + let prev_ver = previous.version_string(); + let curr_ver = current.version_string(); format!( "{} {} {}\t{}\t->\t{}", console::style("~").yellow(), consts::PypiEmoji, name, - choose_style( - &previous.version.to_string(), - ¤t.version.to_string() - ), - choose_style( - ¤t.version.to_string(), - &previous.version.to_string() - ), + choose_style(&prev_ver, &curr_ver), + choose_style(&curr_ver, &prev_ver), ) } _ => unreachable!(), @@ -412,7 +406,7 @@ impl LockFileJsonDiff { ty: JsonPackageType::Conda, explicit: conda_dependencies.contains_key(&pkg.record().name), }, - LockedPackage::Pypi(pkg, _) => JsonPackageDiff { + LockedPackage::Pypi(pkg) => JsonPackageDiff { name: pkg.name.as_dist_info_name().into_owned(), before: None, after: Some( @@ -434,7 +428,7 @@ impl LockFileJsonDiff { explicit: 
conda_dependencies.contains_key(&pkg.record().name), }, - LockedPackage::Pypi(pkg, _) => JsonPackageDiff { + LockedPackage::Pypi(pkg) => JsonPackageDiff { name: pkg.name.as_dist_info_name().into_owned(), before: Some( serde_json::to_value(&pkg).expect("should be able to serialize"), @@ -459,7 +453,7 @@ impl LockFileJsonDiff { explicit: conda_dependencies.contains_key(&old.record().name), } } - (LockedPackage::Pypi(old, _), LockedPackage::Pypi(new, _)) => { + (LockedPackage::Pypi(old), LockedPackage::Pypi(new)) => { let before = serde_json::to_value(&old).expect("should be able to serialize"); let after = serde_json::to_value(&new).expect("should be able to serialize"); let (before, after) = compute_json_diff(before, after); diff --git a/crates/pixi_global/src/install.rs b/crates/pixi_global/src/install.rs index 65c7367d62..4bfb73d99f 100644 --- a/crates/pixi_global/src/install.rs +++ b/crates/pixi_global/src/install.rs @@ -369,11 +369,15 @@ mod tests { #[fixture] fn ripgrep_records() -> Vec { - LockFile::from_str(include_str!("./test_data/lockfiles/ripgrep.lock")) - .unwrap() - .default_environment() + let lock = LockFile::from_str_with_base_directory( + include_str!("./test_data/lockfiles/ripgrep.lock"), + None, + ) + .unwrap(); + let p = lock.platform(&Platform::Linux64.to_string()).unwrap(); + lock.default_environment() .unwrap() - .conda_repodata_records(Platform::Linux64) + .conda_repodata_records(p) .unwrap() .unwrap() .into_iter() @@ -391,11 +395,15 @@ mod tests { #[fixture] fn ripgrep_bat_records() -> Vec { - LockFile::from_str(include_str!("./test_data/lockfiles/ripgrep_bat.lock")) - .unwrap() - .default_environment() + let lock = LockFile::from_str_with_base_directory( + include_str!("./test_data/lockfiles/ripgrep_bat.lock"), + None, + ) + .unwrap(); + let p = lock.platform(&Platform::Linux64.to_string()).unwrap(); + lock.default_environment() .unwrap() - .conda_repodata_records(Platform::Linux64) + .conda_repodata_records(p) .unwrap() .unwrap() 
.into_iter() diff --git a/crates/pixi_install_pypi/src/conversions.rs b/crates/pixi_install_pypi/src/conversions.rs index c945acd25c..075f529eaf 100644 --- a/crates/pixi_install_pypi/src/conversions.rs +++ b/crates/pixi_install_pypi/src/conversions.rs @@ -95,10 +95,11 @@ pub enum ConvertToUvDistError { /// Convert from a PypiPackageData to a uv [`distribution_types::Dist`] pub fn convert_to_dist( pkg: &PypiPackageData, + manifest_data: &crate::ManifestData, lock_file_dir: &Path, ) -> Result { // Figure out if it is a url from the registry or a direct url - let dist = match &pkg.location { + let dist = match &*pkg.location { UrlOrPath::Url(url) if is_direct_url(url.scheme()) => { let url_without_direct = strip_direct_scheme(url); let pkg_name = to_uv_normalize(&pkg.name)?; @@ -178,7 +179,11 @@ pub fn convert_to_dist( })) } else { let pkg_name = to_uv_normalize(&pkg.name)?; - let pkg_version = to_uv_version(&pkg.version)?; + let pkg_version = to_uv_version( + pkg.version + .as_ref() + .expect("registry source dists always have a version"), + )?; Dist::Source(SourceDist::Registry(RegistrySourceDist { name: pkg_name, version: pkg_version, @@ -211,7 +216,7 @@ pub fn convert_to_dist( pkg_name, absolute_url, &abs_path, - Some(pkg.editable), + Some(manifest_data.editable), Some(false), )? 
} else { @@ -249,18 +254,22 @@ mod tests { // Pass into locked data let locked = PypiPackageData { name: "torch".parse().unwrap(), - version: Version::from_str("2.3.0+cu121").unwrap(), - location: UrlOrPath::Url(url), + version: Some(Version::from_str("2.3.0+cu121").unwrap()), + location: UrlOrPath::Url(url).into(), hash: None, + index_url: None, requires_dist: vec![], requires_python: None, - editable: false, }; // Convert the locked data to a uv dist // check if it does not panic - let dist = convert_to_dist(&locked, &PathBuf::new()) - .expect("could not convert wheel with special chars to dist"); + let dist = convert_to_dist( + &locked, + &crate::ManifestData { editable: false }, + &PathBuf::new(), + ) + .expect("could not convert wheel with special chars to dist"); // Check if the dist is a built dist assert!(!dist.filename().unwrap().contains("%2B")); diff --git a/crates/pixi_install_pypi/src/lib.rs b/crates/pixi_install_pypi/src/lib.rs index a462fb1fb4..9b0f6daf6e 100644 --- a/crates/pixi_install_pypi/src/lib.rs +++ b/crates/pixi_install_pypi/src/lib.rs @@ -31,7 +31,7 @@ use pypi_modifiers::{ pypi_tags::{get_pypi_tags, is_python_record}, }; use rattler_conda_types::Platform; -use rattler_lock::{PypiIndexes, PypiPackageData, PypiPackageEnvironmentData}; +use rattler_lock::{PypiIndexes, PypiPackageData}; use rayon::prelude::*; use utils::elapsed; use uv_auth::store_credentials_from_url; @@ -51,7 +51,12 @@ use uv_resolver::{ExcludeNewer, FlatIndex}; use crate::plan::{CachedWheels, RequiredDists}; -pub type PyPIRecords = (PypiPackageData, PypiPackageEnvironmentData); +/// Extra data available from the manifest, not the lockfile +pub struct ManifestData { + pub editable: bool, +} + +pub type PyPIRecords = (PypiPackageData, ManifestData); pub(crate) mod conda_pypi_clobber; pub(crate) mod conversions; @@ -414,6 +419,7 @@ impl<'a> PyPIEnvironmentUpdater<'a> { &planner_config.index_locations, index_strategy, None, + Connectivity::Online, ); // Resolve the flat indexes 
from `--find-links`. @@ -473,11 +479,12 @@ impl<'a> PyPIEnvironmentUpdater<'a> { /// Create the installation plan by analyzing current state vs requirements async fn create_installation_plan( &self, - pypi_records: &[(PypiPackageData, PypiPackageEnvironmentData)], + pypi_records: &[crate::PyPIRecords], planner_config: &UvInstallerPlannerConfig, ) -> miette::Result { // Create required distributions with pre-created Dist objects - let required_packages: Vec<_> = pypi_records.iter().map(|(pkg, _)| pkg.clone()).collect(); + let required_packages: Vec<_> = + pypi_records.iter().map(|(pkg, spec)| (pkg, spec)).collect(); let required_dists = RequiredDists::from_packages(&required_packages, self.config.lock_file_dir) .into_diagnostic() diff --git a/crates/pixi_install_pypi/src/plan/models.rs b/crates/pixi_install_pypi/src/plan/models.rs index 98bb3b3043..c8299cb033 100644 --- a/crates/pixi_install_pypi/src/plan/models.rs +++ b/crates/pixi_install_pypi/src/plan/models.rs @@ -44,7 +44,7 @@ pub(crate) enum NeedReinstall { /// The package is not installed VersionMismatch { installed_version: uv_pep440::Version, - locked_version: pep440_rs::Version, + locked_version: String, }, /// The `direct_url.json` file is missing MissingDirectUrl, @@ -56,7 +56,7 @@ pub(crate) enum NeedReinstall { UnableToConvertLockedPath { path: String }, /// The editable status of the installed wheel changed with regards to the locked version EditableStatusChanged { - locked_editable: bool, + required_editable: bool, installed_editable: bool, }, /// Somehow unable to parse the installed dist url @@ -113,7 +113,7 @@ impl std::fmt::Display for NeedReinstall { write!(f, "Unable to parse file url: {url}") } NeedReinstall::EditableStatusChanged { - locked_editable, + required_editable: locked_editable, installed_editable, } => { write!( diff --git a/crates/pixi_install_pypi/src/plan/planner.rs b/crates/pixi_install_pypi/src/plan/planner.rs index c81a1d265d..87d6425068 100644 --- 
a/crates/pixi_install_pypi/src/plan/planner.rs +++ b/crates/pixi_install_pypi/src/plan/planner.rs @@ -135,9 +135,8 @@ impl InstallPlanner { )); } else { // Check if we need to reinstall - match need_reinstall(dist, required_pkg, &self.lock_file_dir)? { + match need_reinstall(dist, required_pkg, required_dist, &self.lock_file_dir)? { ValidateCurrentInstall::Keep => { - // if self.uv_cache.must_revalidate_package(dist.name()) { reinstalls .push((dist.clone(), NeedReinstall::ReinstallationRequested)); diff --git a/crates/pixi_install_pypi/src/plan/required_dists.rs b/crates/pixi_install_pypi/src/plan/required_dists.rs index ca9354b698..983ccc40f4 100644 --- a/crates/pixi_install_pypi/src/plan/required_dists.rs +++ b/crates/pixi_install_pypi/src/plan/required_dists.rs @@ -30,17 +30,17 @@ impl RequiredDists { /// # Returns /// A RequiredDists instance or an error if conversion fails pub fn from_packages( - packages: &[PypiPackageData], + packages: &[(&PypiPackageData, &crate::ManifestData)], lock_file_dir: impl AsRef, ) -> Result { let mut dists = HashMap::new(); - for pkg in packages { + for (pkg, manifest_data) in packages { let uv_name = PackageName::from_str(pkg.name.as_ref()).map_err(|_| { ConvertToUvDistError::InvalidPackageName(pkg.name.as_ref().to_string()) })?; - let dist = convert_to_dist(pkg, lock_file_dir.as_ref())?; - dists.insert(uv_name, (pkg.clone(), dist)); + let dist = convert_to_dist(pkg, manifest_data, lock_file_dir.as_ref())?; + dists.insert(uv_name, ((*pkg).clone(), dist)); } Ok(Self(dists)) diff --git a/crates/pixi_install_pypi/src/plan/test/harness.rs b/crates/pixi_install_pypi/src/plan/test/harness.rs index 0617fcbdbb..d7925a1073 100644 --- a/crates/pixi_install_pypi/src/plan/test/harness.rs +++ b/crates/pixi_install_pypi/src/plan/test/harness.rs @@ -1,3 +1,4 @@ +use crate::ManifestData; use crate::plan::InstallPlanner; use crate::plan::cache::DistCache; use crate::plan::installed_dists::InstalledDists; @@ -372,7 +373,7 @@ impl 
PyPIPackageDataBuilder { fn registry>(name: S, version: S) -> PypiPackageData { PypiPackageData { name: pep508_rs::PackageName::new(name.as_ref().to_owned()).unwrap(), - version: pep440_rs::Version::from_str(version.as_ref()).unwrap(), + version: Some(pep440_rs::Version::from_str(version.as_ref()).unwrap()), // We don't check these fields, for determining the installation from a registry // requires_dist: vec![], @@ -384,21 +385,23 @@ impl PyPIPackageDataBuilder { version = version.as_ref() )) .unwrap(), - ), + ) + .into(), hash: None, - editable: false, + index_url: None, } } - fn path>(name: S, version: S, path: PathBuf, editable: bool) -> PypiPackageData { + fn path>(name: S, version: S, path: PathBuf) -> PypiPackageData { PypiPackageData { name: pep508_rs::PackageName::new(name.as_ref().to_owned()).unwrap(), - version: pep440_rs::Version::from_str(version.as_ref()).unwrap(), + version: Some(pep440_rs::Version::from_str(version.as_ref()).unwrap()), requires_dist: vec![], requires_python: None, - location: UrlOrPath::Path(Utf8TypedPathBuf::from(path.to_string_lossy().to_string())), + location: UrlOrPath::Path(Utf8TypedPathBuf::from(path.to_string_lossy().to_string())) + .into(), hash: None, - editable, + index_url: None, } } @@ -411,17 +414,17 @@ impl PyPIPackageDataBuilder { }; PypiPackageData { name: pep508_rs::PackageName::new(name.as_ref().to_owned()).unwrap(), - version: pep440_rs::Version::from_str(version.as_ref()).unwrap(), + version: Some(pep440_rs::Version::from_str(version.as_ref()).unwrap()), requires_dist: vec![], requires_python: None, - location: UrlOrPath::Url(url), + location: UrlOrPath::Url(url).into(), hash: None, - editable: false, + index_url: None, } } } -/// Implementor of the [`DistCache`] that does not cache anything +/// Implementer of the [`DistCache`] that does not cache anything pub struct NoCache; impl<'a> DistCache<'a> for NoCache { @@ -435,7 +438,7 @@ impl<'a> DistCache<'a> for NoCache { } } -/// Implementor of the [`DistCache`] 
that assumes to have cached everything +/// Implementer of the [`DistCache`] that assumes to have cached everything pub struct AllCached; impl<'a> DistCache<'a> for AllCached { fn is_cached( @@ -477,7 +480,7 @@ impl<'a> DistCache<'a> for AllCached { /// Struct to create the required packages map #[derive(Default)] pub struct RequiredPackages { - required: HashMap, + required: HashMap, } impl RequiredPackages { @@ -490,7 +493,8 @@ impl RequiredPackages { let package_name = uv_normalize::PackageName::from_owned(name.as_ref().to_owned()) .expect("should be correct"); let data = PyPIPackageDataBuilder::registry(name, version); - self.required.insert(package_name, data); + self.required + .insert(package_name, (data, ManifestData { editable: false })); self } @@ -504,16 +508,18 @@ impl RequiredPackages { ) -> Self { let package_name = uv_normalize::PackageName::from_owned(name.as_ref().to_owned()) .expect("should be correct"); - let data = PyPIPackageDataBuilder::path(name, version, path, editable); - self.required.insert(package_name, data); + let data = PyPIPackageDataBuilder::path(name, version, path); + self.required + .insert(package_name, (data, ManifestData { editable })); self } pub fn add_local_wheel>(mut self, name: S, version: S, path: PathBuf) -> Self { let package_name = uv_normalize::PackageName::from_owned(name.as_ref().to_owned()) .expect("should be correct"); - let data = PyPIPackageDataBuilder::path(name, version, path, false); - self.required.insert(package_name, data); + let data = PyPIPackageDataBuilder::path(name, version, path); + self.required + .insert(package_name, (data, ManifestData { editable: false })); self } @@ -521,7 +527,8 @@ impl RequiredPackages { let package_name = uv_normalize::PackageName::from_owned(name.as_ref().to_owned()) .expect("should be correct"); let data = PyPIPackageDataBuilder::url(name, version, url, UrlType::Direct); - self.required.insert(package_name, data); + self.required + .insert(package_name, (data, 
ManifestData { editable: false })); self } @@ -529,14 +536,15 @@ impl RequiredPackages { let package_name = uv_normalize::PackageName::from_owned(name.as_ref().to_owned()) .expect("should be correct"); let data = PyPIPackageDataBuilder::url(name, version, url, UrlType::Other); - self.required.insert(package_name, data); + self.required + .insert(package_name, (data, ManifestData { editable: false })); self } /// Convert to RequiredDists for the new install planner API /// Uses the default lock file directory from the test setup pub fn to_required_dists(&self) -> super::super::RequiredDists { - let packages: Vec<_> = self.required.values().cloned().collect(); + let packages: Vec<_> = self.required.values().map(|(p, m)| (p, m)).collect(); super::super::RequiredDists::from_packages(&packages, default_lock_file_dir()) .expect("Failed to create RequiredDists in test") } @@ -546,7 +554,7 @@ impl RequiredPackages { &self, lock_dir: impl AsRef, ) -> super::super::RequiredDists { - let packages: Vec<_> = self.required.values().cloned().collect(); + let packages: Vec<_> = self.required.values().map(|(p, m)| (p, m)).collect(); super::super::RequiredDists::from_packages(&packages, lock_dir) .expect("Failed to create RequiredDists in test") } diff --git a/crates/pixi_install_pypi/src/plan/test/mod.rs b/crates/pixi_install_pypi/src/plan/test/mod.rs index 02d01b9614..380be30c1a 100644 --- a/crates/pixi_install_pypi/src/plan/test/mod.rs +++ b/crates/pixi_install_pypi/src/plan/test/mod.rs @@ -575,7 +575,7 @@ fn test_installed_editable_required_non_editable() { assert_matches!( installs.reinstalls[0].1, NeedReinstall::EditableStatusChanged { - locked_editable: false, + required_editable: false, installed_editable: true } ); diff --git a/crates/pixi_install_pypi/src/plan/validation.rs b/crates/pixi_install_pypi/src/plan/validation.rs index 2f408a1551..7a18de45e8 100644 --- a/crates/pixi_install_pypi/src/plan/validation.rs +++ b/crates/pixi_install_pypi/src/plan/validation.rs @@ -6,7 
+6,7 @@ use pixi_uv_conversions::{to_parsed_git_url, to_uv_version}; use rattler_lock::{PypiPackageData, UrlOrPath}; use url::Url; use uv_cache_info::CacheInfoError; -use uv_distribution_types::{InstalledDist, InstalledDistKind}; +use uv_distribution_types::{Dist, InstalledDist, InstalledDistKind}; use uv_pypi_types::{ParsedGitUrl, ParsedUrlError}; use crate::utils::{check_url_freshness, strip_direct_scheme}; @@ -30,29 +30,35 @@ pub enum NeedsReinstallError { /// Check if a package needs to be reinstalled pub(crate) fn need_reinstall( - installed: &InstalledDist, - locked: &PypiPackageData, + installed_dist: &InstalledDist, + required_pkg: &PypiPackageData, + required_dist: &Dist, lock_file_dir: &Path, ) -> Result { // Check if the installed version is the same as the required version - match &installed.kind { + match &installed_dist.kind { InstalledDistKind::Registry(reg) => { - if !matches!(locked.location, UrlOrPath::Url(_)) { + if !matches!(*required_pkg.location, UrlOrPath::Url(_)) { return Ok(ValidateCurrentInstall::Reinstall( NeedReinstall::SourceMismatch { - locked_location: locked.location.to_string(), + locked_location: required_pkg.location.to_string(), installed_location: "registry".to_string(), }, )); } - let specifier = to_uv_version(&locked.version)?; + let specifier = to_uv_version( + required_pkg + .version + .as_ref() + .expect("registry packages always have a version"), + )?; if reg.version != specifier { return Ok(ValidateCurrentInstall::Reinstall( NeedReinstall::VersionMismatch { installed_version: reg.version.clone(), - locked_version: locked.version.clone(), + locked_version: required_pkg.version_string(), }, )); } @@ -85,7 +91,7 @@ pub(crate) fn need_reinstall( match result { Ok(url) => { // Convert the locked location, which can be a path or a url, to a url - let locked_url = match &locked.location { + let locked_url = match &*required_pkg.location { // Fine if it is already a url UrlOrPath::Url(url) => url.clone(), // Do some path mangling 
if it is actually a path to get it into a url @@ -116,7 +122,7 @@ pub(crate) fn need_reinstall( if url == locked_url { // Okay so these are the same, but we need to check if the cache is newer // than the source directory - if !check_url_freshness(&url, installed)? { + if !check_url_freshness(&url, installed_dist)? { return Ok(ValidateCurrentInstall::Reinstall( NeedReinstall::SourceDirectoryNewerThanCache, )); @@ -125,7 +131,10 @@ pub(crate) fn need_reinstall( return Ok(ValidateCurrentInstall::Reinstall( NeedReinstall::UrlMismatch { installed_url: url.to_string(), - locked_url: locked.location.as_url().map(|u| u.to_string()), + locked_url: required_pkg + .location + .as_url() + .map(|u| u.to_string()), }, )); } @@ -136,11 +145,19 @@ pub(crate) fn need_reinstall( )); } } - // If editable status changed also re-install - if dir_info.editable.unwrap_or_default() != locked.editable { + eprintln!( + "Dirinfo: Editable: {}", + dir_info.editable.unwrap_or_default() + ); + eprintln!("installed: {}", installed_dist.is_editable()); + eprintln!( + "required_dist.is_editable(): {}", + required_dist.is_editable() + ); + if dir_info.editable.unwrap_or_default() != required_dist.is_editable() { return Ok(ValidateCurrentInstall::Reinstall( NeedReinstall::EditableStatusChanged { - locked_editable: locked.editable, + required_editable: required_dist.is_editable(), installed_editable: dir_info.editable.unwrap_or_default(), }, )); @@ -156,7 +173,7 @@ pub(crate) fn need_reinstall( let lock_file_dir = typed_path::Utf8TypedPathBuf::from( lock_file_dir.to_string_lossy().as_ref(), ); - let locked_url = match &locked.location { + let locked_url = match &*required_pkg.location { // Remove `direct+` scheme if it is there so we can compare the required to // the installed url UrlOrPath::Url(url) => strip_direct_scheme(url).into_owned(), @@ -195,7 +212,7 @@ pub(crate) fn need_reinstall( if locked_url == installed_url { // Check cache freshness - if !check_url_freshness(&locked_url, installed)? 
{ + if !check_url_freshness(&locked_url, installed_dist)? { return Ok(ValidateCurrentInstall::Reinstall( NeedReinstall::ArchiveDistNewerThanCache, )); @@ -204,7 +221,7 @@ pub(crate) fn need_reinstall( return Ok(ValidateCurrentInstall::Reinstall( NeedReinstall::UrlMismatch { installed_url: installed_url.to_string(), - locked_url: locked.location.as_url().map(|u| u.to_string()), + locked_url: required_pkg.location.as_url().map(|u| u.to_string()), }, )); } @@ -222,7 +239,7 @@ pub(crate) fn need_reinstall( // Try to parse the locked git url, this can be any url, so this may fail // in practice it always seems to succeed, even with a non-git url - let locked_git_url = match &locked.location { + let locked_git_url = match &*required_pkg.location { UrlOrPath::Url(url) => { // is it a git url? if LockedGitUrl::is_locked_git_url(url) { @@ -294,7 +311,7 @@ pub(crate) fn need_reinstall( Err(_) => { return Ok(ValidateCurrentInstall::Reinstall( NeedReinstall::UnableToParseGitUrl { - url: locked + url: required_pkg .location .as_url() .map(|u| u.to_string()) @@ -328,7 +345,7 @@ pub(crate) fn need_reinstall( }; // Do some extra checks if the version is the same - let metadata = match installed.read_metadata() { + let metadata = match installed_dist.read_metadata() { Ok(metadata) => metadata, Err(err) => { // Can't be sure lets reinstall @@ -343,7 +360,7 @@ pub(crate) fn need_reinstall( if let Some(ref requires_python) = metadata.requires_python { // If the installed package requires a different requires python version of the locked package, // or if one of them is `Some` and the other is `None`. 
- match &locked.requires_python { + match &required_pkg.requires_python { Some(locked_requires_python) => { if requires_python.to_string() != locked_requires_python.to_string() { return Ok(ValidateCurrentInstall::Reinstall( @@ -363,7 +380,7 @@ pub(crate) fn need_reinstall( )); } } - } else if let Some(requires_python) = &locked.requires_python { + } else if let Some(requires_python) = &required_pkg.requires_python { return Ok(ValidateCurrentInstall::Reinstall( NeedReinstall::RequiredPythonChanged { installed_python_require: "None".to_string(), diff --git a/crates/pixi_manifest/src/discovery.rs b/crates/pixi_manifest/src/discovery.rs index e29fbe7c4d..b757661e44 100644 --- a/crates/pixi_manifest/src/discovery.rs +++ b/crates/pixi_manifest/src/discovery.rs @@ -113,12 +113,12 @@ impl Manifests { manifest.into_workspace_manifest( ExternalWorkspaceProperties::default(), PackageDefaults::default(), - Some(manifest_dir), + manifest_dir, ) }), ManifestKind::Pyproject => PyProjectManifest::deserialize(&mut toml) .map_err(TomlError::from) - .and_then(|manifest| manifest.into_workspace_manifest(Some(manifest_dir))), + .and_then(|manifest| manifest.into_workspace_manifest(manifest_dir)), }; // Handle any errors that occurred during parsing. 
@@ -413,7 +413,7 @@ impl WorkspaceDiscoverer { manifest.into_workspace_manifest( ExternalWorkspaceProperties::default(), PackageDefaults::default(), - Some(manifest_dir), + manifest_dir, ) } else { if self.discover_package { @@ -450,7 +450,7 @@ impl WorkspaceDiscoverer { if manifest.has_pixi_workspace() { // Parse the manifest as a workspace manifest if it // contains a workspace - manifest.into_workspace_manifest(Some(manifest_dir)) + manifest.into_workspace_manifest(manifest_dir) } else { if self.discover_package { // Otherwise store the manifest for later to parse as the closest @@ -498,10 +498,10 @@ impl WorkspaceDiscoverer { workspace_manifest.workspace_package_properties(), PackageDefaults::default(), &workspace_manifest, - Some(manifest_dir), + manifest_dir, ), EitherManifest::Pyproject(manifest) => { - manifest.into_package_manifest(&workspace_manifest, Some(manifest_dir)) + manifest.into_package_manifest(&workspace_manifest, manifest_dir) } }; diff --git a/crates/pixi_manifest/src/feature.rs b/crates/pixi_manifest/src/feature.rs index 4fe7366e17..0bc6747f8b 100644 --- a/crates/pixi_manifest/src/feature.rs +++ b/crates/pixi_manifest/src/feature.rs @@ -458,6 +458,8 @@ impl Feature { #[cfg(test)] mod tests { + use std::path::Path; + use assert_matches::assert_matches; use super::*; @@ -465,7 +467,7 @@ mod tests { #[test] fn test_dependencies_borrowed() { - let manifest = WorkspaceManifest::from_toml_str( + let manifest = WorkspaceManifest::from_toml_str_with_base_dir( r#" [project] name = "foo" @@ -484,6 +486,7 @@ mod tests { [feature.bla.host-dependencies] # empty on purpose "#, + Path::new(""), ) .unwrap(); @@ -530,7 +533,7 @@ mod tests { #[test] fn test_activation() { - let manifest = WorkspaceManifest::from_toml_str( + let manifest = WorkspaceManifest::from_toml_str_with_base_dir( r#" [project] name = "foo" @@ -543,6 +546,7 @@ mod tests { [target.linux-64.activation] scripts = ["linux-64.bat"] "#, + Path::new(""), ) .unwrap(); @@ -563,7 +567,7 @@ mod 
tests { #[test] pub fn test_pypi_options_manifest() { - let manifest = WorkspaceManifest::from_toml_str( + let manifest = WorkspaceManifest::from_toml_str_with_base_dir( r#" [project] name = "foo" @@ -576,6 +580,7 @@ mod tests { [pypi-options] extra-index-urls = ["https://mypypi.org/simple"] "#, + Path::new(""), ) .unwrap(); diff --git a/crates/pixi_manifest/src/manifests/workspace.rs b/crates/pixi_manifest/src/manifests/workspace.rs index 6add4694fe..f0442bcfbe 100644 --- a/crates/pixi_manifest/src/manifests/workspace.rs +++ b/crates/pixi_manifest/src/manifests/workspace.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, fmt::Display, hash::Hash, str::FromStr}; +use std::{collections::HashMap, fmt::Display, hash::Hash, path::Path, str::FromStr}; use indexmap::{Equivalent, IndexMap, IndexSet}; use itertools::Itertools; @@ -46,15 +46,16 @@ pub struct WorkspaceManifest { impl WorkspaceManifest { /// Parses a TOML string into a [`WorkspaceManifest`]. - pub fn from_toml_str + SourceCode>( + pub fn from_toml_str_with_base_dir + SourceCode>( source: S, + root_directory: &Path, ) -> Result> { TomlManifest::from_toml_str(source.as_ref()) .and_then(|manifest| { manifest.into_workspace_manifest( ExternalWorkspaceProperties::default(), PackageDefaults::default(), - None, + root_directory, ) }) .map(|manifests| manifests.0) @@ -899,7 +900,10 @@ fn handle_missing_target( #[cfg(test)] mod tests { - use std::{path::PathBuf, str::FromStr}; + use std::{ + path::{Path, PathBuf}, + str::FromStr, + }; use indexmap::{IndexMap, IndexSet}; use insta::{assert_debug_snapshot, assert_snapshot, assert_yaml_snapshot}; @@ -975,9 +979,10 @@ start = "python -m flask run --port=5050" panic!("{}", format_parse_error(source, TomlError::from(error))) }); - let manifest = WorkspaceManifest::from_toml_str(source).unwrap_or_else( - |WithSourceCode { error, source }| panic!("{}", format_parse_error(source, error)), - ); + let manifest = WorkspaceManifest::from_toml_str_with_base_dir(source, 
Path::new("")) + .unwrap_or_else(|WithSourceCode { error, source }| { + panic!("{}", format_parse_error(source, error)) + }); Workspace { manifest, @@ -994,7 +999,7 @@ start = "python -m flask run --port=5050" let manifest = PyProjectManifest::from_toml_str(source) .unwrap_or_else(|error| panic!("{}", format_parse_error(source, error))) - .into_workspace_manifest(None) + .into_workspace_manifest(Path::new("")) .unwrap_or_else(|error| panic!("{}", format_parse_error(source, error))) .0; @@ -1395,7 +1400,7 @@ start = "python -m flask run --port=5050" ); let WithSourceCode { error, source } = - WorkspaceManifest::from_toml_str(contents).unwrap_err(); + WorkspaceManifest::from_toml_str_with_base_dir(contents, Path::new("")).unwrap_err(); assert_snapshot!(format_parse_error(&source, error)); } @@ -3266,7 +3271,7 @@ channels = ["nvidia", "pytorch"] foo = { path = "./foo" } "#; - let manifest = WorkspaceManifest::from_toml_str(toml); + let manifest = WorkspaceManifest::from_toml_str_with_base_dir(toml, Path::new("")); let err = manifest.unwrap_err(); insta::assert_snapshot!(format_parse_error(toml, err.error), @r###" × conda source dependencies are not allowed without enabling the 'pixi-build' preview feature @@ -3385,7 +3390,8 @@ full = ["extra"] use rattler_conda_types::PackageName; use std::str::FromStr; - let workspace = crate::WorkspaceManifest::from_toml_str(contents).unwrap(); + let workspace = + crate::WorkspaceManifest::from_toml_str_with_base_dir(contents, Path::new("")).unwrap(); let openssl = PackageName::from_str("openssl").unwrap(); let zlib = PackageName::from_str("zlib").unwrap(); diff --git a/crates/pixi_manifest/src/pyproject.rs b/crates/pixi_manifest/src/pyproject.rs index cd0222facb..3f5c7b2076 100644 --- a/crates/pixi_manifest/src/pyproject.rs +++ b/crates/pixi_manifest/src/pyproject.rs @@ -8,7 +8,7 @@ use miette::{IntoDiagnostic, Report, WrapErr}; use pep440_rs::VersionSpecifiers; use pixi_pypi_spec::{PixiPypiSpec, PypiPackageName}; use 
pixi_spec::PixiSpec; -use pyproject_toml::{self, Contact, ResolveError}; +use pyproject_toml::{self, Contact}; use rattler_conda_types::{PackageName, ParseStrictness::Lenient, VersionSpec}; use super::{ @@ -106,8 +106,11 @@ impl PyProjectManifest { /// dependencies and/or dependency groups: /// - one environment is created per group with the same name /// - each environment includes the feature of the same name - pub fn environments_from_groups(self) -> Result>, ResolveError> { - let resolved = self.project.into_inner().resolve()?; + pub fn environments_from_groups( + self, + working_dir: &Path, + ) -> Result>, TomlError> { + let resolved = self.project.into_inner(working_dir)?.resolve()?; let mut groups = resolved.optional_dependencies; groups.extend(resolved.dependency_groups); @@ -133,9 +136,9 @@ impl PyProjectManifest { pub fn into_package_manifest( self, workspace: &WorkspaceManifest, - root_directory: Option<&Path>, + root_directory: &Path, ) -> Result<(PackageManifest, Vec), TomlError> { - let (pixi, _, package_defaults) = self.load_pixi_and_defaults()?; + let (pixi, _, package_defaults) = self.load_pixi_and_defaults(root_directory)?; pixi.into_package_manifest( workspace.workspace_package_properties(), @@ -148,6 +151,7 @@ impl PyProjectManifest { /// Helper function to load the `[tool.pixi]` manifest and package defaults. 
fn load_pixi_and_defaults( self, + working_dir: &Path, ) -> Result<(TomlManifest, pyproject_toml::PyProjectToml, PackageDefaults), TomlError> { // Load the data nested under '[tool.pixi]' as pixi manifest let Some(Tool { @@ -159,7 +163,7 @@ impl PyProjectManifest { }; let poetry = poetry.unwrap_or_default(); - let pyproject = self.project.into_inner(); + let pyproject = self.project.into_inner(working_dir)?; let package_defaults = get_package_defaults(&pyproject, &poetry); Ok((pixi, pyproject, package_defaults)) @@ -168,9 +172,9 @@ impl PyProjectManifest { #[allow(clippy::result_large_err)] pub fn into_workspace_manifest( self, - root_directory: Option<&Path>, + root_directory: &Path, ) -> Result<(WorkspaceManifest, Option, Vec), TomlError> { - let (pixi, pyproject, package_defaults) = self.load_pixi_and_defaults()?; + let (pixi, pyproject, package_defaults) = self.load_pixi_and_defaults(root_directory)?; let resolved = pyproject.resolve()?; let mut groups = resolved.optional_dependencies; groups.extend(resolved.dependency_groups); @@ -314,6 +318,7 @@ fn version_or_url_to_spec( #[cfg(test)] mod tests { + use std::path::Path; use std::str::FromStr; use pep440_rs::VersionSpecifiers; @@ -509,7 +514,7 @@ mod tests { let manifest = super::PyProjectManifest::from_toml_str(PYPROJECT_RECURSIVE_OPTIONALS).unwrap(); - let (workspace_manifest, _, _) = manifest.into_workspace_manifest(None).unwrap(); + let (workspace_manifest, _, _) = manifest.into_workspace_manifest(Path::new("")).unwrap(); let feature = workspace_manifest .feature(&FeatureName::from("all")) @@ -567,7 +572,7 @@ mod tests { let manifest = super::PyProjectManifest::from_toml_str(PYPROJECT_OPTIONAL_DEPENDENCIES).unwrap(); - let (workspace_manifest, _, _) = manifest.into_workspace_manifest(None).unwrap(); + let (workspace_manifest, _, _) = manifest.into_workspace_manifest(Path::new("")).unwrap(); let feature = workspace_manifest .feature(&FeatureName::from("all")) diff --git a/crates/pixi_manifest/src/target.rs 
b/crates/pixi_manifest/src/target.rs index d34221c037..3e230cf783 100644 --- a/crates/pixi_manifest/src/target.rs +++ b/crates/pixi_manifest/src/target.rs @@ -676,13 +676,13 @@ mod tests { use itertools::Itertools; use pixi_spec::PixiSpec; use rattler_conda_types::{PackageName, VersionSpec}; - use std::str::FromStr; + use std::{path::Path, str::FromStr}; use crate::{DependencyOverwriteBehavior, FeatureName, SpecType, WorkspaceManifest}; #[test] fn test_targets_overwrite_order() { - let manifest = WorkspaceManifest::from_toml_str( + let manifest = WorkspaceManifest::from_toml_str_with_base_dir( r#" [project] name = "test" @@ -701,6 +701,7 @@ mod tests { run = "3.0" host = "1.0" "#, + Path::new(""), ) .unwrap(); @@ -739,7 +740,9 @@ mod tests { foo = "1.0" "#; - let mut manifest = WorkspaceManifest::from_toml_str(manifest_content).unwrap(); + let mut manifest = + WorkspaceManifest::from_toml_str_with_base_dir(manifest_content, Path::new("")) + .unwrap(); let mut document = ManifestDocument::empty_pixi(); // Create a mutable context @@ -789,7 +792,9 @@ mod tests { platforms = [] "#; - let mut manifest = WorkspaceManifest::from_toml_str(manifest_content).unwrap(); + let mut manifest = + WorkspaceManifest::from_toml_str_with_base_dir(manifest_content, Path::new("")) + .unwrap(); let mut document = ManifestDocument::empty_pixi(); let mut manifest_mut = WorkspaceManifestMut { @@ -871,7 +876,9 @@ mod tests { foo = "1.0" "#; - let mut manifest = WorkspaceManifest::from_toml_str(manifest_content).unwrap(); + let mut manifest = + WorkspaceManifest::from_toml_str_with_base_dir(manifest_content, Path::new("")) + .unwrap(); let mut document = ManifestDocument::empty_pixi(); let mut manifest_mut = WorkspaceManifestMut { @@ -913,7 +920,7 @@ mod tests { fn test_target_specific_overrides_default() { use rattler_conda_types::Platform; - let manifest = WorkspaceManifest::from_toml_str( + let manifest = WorkspaceManifest::from_toml_str_with_base_dir( r#" [project] name = "test" @@ 
-926,6 +933,7 @@ mod tests { [target.linux-64.dependencies] foo = "2.0" "#, + Path::new(""), ) .unwrap(); diff --git a/crates/pixi_manifest/src/toml/manifest.rs b/crates/pixi_manifest/src/toml/manifest.rs index 2b4751d1cc..51ab9fa5d4 100644 --- a/crates/pixi_manifest/src/toml/manifest.rs +++ b/crates/pixi_manifest/src/toml/manifest.rs @@ -97,7 +97,7 @@ impl TomlManifest { external: WorkspacePackageProperties, package_defaults: PackageDefaults, workspace: &WorkspaceManifest, - root_directory: Option<&Path>, + root_directory: &Path, ) -> Result<(PackageManifest, Vec), TomlError> { let Some(PixiSpanned { value: package, @@ -146,7 +146,7 @@ impl TomlManifest { self, mut external: ExternalWorkspaceProperties, package_defaults: PackageDefaults, - root_directory: Option<&Path>, + root_directory: &Path, ) -> Result<(WorkspaceManifest, Option, Vec), TomlError> { let workspace = self .workspace @@ -641,7 +641,7 @@ mod test { manifest.into_workspace_manifest( ExternalWorkspaceProperties::default(), PackageDefaults::default(), - None, + Path::new(""), ) }) .expect_err("parsing should fail"); @@ -668,7 +668,7 @@ mod test { #[test] fn test_workspace_name_from_package() { - let workspace_manifest = WorkspaceManifest::from_toml_str( + let workspace_manifest = WorkspaceManifest::from_toml_str_with_base_dir( r#" [workspace] channels = [] @@ -682,6 +682,7 @@ mod test { [package.build] backend = { name = "foobar", version = "*" } "#, + Path::new(""), ) .unwrap(); @@ -1034,7 +1035,7 @@ mod test { #[test] fn test_parse_dev_path() { - let manifest = WorkspaceManifest::from_toml_str( + let manifest = WorkspaceManifest::from_toml_str_with_base_dir( r#" [workspace] name = "test" @@ -1044,6 +1045,7 @@ mod test { [dev] test-package = { path = "../test-package" } "#, + Path::new(""), ) .unwrap(); @@ -1058,7 +1060,7 @@ mod test { #[test] fn test_parse_dev_git() { - let manifest = WorkspaceManifest::from_toml_str( + let manifest = WorkspaceManifest::from_toml_str_with_base_dir( r#" [workspace] 
name = "test" @@ -1068,6 +1070,7 @@ mod test { [dev] my-lib = { git = "https://github.com/example/my-lib.git", branch = "main" } "#, + Path::new(""), ) .unwrap(); @@ -1082,7 +1085,7 @@ mod test { #[test] fn test_parse_dev_multiple() { - let manifest = WorkspaceManifest::from_toml_str( + let manifest = WorkspaceManifest::from_toml_str_with_base_dir( r#" [workspace] name = "test" @@ -1094,6 +1097,7 @@ mod test { pkg-b = { git = "https://github.com/example/pkg-b.git" } pkg-c = { url = "https://example.com/pkg-c.tar.gz" } "#, + Path::new(""), ) .unwrap(); @@ -1110,7 +1114,7 @@ mod test { #[test] fn test_parse_feature_dev() { - let manifest = WorkspaceManifest::from_toml_str( + let manifest = WorkspaceManifest::from_toml_str_with_base_dir( r#" [workspace] name = "test" @@ -1124,6 +1128,7 @@ mod test { default = [] extra = ["extra"] "#, + Path::new(""), ) .unwrap(); @@ -1144,7 +1149,7 @@ mod test { #[test] fn test_parse_target_dev() { - let manifest = WorkspaceManifest::from_toml_str( + let manifest = WorkspaceManifest::from_toml_str_with_base_dir( r#" [workspace] name = "test" @@ -1157,6 +1162,7 @@ mod test { [target.win-64.dev] windows-pkg = { path = "../windows-pkg" } "#, + Path::new(""), ) .unwrap(); diff --git a/crates/pixi_manifest/src/toml/package.rs b/crates/pixi_manifest/src/toml/package.rs index 7bf5dcc185..a836bd1ee5 100644 --- a/crates/pixi_manifest/src/toml/package.rs +++ b/crates/pixi_manifest/src/toml/package.rs @@ -311,7 +311,7 @@ impl TomlPackage { workspace: WorkspacePackageProperties, package_defaults: PackageDefaults, preview: &Preview, - root_directory: Option<&Path>, + root_directory: &Path, ) -> Result, TomlError> { let mut warnings = Vec::new(); @@ -413,13 +413,12 @@ impl TomlPackage { (Some(pixi_spec::SourceLocationSpec::Git(_)), _) | (Some(pixi_spec::SourceLocationSpec::Url(_)), _) => None, // Path source: resolve the path and use that directory for validation - (Some(pixi_spec::SourceLocationSpec::Path(path_spec)), Some(root_dir)) => { + 
(Some(pixi_spec::SourceLocationSpec::Path(path_spec)), root_dir) => { path_spec.resolve(root_dir).ok() } // No source: use the manifest directory - (None, Some(root_dir)) => Some(root_dir.to_path_buf()), + (None, root_dir) => Some(root_dir.to_path_buf()), // No root directory provided: skip validation - (_, None) => None, }; let license_file = check_resolved_file( @@ -549,7 +548,7 @@ mod test { WorkspacePackageProperties::default(), PackageDefaults::default(), &Preview::default(), - Some(path), + path, ) }) .unwrap_err(); @@ -581,7 +580,7 @@ mod test { WorkspacePackageProperties::default(), PackageDefaults::default(), &Preview::default(), - Some(path), + path, ) }) .unwrap_err(); @@ -620,7 +619,7 @@ mod test { workspace, PackageDefaults::default(), &Preview::default(), - None, + Path::new(""), ) .unwrap(); assert_eq!(manifest.value.package.name.unwrap(), "workspace-name"); @@ -674,7 +673,7 @@ mod test { workspace, PackageDefaults::default(), &Preview::default(), - None, + Path::new(""), ) .unwrap(); assert_eq!(manifest.value.package.name.unwrap(), "workspace-name"); @@ -711,7 +710,7 @@ mod test { workspace, PackageDefaults::default(), &Preview::default(), - None, + Path::new(""), ) .unwrap_err(); assert_snapshot!(format_parse_error(input, parse_error)); @@ -739,7 +738,7 @@ mod test { workspace, PackageDefaults::default(), &Preview::default(), - None, + Path::new(""), ) .unwrap_err(); assert_snapshot!(format_parse_error(input, parse_error)); @@ -765,7 +764,12 @@ mod test { }; let manifest = package - .into_manifest(workspace, package_defaults, &Preview::default(), None) + .into_manifest( + workspace, + package_defaults, + &Preview::default(), + Path::new(""), + ) .unwrap(); // Should use package defaults for name and version assert_eq!(manifest.value.package.name.unwrap(), "default-name"); @@ -806,7 +810,12 @@ mod test { }; let manifest = package - .into_manifest(workspace, package_defaults, &Preview::default(), None) + .into_manifest( + workspace, + 
package_defaults, + &Preview::default(), + Path::new(""), + ) .unwrap(); // Should use workspace values for name and version (overrides defaults) assert_eq!(manifest.value.package.name.unwrap(), "workspace-name"); @@ -842,7 +851,12 @@ mod test { }; let parse_error = package - .into_manifest(workspace, package_defaults, &Preview::default(), None) + .into_manifest( + workspace, + package_defaults, + &Preview::default(), + Path::new(""), + ) .unwrap_err(); assert_snapshot!(format_parse_error(input, parse_error)); } @@ -870,7 +884,7 @@ mod test { workspace, PackageDefaults::default(), &Preview::default(), - None, + Path::new(""), ) .unwrap(); @@ -896,7 +910,7 @@ mod test { workspace, PackageDefaults::default(), &Preview::default(), - None, + Path::new(""), ) .unwrap(); @@ -925,7 +939,7 @@ mod test { WorkspacePackageProperties::default(), PackageDefaults::default(), &Preview::default(), - Some(path), + path, ) }); assert!(result.is_ok(), "Expected success but got: {result:?}"); @@ -952,7 +966,7 @@ mod test { WorkspacePackageProperties::default(), PackageDefaults::default(), &Preview::default(), - Some(path), + path, ) }); assert!(result.is_ok(), "Expected success but got: {result:?}"); @@ -977,7 +991,7 @@ mod test { WorkspacePackageProperties::default(), PackageDefaults::default(), &Preview::default(), - Some(path), + path, ) }); assert!(result.is_ok(), "Expected success but got: {result:?}"); @@ -1003,7 +1017,7 @@ mod test { WorkspacePackageProperties::default(), PackageDefaults::default(), &Preview::default(), - Some(path), + path, ) }); assert!(result.is_err(), "Expected failure for path source"); @@ -1031,7 +1045,7 @@ mod test { WorkspacePackageProperties::default(), PackageDefaults::default(), &Preview::default(), - Some(temp_dir.path()), + temp_dir.path(), ) }); assert!(result.is_ok(), "Expected success but got: {result:?}"); @@ -1073,7 +1087,7 @@ mod test { WorkspacePackageProperties::default(), PackageDefaults::default(), &Preview::default(), - 
Some(manifest_dir.path()), + manifest_dir.path(), ) }); assert!(result.is_ok(), "Expected success but got: {result:?}"); @@ -1105,7 +1119,7 @@ mod test { WorkspacePackageProperties::default(), PackageDefaults::default(), &Preview::default(), - Some(temp_dir.path()), + temp_dir.path(), ) }); assert!(result.is_ok(), "Expected success but got: {result:?}"); diff --git a/crates/pixi_manifest/src/toml/pyproject.rs b/crates/pixi_manifest/src/toml/pyproject.rs index 0348faa020..f4f9d14683 100644 --- a/crates/pixi_manifest/src/toml/pyproject.rs +++ b/crates/pixi_manifest/src/toml/pyproject.rs @@ -1,7 +1,7 @@ //! This module provides [`toml_span`] parsing functionality for //! `pyproject.toml` files. -use std::str::FromStr; +use std::path::Path; use indexmap::IndexMap; use pep440_rs::{Version, VersionSpecifiers}; @@ -17,6 +17,7 @@ use toml_span::{ value::ValueInner, }; +use crate::error::{GenericError, TomlError}; use crate::pyproject::{PyProjectManifest, Tool, ToolPoetry}; #[derive(Debug)] @@ -27,15 +28,22 @@ pub struct PyProjectToml { } impl PyProjectToml { - pub fn into_inner(self) -> pyproject_toml::PyProjectToml { - pyproject_toml::PyProjectToml { - project: self.project.map(TomlProject::into_inner), + pub fn into_inner( + self, + working_dir: &Path, + ) -> Result { + Ok(pyproject_toml::PyProjectToml { + project: self + .project + .map(|p| p.into_inner(working_dir)) + .transpose()?, build_system: self.build_system.map(TomlBuildSystem::into_inner), dependency_groups: self .dependency_groups .map(Spanned::take) - .map(TomlDependencyGroups::into_inner), - } + .map(|dg| dg.into_inner(working_dir)) + .transpose()?, + }) } } @@ -48,7 +56,6 @@ impl<'de> toml_span::Deserialize<'de> for PyProjectToml { let dependency_groups = th.optional("dependency-groups"); th.finalize(Some(value))?; - Ok(PyProjectToml { project, build_system, @@ -165,18 +172,44 @@ pub struct TomlProject { pub scripts: Option>>, /// Corresponds to the gui_scripts group in the core metadata pub gui_scripts: 
Option>>, - /// Project dependencies - pub dependencies: Option>>, - /// Optional dependencies - pub optional_dependencies: Option>>>, + /// Project dependencies (stored as raw strings, parsed later with working_dir) + pub dependencies: Option>>, + /// Optional dependencies (stored as raw strings, parsed later with working_dir) + pub optional_dependencies: Option>>>, /// Specifies which fields listed by PEP 621 were intentionally unspecified /// so another tool can/will provide such metadata dynamically. pub dynamic: Option>>, } impl TomlProject { - pub fn into_inner(self) -> Project { - Project { + pub fn into_inner(self, working_dir: &Path) -> Result { + let dependencies = self + .dependencies + .map(|deps| { + deps.into_iter() + .map(|s| parse_requirement_with_dir(&s, working_dir)) + .inspect(|v| eprintln!("Debug VersionOrUrl: {v:?}")) + .collect::, _>>() + }) + .transpose()?; + + let optional_dependencies = self + .optional_dependencies + .map(|opt_deps| { + opt_deps + .into_iter() + .map(|(key, deps)| { + let parsed = deps + .into_iter() + .map(|s| parse_requirement_with_dir(&s, working_dir)) + .collect::, _>>()?; + Ok((key, parsed)) + }) + .collect::, TomlError>>() + }) + .transpose()?; + + Ok(Project { name: self.name.take(), version: self.version.map(Spanned::take), description: self.description.map(Spanned::take), @@ -224,19 +257,12 @@ impl TomlProject { .map(|(k, v)| (k, v.take())) .collect() }), - dependencies: self - .dependencies - .map(|dependencies| dependencies.into_iter().map(Spanned::take).collect()), - optional_dependencies: self.optional_dependencies.map(|optional_dependencies| { - optional_dependencies - .into_iter() - .map(|(k, v)| (k, v.into_iter().map(Spanned::take).collect())) - .collect() - }), + dependencies, + optional_dependencies, dynamic: self .dynamic .map(|dynamic| dynamic.into_iter().map(Spanned::take).collect()), - } + }) } } @@ -273,13 +299,9 @@ impl<'de> toml_span::Deserialize<'de> for TomlProject { let gui_scripts = th 
.optional::>("gui-scripts") .map(TomlIndexMap::into_inner); - let dependencies = th - .optional::>>>>("dependencies") - .map(TomlWith::into_inner); + let dependencies: Option>> = th.optional("dependencies"); let optional_dependencies = th - .optional::>>>>>( - "optional-dependencies", - ) + .optional::>>>>("optional-dependencies") .map(TomlWith::into_inner); let dynamic = th.optional("dynamic"); @@ -308,10 +330,17 @@ impl<'de> toml_span::Deserialize<'de> for TomlProject { } } -impl<'de> DeserializeAs<'de, Project> for TomlProject { - fn deserialize_as(value: &mut Value<'de>) -> Result { - Self::deserialize(value).map(Self::into_inner) - } +/// Parse a raw PEP 508 string into a [`Requirement`], optionally using a +/// working directory to resolve relative paths. +fn parse_requirement_with_dir( + spanned: &Spanned, + working_dir: &Path, +) -> Result { + Requirement::parse(&spanned.value, working_dir).map_err(|e| { + GenericError::new(e.message.to_string()) + .with_span(spanned.span.start..spanned.span.end) + .into() + }) } /// A wrapper around [`ReadMe`] that implements [`toml_span::Deserialize`] and @@ -442,60 +471,64 @@ impl<'de> DeserializeAs<'de, Contact> for TomlContact { } } -/// A wrapper around [`DependencyGroups`] that implements -/// [`toml_span::Deserialize`] and [`pixi_toml::DeserializeAs`]. +/// Intermediate representation of `[dependency-groups]` that stores requirement +/// strings unparsed. The strings are resolved into [`Requirement`] objects in +/// [`TomlDependencyGroups::into_inner`] where a working directory can be +/// provided. 
#[derive(Debug)] -pub struct TomlDependencyGroups(pub DependencyGroups); +pub struct TomlDependencyGroups(pub IndexMap>); impl TomlDependencyGroups { - pub fn into_inner(self) -> DependencyGroups { - self.0 + pub fn into_inner(self, working_dir: &Path) -> Result { + let mut groups = IndexMap::new(); + for (name, specifiers) in self.0 { + let parsed = specifiers + .into_iter() + .map(|spec| match spec { + TomlDependencyGroupSpecifier::String(spanned) => { + let req = parse_requirement_with_dir(&spanned, working_dir)?; + Ok(DependencyGroupSpecifier::String(req)) + } + TomlDependencyGroupSpecifier::Table { include_group } => { + Ok(DependencyGroupSpecifier::Table { include_group }) + } + }) + .collect::, TomlError>>()?; + groups.insert(name, parsed); + } + Ok(DependencyGroups(groups)) } } impl<'de> toml_span::Deserialize<'de> for TomlDependencyGroups { fn deserialize(value: &mut Value<'de>) -> Result { - Ok(Self(DependencyGroups( - TomlWith::<_, TomlIndexMap<_, Vec>>::deserialize(value)? - .into_inner(), - ))) + let map = TomlIndexMap::>::deserialize(value)?; + Ok(Self(map.into_inner())) } } -impl<'de> DeserializeAs<'de, DependencyGroups> for TomlDependencyGroups { - fn deserialize_as(value: &mut Value<'de>) -> Result { - Self::deserialize(value).map(Self::into_inner) - } -} - -/// A wrapper around [`DependencyGroupSpecifier`] that implements -/// [`toml_span::Deserialize`] and [`pixi_toml::DeserializeAs`]. +/// Intermediate representation of a dependency group specifier that stores +/// requirement strings unparsed. 
#[derive(Debug)] -pub struct TomlDependencyGroupSpecifier(DependencyGroupSpecifier); - -impl TomlDependencyGroupSpecifier { - pub fn into_inner(self) -> DependencyGroupSpecifier { - self.0 - } +pub enum TomlDependencyGroupSpecifier { + /// Raw PEP 508 string, parsed later with working_dir context + String(Spanned), + /// Include another dependency group + Table { include_group: String }, } impl<'de> toml_span::Deserialize<'de> for TomlDependencyGroupSpecifier { fn deserialize(value: &mut Value<'de>) -> Result { + let span = value.span; match value.take() { - ValueInner::String(str) => Ok(Self(DependencyGroupSpecifier::String( - Requirement::from_str(&str).map_err(|e| { - DeserError::from(Error { - kind: ErrorKind::Custom(e.message.to_string().into()), - span: value.span, - line_info: None, - }) - })?, - ))), + ValueInner::String(str) => Ok(TomlDependencyGroupSpecifier::String( + Spanned::with_span(str.into_owned(), span), + )), ValueInner::Table(table) => { let mut th = TableHelper::from((table, value.span)); let include_group = th.required("include-group")?; th.finalize(None)?; - Ok(Self(DependencyGroupSpecifier::Table { include_group })) + Ok(TomlDependencyGroupSpecifier::Table { include_group }) } inner => Err(DeserError::from(expected( "a string or table", @@ -506,12 +539,6 @@ impl<'de> toml_span::Deserialize<'de> for TomlDependencyGroupSpecifier { } } -impl<'de> DeserializeAs<'de, DependencyGroupSpecifier> for TomlDependencyGroupSpecifier { - fn deserialize_as(value: &mut Value<'de>) -> Result { - Self::deserialize(value).map(Self::into_inner) - } -} - impl<'de> Deserialize<'de> for ToolPoetry { fn deserialize(value: &mut Value<'de>) -> Result { let mut th = TableHelper::new(value)?; diff --git a/crates/pixi_manifest/src/toml/workspace.rs b/crates/pixi_manifest/src/toml/workspace.rs index a5c8e2bbce..f92c54ae30 100644 --- a/crates/pixi_manifest/src/toml/workspace.rs +++ b/crates/pixi_manifest/src/toml/workspace.rs @@ -67,7 +67,7 @@ impl TomlWorkspace { pub fn 
into_workspace( self, external: ExternalWorkspaceProperties, - root_directory: Option<&Path>, + root_directory: &Path, ) -> Result, TomlError> { if let Some(Spanned { value: license, @@ -84,8 +84,8 @@ impl TomlWorkspace { } let check_file_existence = |path: &Option>| { - if let (Some(root_directory), Some(Spanned { span, value: path })) = - (root_directory, path) + if !root_directory.as_os_str().is_empty() + && let Some(Spanned { span, value: path }) = path { let full_path = root_directory.join(path); if !full_path.is_file() { @@ -154,7 +154,7 @@ impl TomlWorkspace { fn convert_build_variant_files( entries: Option>>>, - root_directory: Option<&Path>, + root_directory: &Path, ) -> Result, TomlError> { if let Some(entries) = entries { entries @@ -167,7 +167,7 @@ fn convert_build_variant_files( Some(span.into()) }; - if let Some(root_directory) = root_directory { + if !root_directory.as_os_str().is_empty() { let full_path = root_directory.join(&path); if !full_path.is_file() { return Err(TomlError::from( @@ -322,12 +322,12 @@ mod test { platforms = [] license-file = "LICENSE.txt" "#; - let path = Path::new(""); + let path = Path::new("/nonexistent"); let parse_error = TomlWorkspace::from_toml_str(input) - .and_then(|w| w.into_workspace(ExternalWorkspaceProperties::default(), Some(path))) + .and_then(|w| w.into_workspace(ExternalWorkspaceProperties::default(), path)) .unwrap_err(); assert_snapshot!(format_parse_error(input, parse_error), @r###" - × 'LICENSE.txt' does not exist + × '/nonexistent/LICENSE.txt' does not exist ╭─[pixi.toml:4:25] 3 │ platforms = [] 4 │ license-file = "LICENSE.txt" @@ -344,12 +344,12 @@ mod test { platforms = [] readme = "README.md" "#; - let path = Path::new(""); + let path = Path::new("/nonexistent"); let parse_error = TomlWorkspace::from_toml_str(input) - .and_then(|w| w.into_workspace(ExternalWorkspaceProperties::default(), Some(path))) + .and_then(|w| w.into_workspace(ExternalWorkspaceProperties::default(), path)) .unwrap_err(); 
assert_snapshot!(format_parse_error(input, parse_error), @r###" - × 'README.md' does not exist + × '/nonexistent/README.md' does not exist ╭─[pixi.toml:4:19] 3 │ platforms = [] 4 │ readme = "README.md" @@ -366,12 +366,12 @@ mod test { platforms = [] build-variants-files = ["missing.yaml"] "#; - let path = Path::new(""); + let path = Path::new("/nonexistent"); let parse_error = TomlWorkspace::from_toml_str(input) - .and_then(|w| w.into_workspace(ExternalWorkspaceProperties::default(), Some(path))) + .and_then(|w| w.into_workspace(ExternalWorkspaceProperties::default(), path)) .unwrap_err(); assert_snapshot!(format_parse_error(input, parse_error), @r#" - × 'missing.yaml' does not exist + × '/nonexistent/missing.yaml' does not exist ╭─[pixi.toml:4:34] 3 │ platforms = [] 4 │ build-variants-files = ["missing.yaml"] @@ -390,10 +390,10 @@ mod test { "#; let path = Path::new(""); let parse_error = TomlWorkspace::from_toml_str(input) - .and_then(|w| w.into_workspace(ExternalWorkspaceProperties::default(), Some(path))) + .and_then(|w| w.into_workspace(ExternalWorkspaceProperties::default(), path)) .unwrap_err(); assert_snapshot!(format_parse_error(input, parse_error), @r#" - × `date` is neither a valid date (input contains invalid characters) nor a valid datetime (premature end of input) + × `date` is neither a valid date (input contains invalid characters) nor a valid datetime (input contains invalid characters) ╭─[pixi.toml:4:26] 3 │ platforms = [] 4 │ exclude-newer = "date" diff --git a/crates/pixi_manifest/src/utils/test_utils.rs b/crates/pixi_manifest/src/utils/test_utils.rs index 1442e50c3c..5517130fe3 100644 --- a/crates/pixi_manifest/src/utils/test_utils.rs +++ b/crates/pixi_manifest/src/utils/test_utils.rs @@ -1,3 +1,5 @@ +use std::path::Path; + use crate::toml::{ExternalWorkspaceProperties, PackageDefaults}; use crate::toml::{FromTomlStr, TomlManifest}; use itertools::Itertools; @@ -12,7 +14,7 @@ pub(crate) fn expect_parse_failure(pixi_toml: &str) -> String { 
manifest.into_workspace_manifest( ExternalWorkspaceProperties::default(), PackageDefaults::default(), - None, + Path::new(""), ) }) .expect_err("parsing should fail"); @@ -28,7 +30,7 @@ pub(crate) fn expect_parse_warnings(pixi_toml: &str) -> String { manifest.into_workspace_manifest( ExternalWorkspaceProperties::default(), PackageDefaults::default(), - None, + Path::new(""), ) }) { Ok((_, _, warnings)) => warnings diff --git a/crates/pixi_pypi_spec/src/lib.rs b/crates/pixi_pypi_spec/src/lib.rs index 3717cfdc21..216929b1bf 100644 --- a/crates/pixi_pypi_spec/src/lib.rs +++ b/crates/pixi_pypi_spec/src/lib.rs @@ -42,7 +42,7 @@ pub enum PixiPypiSource { }, /// From a local file system path (directory or file). Path { - path: PathBuf, + path: pixi_spec::Verbatim, #[serde(default, skip_serializing_if = "Option::is_none")] editable: Option, }, @@ -58,7 +58,7 @@ impl PixiPypiSource { /// Returns the path if this is a Path source. pub fn as_path(&self) -> Option<&PathBuf> { match self { - PixiPypiSource::Path { path, .. } => Some(path), + PixiPypiSource::Path { path, .. 
} => Some(path.inner()), _ => None, } } @@ -361,7 +361,10 @@ mod tests { #[test] fn test_is_source_dependency_for_path() { let spec = PixiPypiSpec::new(PixiPypiSource::Path { - path: PathBuf::from("./local"), + path: pixi_spec::Verbatim::new_with_given( + PathBuf::from("./local"), + "./local".to_string(), + ), editable: None, }); assert!(spec.is_source_dependency()); @@ -439,7 +442,10 @@ mod tests { #[test] fn test_source_accessor() { let spec = PixiPypiSpec::new(PixiPypiSource::Path { - path: PathBuf::from("./local"), + path: pixi_spec::Verbatim::new_with_given( + PathBuf::from("./local"), + "./local".to_string(), + ), editable: Some(true), }); @@ -465,7 +471,10 @@ mod tests { #[test] fn test_as_version_returns_none_for_non_registry() { let spec = PixiPypiSpec::new(PixiPypiSource::Path { - path: PathBuf::from("./local"), + path: pixi_spec::Verbatim::new_with_given( + PathBuf::from("./local"), + "./local".to_string(), + ), editable: None, }); assert!(spec.as_version().is_none()); @@ -489,7 +498,10 @@ mod tests { // Non-registry source let spec = PixiPypiSpec::new(PixiPypiSource::Path { - path: PathBuf::from("./local"), + path: pixi_spec::Verbatim::new_with_given( + PathBuf::from("./local"), + "./local".to_string(), + ), editable: None, }); assert!(spec.index().is_none()); @@ -498,7 +510,10 @@ mod tests { #[test] fn test_from_source_conversion() { let source = PixiPypiSource::Path { - path: PathBuf::from("./local"), + path: pixi_spec::Verbatim::new_with_given( + PathBuf::from("./local"), + "./local".to_string(), + ), editable: Some(true), }; let spec: PixiPypiSpec = source.clone().into(); @@ -609,7 +624,10 @@ mod tests { assert_eq!( as_pypi_req, PixiPypiSpec::new(PixiPypiSource::Path { - path: PathBuf::from("/path/to/boltons"), + path: pixi_spec::Verbatim::new_with_given( + PathBuf::from("/path/to/boltons"), + "/path/to/boltons".to_string(), + ), editable: None, }) ); diff --git a/crates/pixi_pypi_spec/src/pep508.rs b/crates/pixi_pypi_spec/src/pep508.rs index 
c47776bab3..8ccb11c0bb 100644 --- a/crates/pixi_pypi_spec/src/pep508.rs +++ b/crates/pixi_pypi_spec/src/pep508.rs @@ -1,7 +1,7 @@ use crate::utils::extract_directory_from_url; use crate::{Pep508ToPyPiRequirementError, PixiPypiSource, PixiPypiSpec, VersionOrStar}; use pixi_git::GitUrl; -use pixi_spec::GitSpec; +use pixi_spec::{GitSpec, Verbatim}; use std::path::Path; /// Implement from [`pep508_rs::Requirement`] to make the conversion easier. @@ -89,9 +89,14 @@ impl TryFrom for PixiPypiSpec { let file = url.to_file_path().map_err(|_| { Pep508ToPyPiRequirementError::PathUrlIntoPath(url.clone()) })?; + let path = if let Some(g) = u.given() { + Verbatim::new_with_given(file, g.to_string()) + } else { + Verbatim::new(file) + }; PixiPypiSpec::with_extras_and_markers( PixiPypiSource::Path { - path: file, + path, editable: None, }, req.extras, diff --git a/crates/pixi_pypi_spec/src/toml.rs b/crates/pixi_pypi_spec/src/toml.rs index 4fb3b441da..18d4cb886b 100644 --- a/crates/pixi_pypi_spec/src/toml.rs +++ b/crates/pixi_pypi_spec/src/toml.rs @@ -127,14 +127,17 @@ impl RawPyPiRequirement { self.extras, self.marker, ), - (None, Some(path), None, None) => PixiPypiSpec::with_extras_and_markers( - PixiPypiSource::Path { - path, - editable: self.editable, - }, - self.extras, - self.marker, - ), + (None, Some(path), None, None) => { + let given = path.display().to_string(); + PixiPypiSpec::with_extras_and_markers( + PixiPypiSource::Path { + path: pixi_spec::Verbatim::new_with_given(path, given), + editable: self.editable, + }, + self.extras, + self.marker, + ) + } (None, None, Some(git), None) => { let rev = match (self.branch, self.rev, self.tag) { (Some(branch), None, None) => Some(GitReference::Branch(branch)), @@ -382,7 +385,9 @@ impl From for toml_edit::Value { table.insert( "path", toml_edit::Value::String(toml_edit::Formatted::new( - path.to_string_lossy().to_string(), + path.given() + .map(|g| g.to_string()) + .unwrap_or_else(|| path.display().to_string()), )), ); if 
editable == &Some(true) { @@ -606,7 +611,10 @@ mod test { assert_eq!( requirement.first().unwrap().1, &PixiPypiSpec::new(PixiPypiSource::Path { - path: PathBuf::from("../numpy-test"), + path: pixi_spec::Verbatim::new_with_given( + PathBuf::from("../numpy-test"), + "../numpy-test".to_string(), + ), editable: None, }), ); @@ -621,7 +629,10 @@ mod test { assert_eq!( requirement.first().unwrap().1, &PixiPypiSpec::new(PixiPypiSource::Path { - path: PathBuf::from("../numpy-test"), + path: pixi_spec::Verbatim::new_with_given( + PathBuf::from("../numpy-test"), + "../numpy-test".to_string(), + ), editable: Some(true), }) ); diff --git a/crates/pixi_record/src/lib.rs b/crates/pixi_record/src/lib.rs index c53f65a1c9..9846223f30 100644 --- a/crates/pixi_record/src/lib.rs +++ b/crates/pixi_record/src/lib.rs @@ -71,6 +71,9 @@ impl PixiRecord { ConversionError::LocationToUrlConversionError(err) => { ParseLockFileError::InvalidRecordUrl(location, err) } + ConversionError::InvalidBinaryPackageLocation => { + ParseLockFileError::InvalidArchiveFilename(location) + } })?) 
} CondaPackageData::Source(value) => { @@ -139,6 +142,9 @@ pub enum ParseLockFileError { #[error("missing field/fields '{1}' for package {0}")] Missing(UrlOrPath, String), + #[error("Invalid archive file name for package {0}")] + InvalidArchiveFilename(UrlOrPath), + #[error("invalid url for package {0}")] InvalidRecordUrl(UrlOrPath, #[source] file_url::FileURLParseError), diff --git a/crates/pixi_record/src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap b/crates/pixi_record/src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap index 7114a995b8..859647d89d 100644 --- a/crates/pixi_record/src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap +++ b/crates/pixi_record/src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap @@ -2,93 +2,60 @@ source: crates/pixi_record/src/source_record.rs expression: roundtrip_lock --- -version: 6 +version: 7 +platforms: +- name: noarch environments: default: channels: - url: https://conda.anaconda.org/conda-forge/ packages: noarch: - - conda: git+https://github.com/example/mono-repo.git?subdirectory=recipes&branch=main#abc123def456abc123def456abc123def456abc1 - name: git-child-test - - conda: git+https://github.com/example/mono-repo.git?subdirectory=recipes&branch=main#abc123def456abc123def456abc123def456abc1 - name: git-sibling-test - - conda: git+https://github.com/example/repo.git?tag=v1.0.0#abc123def456abc123def456abc123def456abc1 - - conda: /workspace/absolute-recipe - - conda: recipes/my-package - name: path-child-test - - conda: recipes/my-package - name: path-sibling-test - - conda: recipes/no-build + - conda_source: git-child-test[7ed0aa73] @ git+https://github.com/example/mono-repo.git?subdirectory=recipes&branch=main#abc123def456abc123def456abc123def456abc1 + - conda_source: git-no-manifest-subdir[f0ed072f] @ git+https://github.com/example/repo.git?tag=v1.0.0#abc123def456abc123def456abc123def456abc1 + - 
conda_source: git-sibling-test[cb1b107e] @ git+https://github.com/example/mono-repo.git?subdirectory=recipes&branch=main#abc123def456abc123def456abc123def456abc1 + - conda_source: path-absolute-manifest[17e682f7] @ /workspace/absolute-recipe + - conda_source: path-child-test[9d062313] @ recipes/my-package + - conda_source: path-no-build-source[e11447f7] @ recipes/no-build + - conda_source: path-sibling-test[2c2ab470] @ recipes/my-package packages: -- conda: git+https://github.com/example/mono-repo.git?subdirectory=recipes&branch=main#abc123def456abc123def456abc123def456abc1 - name: git-child-test +- conda_source: git-child-test[7ed0aa73] @ git+https://github.com/example/mono-repo.git?subdirectory=recipes&branch=main#abc123def456abc123def456abc123def456abc1 version: 1.1.0 build: h234567_0 subdir: noarch - noarch: false - sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 - md5: d41d8cd98f00b204e9800998ecf8427e - channel: null package_build_source: path: ../src -- conda: git+https://github.com/example/repo.git?tag=v1.0.0#abc123def456abc123def456abc123def456abc1 - name: git-no-manifest-subdir +- conda_source: git-no-manifest-subdir[f0ed072f] @ git+https://github.com/example/repo.git?tag=v1.0.0#abc123def456abc123def456abc123def456abc1 version: 3.0.0 build: h901237_0 subdir: noarch - noarch: false - sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 - md5: d41d8cd98f00b204e9800998ecf8427e - channel: null package_build_source: path: build/subdir -- conda: git+https://github.com/example/mono-repo.git?subdirectory=recipes&branch=main#abc123def456abc123def456abc123def456abc1 - name: git-sibling-test +- conda_source: git-sibling-test[cb1b107e] @ git+https://github.com/example/mono-repo.git?subdirectory=recipes&branch=main#abc123def456abc123def456abc123def456abc1 version: 1.0.0 build: h123456_0 subdir: noarch - noarch: false - sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 - md5: 
d41d8cd98f00b204e9800998ecf8427e - channel: null package_build_source: path: ../non-nested -- conda: /workspace/absolute-recipe - name: path-absolute-manifest +- source: path-absolute-manifest[17e682f7] @ /workspace/absolute-recipe version: 2.4.0 build: h901236_0 subdir: noarch - noarch: false - sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 - md5: d41d8cd98f00b204e9800998ecf8427e package_build_source: path: ../src -- conda: recipes/my-package - name: path-child-test +- source: path-child-test[9d062313] @ recipes/my-package version: 2.1.0 build: h890123_0 subdir: noarch - noarch: false - sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 - md5: d41d8cd98f00b204e9800998ecf8427e package_build_source: path: ../../src/lib -- conda: recipes/no-build - name: path-no-build-source +- source: path-no-build-source[e11447f7] @ recipes/no-build version: 2.5.0 build: h901238_0 subdir: noarch - noarch: false - sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 - md5: d41d8cd98f00b204e9800998ecf8427e -- conda: recipes/my-package - name: path-sibling-test +- source: path-sibling-test[2c2ab470] @ recipes/my-package version: 2.0.0 build: h789012_0 subdir: noarch - noarch: false - sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 - md5: d41d8cd98f00b204e9800998ecf8427e package_build_source: path: ../../other-package/src diff --git a/crates/pixi_record/src/source_record.rs b/crates/pixi_record/src/source_record.rs index 6cab1ca097..c7726e2e10 100644 --- a/crates/pixi_record/src/source_record.rs +++ b/crates/pixi_record/src/source_record.rs @@ -81,7 +81,7 @@ pub struct SourceRecord { pub build_source: Option, /// The variants that uniquely identify the way this package was built. - pub variants: Option>, + pub variants: BTreeMap, /// Specifies which packages are expected to be installed as source packages /// and from which location. 
@@ -132,8 +132,6 @@ impl SourceRecord { package_record: self.package_record, location: self.manifest_source.clone().into(), package_build_source, - // Don't write input_hash to lock file - input: None, sources: self .sources .into_iter() @@ -141,7 +139,9 @@ impl SourceRecord { .collect(), variants: self .variants - .map(|variants| variants.into_iter().map(|(k, v)| (k, v.into())).collect()), + .into_iter() + .map(|(k, v)| (k, v.into())) + .collect(), } } @@ -210,12 +210,11 @@ impl SourceRecord { .into_iter() .map(|(k, v)| (k, SourceLocationSpec::from(v))) .collect(), - variants: data.variants.map(|variants| { - variants - .into_iter() - .map(|(k, v)| (k, VariantValue::from(v))) - .collect() - }), + variants: data + .variants + .into_iter() + .map(|(k, v)| (k, VariantValue::from(v))) + .collect(), }) } @@ -227,17 +226,11 @@ impl SourceRecord { return false; } - match (&self.variants, &other.variants) { - (Some(variants), Some(other_variants)) => { - // If both records have variants, we use that to identify them. - variants == other_variants - } - _ => { - self.package_record.build == other.package_record.build - && self.package_record.version == other.package_record.version - && self.package_record.subdir == other.package_record.subdir - } + if self.variants.is_empty() || other.variants.is_empty() { + return true; } + + self.variants == other.variants } } @@ -300,7 +293,7 @@ fn git_reference_from_shallow(spec: Option, rev: &str) -> GitRef #[cfg(test)] mod tests { use super::*; - use std::str::FromStr; + use std::{path::Path, str::FromStr}; use rattler_conda_types::Platform; use rattler_lock::{ @@ -309,11 +302,13 @@ mod tests { #[test] fn roundtrip_conda_source_data() { - let workspace_root = std::path::Path::new("/workspace"); + let workspace_root = Path::new("/workspace"); // Load the lock file from the snapshot content (skip insta frontmatter). 
let lock_source = lock_source_from_snapshot(); - let lock_file = LockFile::from_str(&lock_source).expect("failed to load lock file fixture"); + let lock_file = + LockFile::from_str_with_base_directory(&lock_source, Some(Path::new("/workspace"))) + .expect("failed to load lock file fixture"); // Extract Conda source packages from the lock file. let environment = lock_file @@ -344,7 +339,7 @@ mod tests { /// Extract the lock file body from the snapshot by skipping the insta frontmatter. fn lock_source_from_snapshot() -> String { - let snapshot_path = std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join( + let snapshot_path = Path::new(env!("CARGO_MANIFEST_DIR")).join( "src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap", ); #[allow(clippy::disallowed_methods)] @@ -358,11 +353,30 @@ mod tests { } /// Build a lock file string from a set of SourceRecords. - fn build_lock_from_records( - records: &[SourceRecord], - workspace_root: &std::path::Path, - ) -> String { - let mut builder = LockFileBuilder::new(); + fn build_lock_from_records(records: &[SourceRecord], workspace_root: &Path) -> String { + // Collect all unique platforms from the records + let platforms: std::collections::HashSet = records + .iter() + .map(|r| { + let conda_data = + CondaPackageData::from(r.clone().into_conda_source_data(workspace_root)); + Platform::from_str(&conda_data.record().subdir) + .expect("failed to parse platform from subdir") + }) + .collect(); + + let mut builder = LockFileBuilder::new() + .with_platforms( + platforms + .iter() + .map(|p| rattler_lock::PlatformData { + name: rattler_lock::PlatformName::from(p), + subdir: *p, + virtual_packages: Vec::new(), + }) + .collect(), + ) + .expect("platforms should be unique"); builder.set_channels( DEFAULT_ENVIRONMENT_NAME, [Channel::from("https://conda.anaconda.org/conda-forge/")], @@ -374,7 +388,9 @@ mod tests { let platform = Platform::from_str(&conda_data.record().subdir) .expect("failed to parse platform 
from subdir"); - builder.add_conda_package(DEFAULT_ENVIRONMENT_NAME, platform, conda_data); + builder + .add_conda_package(DEFAULT_ENVIRONMENT_NAME, &platform.to_string(), conda_data) + .expect("platform was registered"); } builder diff --git a/crates/pixi_spec/src/lib.rs b/crates/pixi_spec/src/lib.rs index d411a552c7..ef3d17de22 100644 --- a/crates/pixi_spec/src/lib.rs +++ b/crates/pixi_spec/src/lib.rs @@ -28,6 +28,7 @@ use rattler_conda_types::{ BuildNumberSpec, ChannelConfig, MatchSpecCondition, NamedChannelOrUrl, NamelessMatchSpec, ParseChannelError, StringMatcher, VersionSpec, }; +pub use rattler_lock::Verbatim; pub use source_anchor::SourceAnchor; pub use subdirectory::{Subdirectory, SubdirectoryError}; use thiserror::Error; @@ -134,7 +135,7 @@ impl PixiSpec { url, md5: spec.md5, sha256: spec.sha256, - // A namelessmatchspec always describes a binary spec which cannot have a + // A nameless matchspec always describes a binary spec which cannot have a // subdirectory subdirectory: Subdirectory::default(), }) diff --git a/crates/pixi_uv_context/src/lib.rs b/crates/pixi_uv_context/src/lib.rs index 092698877e..fe0dfaddae 100644 --- a/crates/pixi_uv_context/src/lib.rs +++ b/crates/pixi_uv_context/src/lib.rs @@ -128,17 +128,19 @@ impl UvResolutionContext { /// - `index_locations`: The index locations to use /// - `index_strategy`: The index strategy to use /// - `markers`: Optional marker environment for platform-specific resolution + /// - `connectivity`: Whether to allow network access pub fn build_registry_client( &self, allow_insecure_hosts: Vec, index_locations: &IndexLocations, index_strategy: IndexStrategy, markers: Option<&MarkerEnvironment>, + connectivity: Connectivity, ) -> Arc { let mut base_client_builder = BaseClientBuilder::default() .allow_insecure_host(allow_insecure_hosts) .keyring(self.keyring_provider) - .connectivity(Connectivity::Online) + .connectivity(connectivity) .native_tls(self.use_native_tls) .built_in_root_certs(self.use_builtin_certs) 
.extra_middleware(self.extra_middleware.clone()); diff --git a/crates/pixi_uv_conversions/src/requirements.rs b/crates/pixi_uv_conversions/src/requirements.rs index d2c0225ed0..87762e98b8 100644 --- a/crates/pixi_uv_conversions/src/requirements.rs +++ b/crates/pixi_uv_conversions/src/requirements.rs @@ -157,17 +157,17 @@ pub fn as_uv_req( } } PixiPypiSource::Path { path, editable } => { - let joined = project_root.join(path); + let joined = project_root.join(path.inner()); let canonicalized = dunce::canonicalize(&joined).map_err(|e| AsPep508Error::CanonicalizeError { source: e, path: joined.clone(), })?; let given = path - .to_str() + .given() .map(|s| s.to_owned()) - .unwrap_or_else(String::new); - let verbatim = VerbatimUrl::from_path(path, project_root)?.with_given(given); + .unwrap_or_else(|| path.inner().display().to_string()); + let verbatim = VerbatimUrl::from_path(path.inner(), project_root)?.with_given(given); if canonicalized.is_dir() { RequirementSource::Directory { @@ -180,7 +180,7 @@ pub fn as_uv_req( editable: Some(false), url: verbatim, // TODO: we could see if we ever need this - // AFAICS it would be useful for constrainging dependencies + // AFAICS it would be useful for constraining dependencies r#virtual: Some(false), } } else if *editable == Some(true) { @@ -193,7 +193,7 @@ pub fn as_uv_req( RequirementSource::Path { install_path: canonicalized.into_boxed_path(), url: verbatim, - ext: DistExtension::from_path(path)?, + ext: DistExtension::from_path(path.inner())?, } } } diff --git a/docs/source_files/pixi_config_tomls/main_config.toml b/docs/source_files/pixi_config_tomls/main_config.toml index 16a5955431..a94a93c9e1 100644 --- a/docs/source_files/pixi_config_tomls/main_config.toml +++ b/docs/source_files/pixi_config_tomls/main_config.toml @@ -44,7 +44,6 @@ tool-platform = "win-64" # force tools like build backends to be installed for a # This should only be used for specific old versions of artifactory and other non-compliant # servers. 
disable-bzip2 = true # don't try to download repodata.json.bz2 -disable-jlap = true # don't try to download repodata.jlap [default: true] disable-sharded = true # don't try to download sharded repodata disable-zstd = true # don't try to download repodata.json.zst # --8<-- [end:repodata-config] From 91ecc4c6346f9c1b964f9213f1aea130ac26f2da Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Wed, 4 Mar 2026 16:59:01 +0100 Subject: [PATCH 03/15] wip: updated --- Cargo.lock | 1 + crates/pixi_api/Cargo.toml | 1 + crates/pixi_api/src/workspace/list/mod.rs | 15 ++++--- crates/pixi_api/src/workspace/list/package.rs | 44 ++++++++++++------- crates/pixi_cli/src/list.rs | 2 +- .../src/lock_file/satisfiability/mod.rs | 4 +- 6 files changed, 42 insertions(+), 25 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 77f9a68077..e420550f6f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5680,6 +5680,7 @@ dependencies = [ "uv-distribution", "uv-distribution-types", "uv-normalize", + "uv-pypi-types", "uv-types", ] diff --git a/crates/pixi_api/Cargo.toml b/crates/pixi_api/Cargo.toml index c9d961d7f3..1603c90b50 100644 --- a/crates/pixi_api/Cargo.toml +++ b/crates/pixi_api/Cargo.toml @@ -38,5 +38,6 @@ tracing = { workspace = true } url = { workspace = true } uv-distribution = { workspace = true } uv-distribution-types = { workspace = true } +uv-pypi-types = { workspace = true } uv-normalize = { workspace = true } uv-types = { workspace = true } diff --git a/crates/pixi_api/src/workspace/list/mod.rs b/crates/pixi_api/src/workspace/list/mod.rs index bf617b3cae..a3f2591a49 100644 --- a/crates/pixi_api/src/workspace/list/mod.rs +++ b/crates/pixi_api/src/workspace/list/mod.rs @@ -43,17 +43,22 @@ pub async fn list( // Load the platform let platform = platform.unwrap_or_else(|| environment.best_platform()); + let locked_platform = lock_file.platform(platform.as_str()); + let locked_environment = lock_file.environment(environment.name().as_str()); 
// Get all the packages in the environment. - let locked_deps = lock_file - .environment(environment.name().as_str()) - .and_then(|env| env.packages(platform).map(Vec::from_iter)) - .unwrap_or_default(); + let locked_deps = match (locked_platform, locked_environment) { + (Some(locked_platform), Some(locked_environment)) => locked_environment + .packages(locked_platform) + .map(Vec::from_iter) + .unwrap_or_default(), + _ => Vec::new(), + }; let locked_deps_ext = locked_deps .into_iter() .map(|p| match p { - LockedPackageRef::Pypi(pypi_data, _) => { + LockedPackageRef::Pypi(pypi_data) => { let name = to_uv_normalize(&pypi_data.name)?; Ok(PackageExt::PyPI(pypi_data.clone(), name)) } diff --git a/crates/pixi_api/src/workspace/list/package.rs b/crates/pixi_api/src/workspace/list/package.rs index c505ab4a7d..e0334ae6af 100644 --- a/crates/pixi_api/src/workspace/list/package.rs +++ b/crates/pixi_api/src/workspace/list/package.rs @@ -1,15 +1,14 @@ -use std::borrow::Cow; -use std::collections::HashMap; +use std::{borrow::Cow, collections::HashMap}; -use pixi_uv_conversions::to_uv_version; use rattler_lock::{CondaPackageData, PypiPackageData, UrlOrPath}; use serde::Serialize; use uv_distribution::RegistryWheelIndex; +use uv_pypi_types::HashAlgorithm; #[derive(Debug, Clone, Serialize)] pub struct Package { pub name: String, - pub version: String, + pub version: Option, pub build: Option, pub build_number: Option, pub size_bytes: Option, @@ -49,7 +48,7 @@ impl Package { registry_index: Option<&'a mut RegistryWheelIndex<'b>>, ) -> Self { let name = package.name().to_string(); - let version = package.version().into_owned(); + let version = package.version(); let kind = PackageKind::from(package); let build = match package { @@ -76,12 +75,18 @@ impl Package { PackageExt::PyPI(p, name) => { // Check the hash to avoid non index packages to be handled by the registry // index as wheels - if p.hash.is_some() { + if let Some(hash) = &p.hash { if let Some(registry_index) = 
registry_index { // Handle case where the registry index is present let entry = registry_index.get(name).find(|i| { - i.dist.filename.version - == to_uv_version(&p.version).expect("invalid version") + i.dist.hashes.iter().any(|h| { + (h.algorithm == HashAlgorithm::Sha256 + && hash.sha256().map(|hash| format!("{hash:x}")).as_deref() + == Some(h.digest.as_ref())) + || (h.algorithm == HashAlgorithm::Md5 + && hash.md5().map(|hash| format!("{hash:x}")).as_deref() + == Some(h.digest.as_ref())) + }) }); let size = entry.and_then(|e| get_dir_size(e.dist.path.clone()).ok()); let name = entry.map(|e| e.dist.filename.to_string()); @@ -163,10 +168,14 @@ impl Package { ), CondaPackageData::Source(source) => (None, Some(source.location.to_string())), }, - PackageExt::PyPI(p, _) => match &p.location { - UrlOrPath::Url(url) => (None, Some(url.to_string())), - UrlOrPath::Path(path) => (None, Some(path.to_string())), - }, + PackageExt::PyPI(p, _) => ( + None, + Some( + p.location + .given() + .map_or_else(|| p.location.to_string(), ToOwned::to_owned), + ), + ), }; let requested_spec = requested_specs.get(&name).cloned(); @@ -174,7 +183,10 @@ impl Package { let is_editable = match package { PackageExt::Conda(_) => false, - PackageExt::PyPI(p, _) => p.editable, + PackageExt::PyPI(_p, _) => { + // TODO: Should be derived from the input specs. 
+ false + } }; let constrains = match package { @@ -291,10 +303,10 @@ impl PackageExt { } /// Returns the version string of the package - pub fn version(&self) -> Cow<'_, str> { + pub fn version(&self) -> Option { match self { - Self::Conda(value) => value.record().version.as_str(), - Self::PyPI(value, _) => value.version.to_string().into(), + Self::Conda(value) => Some(value.record().version.to_string().into()), + Self::PyPI(value, _) => value.version.as_ref().map(|v| v.to_string().into()), } } } diff --git a/crates/pixi_cli/src/list.rs b/crates/pixi_cli/src/list.rs index 1df2b0390f..b0f7e52520 100644 --- a/crates/pixi_cli/src/list.rs +++ b/crates/pixi_cli/src/list.rs @@ -269,7 +269,7 @@ fn get_field_cell(package: &Package, field: Field) -> Cell { }; Cell::new(content) } - Field::Version => Cell::new(&package.version), + Field::Version => Cell::new(package.version.as_deref().unwrap_or_default()), Field::Build => Cell::new(package.build.as_deref().unwrap_or_default()), Field::BuildNumber => Cell::new( package diff --git a/crates/pixi_core/src/lock_file/satisfiability/mod.rs b/crates/pixi_core/src/lock_file/satisfiability/mod.rs index ec1685f0a1..89a4aa6ff8 100644 --- a/crates/pixi_core/src/lock_file/satisfiability/mod.rs +++ b/crates/pixi_core/src/lock_file/satisfiability/mod.rs @@ -778,9 +778,7 @@ impl PypiNoBuildCheck { // and not resolve correctly from the current working directory let is_editable = source .map(|source| match source { - PixiPypiSource::Path { path: _, editable } => { - editable.unwrap_or_default() - } + PixiPypiSource::Path { path: _, editable } => editable.unwrap_or_default(), _ => false, }) .unwrap_or_default(); From dc540b345adcc97110acd062d9d2d8e0e895014b Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Thu, 5 Mar 2026 10:55:59 +0100 Subject: [PATCH 04/15] fix: all tests, clippy and format --- crates/pixi_api/src/workspace/list/package.rs | 4 +- crates/pixi_build_rust/src/config.rs | 16 ++--- 
crates/pixi_build_rust/src/main.rs | 37 ++++++----- .../src/lock_file/satisfiability/mod.rs | 61 +++++++++++++------ crates/pixi_core/src/lock_file/update.rs | 4 +- .../src/lock_file/virtual_packages.rs | 2 +- crates/pixi_install_pypi/src/plan/test/mod.rs | 4 +- crates/pixi_manifest/src/toml/workspace.rs | 2 +- crates/pixi_pypi_spec/src/lib.rs | 2 +- 9 files changed, 73 insertions(+), 59 deletions(-) diff --git a/crates/pixi_api/src/workspace/list/package.rs b/crates/pixi_api/src/workspace/list/package.rs index e0334ae6af..3cc4c3b0ff 100644 --- a/crates/pixi_api/src/workspace/list/package.rs +++ b/crates/pixi_api/src/workspace/list/package.rs @@ -305,8 +305,8 @@ impl PackageExt { /// Returns the version string of the package pub fn version(&self) -> Option { match self { - Self::Conda(value) => Some(value.record().version.to_string().into()), - Self::PyPI(value, _) => value.version.as_ref().map(|v| v.to_string().into()), + Self::Conda(value) => Some(value.record().version.to_string()), + Self::PyPI(value, _) => value.version.as_ref().map(|v| v.to_string()), } } } diff --git a/crates/pixi_build_rust/src/config.rs b/crates/pixi_build_rust/src/config.rs index f5e6932aa6..a16de760cf 100644 --- a/crates/pixi_build_rust/src/config.rs +++ b/crates/pixi_build_rust/src/config.rs @@ -36,12 +36,6 @@ pub struct RustBackendConfig { pub binaries: Vec, } -impl Default for RustBackendConfig { - fn default() -> Self { - Self::new_with_system_environment() - } -} - fn collect_system_env() -> IndexMap { std::env::vars().collect() } @@ -72,10 +66,10 @@ impl RustBackendConfig { /// Creates a new [`RustBackendConfig`] with default values and /// `ignore_cargo_manifest` set to `true`. 
#[cfg(test)] - pub fn default_with_ignore_cargo_manifest() -> Self { + pub fn with_ignore_cargo_manifest(self) -> Self { Self { ignore_cargo_manifest: Some(true), - ..Default::default() + ..self } } } @@ -258,7 +252,7 @@ mod tests { binaries: vec![], }; - let empty_target_config = RustBackendConfig::default(); + let empty_target_config = RustBackendConfig::new_with_clean_environment(); let merged = base_config .merge_with_target_config(&empty_target_config) @@ -277,12 +271,12 @@ mod tests { fn test_merge_target_debug_dir_error() { let base_config = RustBackendConfig { debug_dir: Some(PathBuf::from("/base/debug")), - ..Default::default() + ..RustBackendConfig::new_with_clean_environment() }; let target_config = RustBackendConfig { debug_dir: Some(PathBuf::from("/target/debug")), - ..Default::default() + ..RustBackendConfig::new_with_clean_environment() }; let result = base_config.merge_with_target_config(&target_config); diff --git a/crates/pixi_build_rust/src/main.rs b/crates/pixi_build_rust/src/main.rs index d7133367dd..726335b515 100644 --- a/crates/pixi_build_rust/src/main.rs +++ b/crates/pixi_build_rust/src/main.rs @@ -246,6 +246,11 @@ pub async fn main() { #[cfg(test)] mod tests { + use cargo_toml::Manifest; + use indexmap::IndexMap; + use recipe_stage0::recipe::{Item, Value}; + + use super::*; #[tokio::test] async fn test_binaries_flag_is_rendered() { @@ -259,8 +264,7 @@ mod tests { &project_model, &RustBackendConfig { binaries: vec!["rattler-build".to_string()], - ignore_cargo_manifest: Some(true), - ..Default::default() + ..RustBackendConfig::new_with_clean_environment().with_ignore_cargo_manifest() }, PathBuf::from("."), Platform::Linux64, @@ -275,17 +279,12 @@ mod tests { let content = &generated_recipe.recipe.build.script.content; assert!(content.contains("--bin rattler-build")); } - use cargo_toml::Manifest; - use indexmap::IndexMap; - use recipe_stage0::recipe::{Item, Value}; - - use super::*; #[test] fn test_input_globs_includes_extra_globs() { let 
config = RustBackendConfig { extra_input_globs: vec!["custom/*.txt".to_string(), "extra/**/*.py".to_string()], - ..Default::default() + ..RustBackendConfig::new_with_clean_environment() }; let generator = RustGenerator::default(); @@ -339,7 +338,7 @@ mod tests { let generated_recipe = RustGenerator::default() .generate_recipe( &project_model, - &RustBackendConfig::default_with_ignore_cargo_manifest(), + &RustBackendConfig::new_with_clean_environment().with_ignore_cargo_manifest(), PathBuf::from("."), Platform::Linux64, None, @@ -384,7 +383,7 @@ mod tests { let generated_recipe = RustGenerator::default() .generate_recipe( &project_model, - &RustBackendConfig::default_with_ignore_cargo_manifest(), + &RustBackendConfig::new_with_clean_environment().with_ignore_cargo_manifest(), PathBuf::from("."), Platform::Linux64, None, @@ -428,7 +427,7 @@ mod tests { env: env.clone(), system_env: Default::default(), ignore_cargo_manifest: Some(true), - ..Default::default() + ..RustBackendConfig::new_with_clean_environment() }, PathBuf::from("."), Platform::Linux64, @@ -473,7 +472,7 @@ mod tests { env, system_env, ignore_cargo_manifest: Some(true), - ..Default::default() + ..RustBackendConfig::new_with_clean_environment() }, PathBuf::from("."), Platform::Linux64, @@ -507,7 +506,7 @@ mod tests { let generated_recipe = RustGenerator::default() .generate_recipe( &project_model, - &RustBackendConfig::default(), + &RustBackendConfig::new_with_clean_environment(), // Using this crate itself, as it has interesting metadata, using .workspace std::env::current_dir().unwrap(), Platform::Linux64, @@ -608,7 +607,7 @@ mod tests { let result = RustGenerator::default() .generate_recipe( &project_model, - &RustBackendConfig::default(), + &RustBackendConfig::new_with_clean_environment(), PathBuf::from("/non/existent/path"), Platform::Linux64, None, @@ -639,7 +638,7 @@ mod tests { let result = RustGenerator::default() .generate_recipe( &project_model, - 
&RustBackendConfig::default_with_ignore_cargo_manifest(), + &RustBackendConfig::new_with_clean_environment().with_ignore_cargo_manifest(), std::env::current_dir().unwrap(), Platform::Linux64, None, @@ -678,7 +677,7 @@ mod tests { &RustBackendConfig { compilers: Some(vec!["rust".to_string(), "c".to_string(), "cxx".to_string()]), ignore_cargo_manifest: Some(true), - ..Default::default() + ..RustBackendConfig::new_with_clean_environment() }, PathBuf::from("."), Platform::Linux64, @@ -746,7 +745,7 @@ mod tests { &RustBackendConfig { compilers: None, ignore_cargo_manifest: Some(true), - ..Default::default() + ..RustBackendConfig::new_with_clean_environment() }, PathBuf::from("."), Platform::Linux64, @@ -828,7 +827,7 @@ mod tests { let generated_recipe = RustGenerator::default() .generate_recipe( &project_model, - &RustBackendConfig::default_with_ignore_cargo_manifest(), + &RustBackendConfig::new_with_clean_environment().with_ignore_cargo_manifest(), PathBuf::from("."), Platform::Linux64, None, @@ -925,7 +924,7 @@ mod tests { let generated_recipe = RustGenerator::default() .generate_recipe( &project_model, - &RustBackendConfig::default_with_ignore_cargo_manifest(), + &RustBackendConfig::new_with_clean_environment().with_ignore_cargo_manifest(), PathBuf::from("."), Platform::Linux64, None, diff --git a/crates/pixi_core/src/lock_file/satisfiability/mod.rs b/crates/pixi_core/src/lock_file/satisfiability/mod.rs index 89a4aa6ff8..5fa6ebd069 100644 --- a/crates/pixi_core/src/lock_file/satisfiability/mod.rs +++ b/crates/pixi_core/src/lock_file/satisfiability/mod.rs @@ -2122,7 +2122,7 @@ pub(crate) async fn verify_package_platform_satisfiability( if requirement.is_editable() { if let Err(err) = - pypi_satisfies_editable(&requirement, &record, project_root) + pypi_satisfies_editable(&requirement, record, project_root) { delayed_pypi_error.get_or_insert(err); } @@ -2130,7 +2130,7 @@ pub(crate) async fn verify_package_platform_satisfiability( FoundPackage::PyPi(PypiPackageIdx(idx), 
requirement.extras.to_vec()) } else { if let Err(err) = - pypi_satisfies_requirement(&requirement, &record, project_root) + pypi_satisfies_requirement(&requirement, record, project_root) { delayed_pypi_error.get_or_insert(err); } @@ -2942,15 +2942,12 @@ mod tests { .unwrap(); } - // Currently this test is missing from `good_satisfiability`, so we test the - // specific windows case here this should work an all supported platforms - // - // Do not use windows here: The path gets normalized to something unix-y, and - // the lockfile keeps the "pretty" path the suer filled in at all times. So - // on windows the test fails. + // Do not use unix paths on windows: The path gets normalized to something + // unix-y, and the lockfile keeps the "pretty" path the user filled in at + // all times. So on windows the test fails. + #[cfg(not(target_os = "windows"))] #[test] fn test_unix_absolute_path_handling() { - // Mock locked data let locked_data = PypiPackageData { name: "mypkg".parse().unwrap(), version: Some(Version::from_str("0.1.0").unwrap()), @@ -2966,7 +2963,27 @@ mod tests { let spec = pep508_requirement_to_uv_requirement(spec).unwrap(); - // This should satisfy: + pypi_satisfies_requirement(&spec, &locked_data, Path::new("")).unwrap(); + } + + #[test] + fn test_windows_absolute_path_handling() { + let locked_data = PypiPackageData { + name: "mypkg".parse().unwrap(), + version: Some(Version::from_str("0.1.0").unwrap()), + location: Verbatim::new(UrlOrPath::Path("C:\\Users\\username\\mypkg.tar.gz".into())), + hash: None, + index_url: None, + requires_dist: vec![], + requires_python: None, + }; + + let spec = + pep508_rs::Requirement::from_str("mypkg @ file:///C:\\Users\\username\\mypkg.tar.gz") + .unwrap(); + + let spec = pep508_requirement_to_uv_requirement(spec).unwrap(); + pypi_satisfies_requirement(&spec, &locked_data, Path::new("")).unwrap(); } @@ -2991,15 +3008,21 @@ mod tests { let pypi_no_build_check = PypiNoBuildCheck::new(Some(&NoBuild::All)); 
pypi_no_build_check - .check(&PypiPackageData { - name: PackageName::from_str("sdist").expect("invalid name"), - version: pep440_rs::Version::from_str("0.0.0").expect("invalid version"), - location: UrlOrPath::from_str(".").expect("invalid path"), - hash: None, - requires_dist: vec![], - requires_python: None, - editable: true, - }) + .check( + &PypiPackageData { + name: PackageName::from_str("sdist").expect("invalid name"), + version: Some(pep440_rs::Version::from_str("0.0.0").expect("invalid version")), + location: UrlOrPath::from_str(".").expect("invalid path").into(), + index_url: None, + hash: None, + requires_dist: vec![], + requires_python: None, + }, + Some(&PixiPypiSource::Path { + path: PathBuf::from("").into(), + editable: Some(true), + }), + ) .expect("check must pass"); } } diff --git a/crates/pixi_core/src/lock_file/update.rs b/crates/pixi_core/src/lock_file/update.rs index 6fbb1fc62e..2bec59eb8d 100644 --- a/crates/pixi_core/src/lock_file/update.rs +++ b/crates/pixi_core/src/lock_file/update.rs @@ -1393,9 +1393,7 @@ impl<'p> UpdateContextBuilder<'p> { .map(|(lock_platform, records)| { ( lock_platform.subdir(), - Arc::new(PypiRecordsByName::from_iter( - records.map(|data| data.clone()), - )), + Arc::new(PypiRecordsByName::from_iter(records.cloned())), ) }) .collect(), diff --git a/crates/pixi_core/src/lock_file/virtual_packages.rs b/crates/pixi_core/src/lock_file/virtual_packages.rs index dbfd2039dc..bd9f400748 100644 --- a/crates/pixi_core/src/lock_file/virtual_packages.rs +++ b/crates/pixi_core/src/lock_file/virtual_packages.rs @@ -266,7 +266,7 @@ pub(crate) fn validate_system_meets_environment_requirements( .ok_or(MachineValidationError::NoPythonRecordFound(platform))?; // Check if all the wheel tags match the system virtual packages - let pypi_packages = pypi_packages.map(|pkg_data| pkg_data.clone()).collect_vec(); + let pypi_packages = pypi_packages.cloned().collect_vec(); let wheels = get_wheels_from_pypi_package_data(pypi_packages); diff --git 
a/crates/pixi_install_pypi/src/plan/test/mod.rs b/crates/pixi_install_pypi/src/plan/test/mod.rs index 380be30c1a..33072b5659 100644 --- a/crates/pixi_install_pypi/src/plan/test/mod.rs +++ b/crates/pixi_install_pypi/src/plan/test/mod.rs @@ -124,7 +124,7 @@ fn test_install_required_mismatch() { assert_matches!( installs.reinstalls[0].1, NeedReinstall::VersionMismatch { ref installed_version, ref locked_version } - if installed_version.to_string() == "0.6.0" && locked_version.to_string() == "0.7.0" + if installed_version.to_string() == "0.6.0" && locked_version == "0.7.0" ); assert!(installs.cached.is_empty()); // Not cached we get it from the remote @@ -160,7 +160,7 @@ fn test_install_required_mismatch_cached() { assert_matches!( installs.reinstalls[0].1, NeedReinstall::VersionMismatch { ref installed_version, ref locked_version } - if installed_version.to_string() == "0.6.0" && locked_version.to_string() == "0.7.0" + if installed_version.to_string() == "0.6.0" && locked_version == "0.7.0" ); assert!(installs.remote.is_empty()); // Not cached we get it from the remote diff --git a/crates/pixi_manifest/src/toml/workspace.rs b/crates/pixi_manifest/src/toml/workspace.rs index f92c54ae30..d7e002ca02 100644 --- a/crates/pixi_manifest/src/toml/workspace.rs +++ b/crates/pixi_manifest/src/toml/workspace.rs @@ -393,7 +393,7 @@ mod test { .and_then(|w| w.into_workspace(ExternalWorkspaceProperties::default(), path)) .unwrap_err(); assert_snapshot!(format_parse_error(input, parse_error), @r#" - × `date` is neither a valid date (input contains invalid characters) nor a valid datetime (input contains invalid characters) + × `date` is neither a valid date (input contains invalid characters) nor a valid datetime (premature end of input) ╭─[pixi.toml:4:26] 3 │ platforms = [] 4 │ exclude-newer = "date" diff --git a/crates/pixi_pypi_spec/src/lib.rs b/crates/pixi_pypi_spec/src/lib.rs index 216929b1bf..85286d775b 100644 --- a/crates/pixi_pypi_spec/src/lib.rs +++ 
b/crates/pixi_pypi_spec/src/lib.rs @@ -616,7 +616,7 @@ mod tests { assert_eq!( as_pypi_req, PixiPypiSpec::new(PixiPypiSource::Path { - path: PathBuf::from("C:/path/to/boltons"), + path: PathBuf::from("C:/path/to/boltons").into(), editable: None, }) ); From 0597490500742c86d60d6e5818fb2a40a6c51bc3 Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Thu, 5 Mar 2026 14:07:39 +0100 Subject: [PATCH 05/15] fix: pytests --- pixi.lock | 61 +++++++++++++++++++ pixi.toml | 1 + .../integration_python/pixi_build/test_git.py | 17 ++---- .../test_specified_build_source/test_git.py | 4 +- 4 files changed, 69 insertions(+), 14 deletions(-) diff --git a/pixi.lock b/pixi.lock index ac7c217dfa..2b07e98769 100644 --- a/pixi.lock +++ b/pixi.lock @@ -297,12 +297,14 @@ environments: - conda: https://prefix.dev/conda-forge/linux-64/perl-5.32.1-7_hd590300_perl5.conda - conda: https://prefix.dev/conda-forge/linux-64/pkg-config-0.29.2-h4bc722e_1009.conda - conda: https://prefix.dev/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda + - conda: https://prefix.dev/conda-forge/noarch/pprintpp-0.4.0-pyhd8ed1ab_6.conda - conda: https://prefix.dev/conda-forge/linux-64/py-rattler-0.20.0-py310h70157a2_0.conda - conda: https://prefix.dev/conda-forge/noarch/pydantic-2.12.5-pyhcf101f3_1.conda - conda: https://prefix.dev/conda-forge/linux-64/pydantic-core-2.41.5-py313h843e2db_1.conda - conda: https://prefix.dev/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pyproject_hooks-1.2.0-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/pytest-clarity-1.0.1-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-rerunfailures-16.1-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-timeout-2.4.0-pyhd8ed1ab_0.conda - conda: 
https://prefix.dev/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda @@ -319,6 +321,7 @@ environments: - conda: https://prefix.dev/conda-forge/linux-64/rust-1.90.0-h53717f1_0.conda - conda: https://prefix.dev/conda-forge/noarch/rust-src-1.90.0-unix_0.conda - conda: https://prefix.dev/conda-forge/noarch/rust-std-x86_64-unknown-linux-gnu-1.90.0-h2c6d0dc_0.conda + - conda: https://prefix.dev/conda-forge/noarch/setuptools-82.0.1-pyh332efcf_0.conda - conda: https://prefix.dev/conda-forge/linux-64/shellcheck-0.10.0-ha770c72_0.conda - conda: https://prefix.dev/conda-forge/noarch/sysroot_linux-64-2.28-h4ee821c_9.conda - conda: https://prefix.dev/conda-forge/linux-64/taplo-0.10.0-h2d22210_1.conda @@ -443,12 +446,14 @@ environments: - conda: https://prefix.dev/conda-forge/linux-aarch64/perl-5.32.1-7_h31becfc_perl5.conda - conda: https://prefix.dev/conda-forge/linux-aarch64/pkg-config-0.29.2-hce167ba_1009.conda - conda: https://prefix.dev/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda + - conda: https://prefix.dev/conda-forge/noarch/pprintpp-0.4.0-pyhd8ed1ab_6.conda - conda: https://prefix.dev/conda-forge/linux-aarch64/py-rattler-0.20.0-py310h48c5ec3_0.conda - conda: https://prefix.dev/conda-forge/noarch/pydantic-2.12.5-pyhcf101f3_1.conda - conda: https://prefix.dev/conda-forge/linux-aarch64/pydantic-core-2.41.5-py313h5e7b836_1.conda - conda: https://prefix.dev/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pyproject_hooks-1.2.0-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/pytest-clarity-1.0.1-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-rerunfailures-16.1-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-timeout-2.4.0-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda @@ -465,6 +470,7 @@ environments: - 
conda: https://prefix.dev/conda-forge/linux-aarch64/rust-1.90.0-h6cf38e9_0.conda - conda: https://prefix.dev/conda-forge/noarch/rust-src-1.90.0-unix_0.conda - conda: https://prefix.dev/conda-forge/noarch/rust-std-aarch64-unknown-linux-gnu-1.90.0-hbe8e118_0.conda + - conda: https://prefix.dev/conda-forge/noarch/setuptools-82.0.1-pyh332efcf_0.conda - conda: https://prefix.dev/conda-forge/linux-aarch64/shellcheck-0.10.0-h8af1aa0_0.conda - conda: https://prefix.dev/conda-forge/noarch/sysroot_linux-aarch64-2.28-h585391f_9.conda - conda: https://prefix.dev/conda-forge/linux-aarch64/taplo-0.10.0-h3618846_1.conda @@ -573,12 +579,14 @@ environments: - conda: https://prefix.dev/conda-forge/osx-64/perl-5.32.1-7_h10d778d_perl5.conda - conda: https://prefix.dev/conda-forge/osx-64/pkg-config-0.29.2-hf7e621a_1009.conda - conda: https://prefix.dev/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda + - conda: https://prefix.dev/conda-forge/noarch/pprintpp-0.4.0-pyhd8ed1ab_6.conda - conda: https://prefix.dev/conda-forge/osx-64/py-rattler-0.20.0-py310h9420a0c_0.conda - conda: https://prefix.dev/conda-forge/noarch/pydantic-2.12.5-pyhcf101f3_1.conda - conda: https://prefix.dev/conda-forge/osx-64/pydantic-core-2.41.5-py313hcc225dc_1.conda - conda: https://prefix.dev/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pyproject_hooks-1.2.0-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/pytest-clarity-1.0.1-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-rerunfailures-16.1-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-timeout-2.4.0-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda @@ -596,6 +604,7 @@ environments: - conda: https://prefix.dev/conda-forge/noarch/rust-src-1.90.0-unix_0.conda - conda: 
https://prefix.dev/conda-forge/noarch/rust-std-x86_64-apple-darwin-1.90.0-h38e4360_0.conda - conda: https://prefix.dev/conda-forge/noarch/sdkroot_env_osx-64-26.0-h62b880e_6.conda + - conda: https://prefix.dev/conda-forge/noarch/setuptools-82.0.1-pyh332efcf_0.conda - conda: https://prefix.dev/conda-forge/osx-64/shellcheck-0.10.0-h7dd6a17_0.conda - conda: https://prefix.dev/conda-forge/osx-64/sigtool-codesign-0.1.3-hc0f2934_0.conda - conda: https://prefix.dev/conda-forge/osx-64/tapi-1600.0.11.8-h8d8e812_0.conda @@ -705,12 +714,14 @@ environments: - conda: https://prefix.dev/conda-forge/osx-arm64/perl-5.32.1-7_h4614cfb_perl5.conda - conda: https://prefix.dev/conda-forge/osx-arm64/pkg-config-0.29.2-hde07d2e_1009.conda - conda: https://prefix.dev/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda + - conda: https://prefix.dev/conda-forge/noarch/pprintpp-0.4.0-pyhd8ed1ab_6.conda - conda: https://prefix.dev/conda-forge/osx-arm64/py-rattler-0.20.0-py310h9bd0991_0.conda - conda: https://prefix.dev/conda-forge/noarch/pydantic-2.12.5-pyhcf101f3_1.conda - conda: https://prefix.dev/conda-forge/osx-arm64/pydantic-core-2.41.5-py313h2c089d5_1.conda - conda: https://prefix.dev/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pyproject_hooks-1.2.0-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/pytest-clarity-1.0.1-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-rerunfailures-16.1-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-timeout-2.4.0-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda @@ -728,6 +739,7 @@ environments: - conda: https://prefix.dev/conda-forge/noarch/rust-src-1.90.0-unix_0.conda - conda: https://prefix.dev/conda-forge/noarch/rust-std-aarch64-apple-darwin-1.90.0-hf6ec828_0.conda - conda: 
https://prefix.dev/conda-forge/noarch/sdkroot_env_osx-arm64-26.0-ha3f98da_6.conda + - conda: https://prefix.dev/conda-forge/noarch/setuptools-82.0.1-pyh332efcf_0.conda - conda: https://prefix.dev/conda-forge/osx-arm64/shellcheck-0.10.0-hecfb573_0.conda - conda: https://prefix.dev/conda-forge/osx-arm64/sigtool-codesign-0.1.3-h98dc951_0.conda - conda: https://prefix.dev/conda-forge/osx-arm64/tapi-1600.0.11.8-h997e182_0.conda @@ -800,12 +812,14 @@ environments: - conda: https://prefix.dev/conda-forge/win-64/pcre2-10.47-hd2b5f0e_0.conda - conda: https://prefix.dev/conda-forge/win-64/pkg-config-0.29.2-h88c491f_1009.conda - conda: https://prefix.dev/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda + - conda: https://prefix.dev/conda-forge/noarch/pprintpp-0.4.0-pyhd8ed1ab_6.conda - conda: https://prefix.dev/conda-forge/win-64/py-rattler-0.20.0-py310hb39080a_0.conda - conda: https://prefix.dev/conda-forge/noarch/pydantic-2.12.5-pyhcf101f3_1.conda - conda: https://prefix.dev/conda-forge/win-64/pydantic-core-2.41.5-py313hfbe8231_1.conda - conda: https://prefix.dev/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pyproject_hooks-1.2.0-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/pytest-clarity-1.0.1-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-rerunfailures-16.1-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-timeout-2.4.0-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda @@ -820,6 +834,7 @@ environments: - conda: https://prefix.dev/conda-forge/win-64/rust-1.90.0-hf8d6059_0.conda - conda: https://prefix.dev/conda-forge/noarch/rust-src-1.90.0-win_0.conda - conda: https://prefix.dev/conda-forge/noarch/rust-std-x86_64-pc-windows-msvc-1.90.0-h17fc481_0.conda + - conda: 
https://prefix.dev/conda-forge/noarch/setuptools-82.0.1-pyh332efcf_0.conda - conda: https://prefix.dev/conda-forge/win-64/shellcheck-0.10.0-h57928b3_0.conda - conda: https://prefix.dev/conda-forge/win-64/taplo-0.10.0-h63977a8_1.conda - conda: https://prefix.dev/conda-forge/win-64/tk-8.6.13-h6ed50ae_3.conda @@ -1801,10 +1816,12 @@ environments: - conda: https://prefix.dev/conda-forge/linux-64/pcre2-10.47-haa7fec5_0.conda - conda: https://prefix.dev/conda-forge/linux-64/perl-5.32.1-7_hd590300_perl5.conda - conda: https://prefix.dev/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda + - conda: https://prefix.dev/conda-forge/noarch/pprintpp-0.4.0-pyhd8ed1ab_6.conda - conda: https://prefix.dev/conda-forge/linux-64/py-rattler-0.20.0-py310h70157a2_0.conda - conda: https://prefix.dev/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pyproject_hooks-1.2.0-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/pytest-clarity-1.0.1-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-rerunfailures-16.1-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-timeout-2.4.0-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda @@ -1816,6 +1833,7 @@ environments: - conda: https://prefix.dev/conda-forge/linux-64/rattler-build-0.58.0-ha759004_0.conda - conda: https://prefix.dev/conda-forge/linux-64/readline-8.3-h853b02a_0.conda - conda: https://prefix.dev/conda-forge/noarch/rich-14.3.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/setuptools-82.0.0-pyh332efcf_0.conda - conda: https://prefix.dev/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda - conda: https://prefix.dev/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda - conda: https://prefix.dev/conda-forge/noarch/tomli-w-1.2.0-pyhd8ed1ab_0.conda @@ -1879,10 +1897,12 @@ 
environments: - conda: https://prefix.dev/conda-forge/linux-aarch64/pcre2-10.47-hf841c20_0.conda - conda: https://prefix.dev/conda-forge/linux-aarch64/perl-5.32.1-7_h31becfc_perl5.conda - conda: https://prefix.dev/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda + - conda: https://prefix.dev/conda-forge/noarch/pprintpp-0.4.0-pyhd8ed1ab_6.conda - conda: https://prefix.dev/conda-forge/linux-aarch64/py-rattler-0.20.0-py310h48c5ec3_0.conda - conda: https://prefix.dev/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pyproject_hooks-1.2.0-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/pytest-clarity-1.0.1-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-rerunfailures-16.1-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-timeout-2.4.0-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda @@ -1894,6 +1914,7 @@ environments: - conda: https://prefix.dev/conda-forge/linux-aarch64/rattler-build-0.58.0-h1d7f6d8_0.conda - conda: https://prefix.dev/conda-forge/linux-aarch64/readline-8.3-hb682ff5_0.conda - conda: https://prefix.dev/conda-forge/noarch/rich-14.3.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/setuptools-82.0.0-pyh332efcf_0.conda - conda: https://prefix.dev/conda-forge/linux-aarch64/tk-8.6.13-noxft_h0dc03b3_103.conda - conda: https://prefix.dev/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda - conda: https://prefix.dev/conda-forge/noarch/tomli-w-1.2.0-pyhd8ed1ab_0.conda @@ -1947,10 +1968,12 @@ environments: - conda: https://prefix.dev/conda-forge/osx-64/pcre2-10.47-h13923f0_0.conda - conda: https://prefix.dev/conda-forge/osx-64/perl-5.32.1-7_h10d778d_perl5.conda - conda: https://prefix.dev/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda + - conda: 
https://prefix.dev/conda-forge/noarch/pprintpp-0.4.0-pyhd8ed1ab_6.conda - conda: https://prefix.dev/conda-forge/osx-64/py-rattler-0.20.0-py310h9420a0c_0.conda - conda: https://prefix.dev/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pyproject_hooks-1.2.0-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/pytest-clarity-1.0.1-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-rerunfailures-16.1-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-timeout-2.4.0-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda @@ -1962,6 +1985,7 @@ environments: - conda: https://prefix.dev/conda-forge/osx-64/rattler-build-0.58.0-hcb3c93d_0.conda - conda: https://prefix.dev/conda-forge/osx-64/readline-8.3-h68b038d_0.conda - conda: https://prefix.dev/conda-forge/noarch/rich-14.3.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/setuptools-82.0.0-pyh332efcf_0.conda - conda: https://prefix.dev/conda-forge/osx-64/tk-8.6.13-h7142dee_3.conda - conda: https://prefix.dev/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda - conda: https://prefix.dev/conda-forge/noarch/tomli-w-1.2.0-pyhd8ed1ab_0.conda @@ -2016,10 +2040,12 @@ environments: - conda: https://prefix.dev/conda-forge/osx-arm64/pcre2-10.47-h30297fc_0.conda - conda: https://prefix.dev/conda-forge/osx-arm64/perl-5.32.1-7_h4614cfb_perl5.conda - conda: https://prefix.dev/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda + - conda: https://prefix.dev/conda-forge/noarch/pprintpp-0.4.0-pyhd8ed1ab_6.conda - conda: https://prefix.dev/conda-forge/osx-arm64/py-rattler-0.20.0-py310h9bd0991_0.conda - conda: https://prefix.dev/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pyproject_hooks-1.2.0-pyhd8ed1ab_1.conda - conda: 
https://prefix.dev/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/pytest-clarity-1.0.1-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-rerunfailures-16.1-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-timeout-2.4.0-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda @@ -2031,6 +2057,7 @@ environments: - conda: https://prefix.dev/conda-forge/osx-arm64/rattler-build-0.58.0-h2307240_0.conda - conda: https://prefix.dev/conda-forge/osx-arm64/readline-8.3-h46df422_0.conda - conda: https://prefix.dev/conda-forge/noarch/rich-14.3.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/setuptools-82.0.0-pyh332efcf_0.conda - conda: https://prefix.dev/conda-forge/osx-arm64/tk-8.6.13-h010d191_3.conda - conda: https://prefix.dev/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda - conda: https://prefix.dev/conda-forge/noarch/tomli-w-1.2.0-pyhd8ed1ab_0.conda @@ -2071,10 +2098,12 @@ environments: - conda: https://prefix.dev/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda - conda: https://prefix.dev/conda-forge/noarch/pathspec-1.0.4-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda + - conda: https://prefix.dev/conda-forge/noarch/pprintpp-0.4.0-pyhd8ed1ab_6.conda - conda: https://prefix.dev/conda-forge/win-64/py-rattler-0.20.0-py310hb39080a_0.conda - conda: https://prefix.dev/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pyproject_hooks-1.2.0-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/pytest-clarity-1.0.1-pyhd8ed1ab_1.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-rerunfailures-16.1-pyhd8ed1ab_0.conda - conda: https://prefix.dev/conda-forge/noarch/pytest-timeout-2.4.0-pyhd8ed1ab_0.conda - 
conda: https://prefix.dev/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda @@ -2085,6 +2114,7 @@ environments: - conda: https://prefix.dev/conda-forge/win-64/pyyaml-6.0.3-py313hd650c13_1.conda - conda: https://prefix.dev/conda-forge/win-64/rattler-build-0.58.0-he94b42d_0.conda - conda: https://prefix.dev/conda-forge/noarch/rich-14.3.2-pyhcf101f3_0.conda + - conda: https://prefix.dev/conda-forge/noarch/setuptools-82.0.0-pyh332efcf_0.conda - conda: https://prefix.dev/conda-forge/win-64/tk-8.6.13-h6ed50ae_3.conda - conda: https://prefix.dev/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda - conda: https://prefix.dev/conda-forge/noarch/tomli-w-1.2.0-pyhd8ed1ab_0.conda @@ -10073,6 +10103,15 @@ packages: license_family: MIT size: 25877 timestamp: 1764896838868 +- conda: https://prefix.dev/conda-forge/noarch/pprintpp-0.4.0-pyhd8ed1ab_6.conda + sha256: d05df88a0f124674f53f71a202677f8e39a51b15fd5955a641a4c603178b8595 + md5: b18522851a856112f953a46bb27329e1 + depends: + - python >=3.9 + license: BSD-2-Clause + license_family: BSD + size: 18591 + timestamp: 1734642267876 - conda: https://prefix.dev/conda-forge/noarch/prompt-toolkit-3.0.52-pyha770c72_0.conda sha256: 4817651a276016f3838957bfdf963386438c70761e9faec7749d411635979bae md5: edb16f14d920fb3faf17f5ce582942d6 @@ -10560,6 +10599,19 @@ packages: license_family: MIT size: 299581 timestamp: 1765062031645 +- conda: https://prefix.dev/conda-forge/noarch/pytest-clarity-1.0.1-pyhd8ed1ab_1.conda + sha256: ea1f1431b595e26c4f17bf73abfe977778b76c574c7f08762b007cfd06a486ee + md5: 2a456e36ccfc0b7011c3b7d44f01886a + depends: + - pprintpp >=0.4.0 + - pytest >=3.5.0 + - python >=3.9 + - rich >=8.0.0 + - setuptools + license: MIT + license_family: MIT + size: 11147 + timestamp: 1736876408107 - conda: https://prefix.dev/conda-forge/noarch/pytest-rerunfailures-16.1-pyhd8ed1ab_0.conda sha256: 437f0e7805e471dcc57afd4b122d5025fa2162e4c031dc9e8c6f2c05c4d50cc0 md5: b57fe0c7e03b97c3554e6cea827e2058 @@ -11709,6 +11761,15 @@ packages: 
license_family: MIT size: 637506 timestamp: 1770634745653 +- conda: https://prefix.dev/conda-forge/noarch/setuptools-82.0.1-pyh332efcf_0.conda + sha256: 82088a6e4daa33329a30bc26dc19a98c7c1d3f05c0f73ce9845d4eab4924e9e1 + md5: 8e194e7b992f99a5015edbd4ebd38efd + depends: + - python >=3.10 + license: MIT + license_family: MIT + size: 639697 + timestamp: 1773074868565 - conda: https://prefix.dev/conda-forge/linux-64/shellcheck-0.10.0-ha770c72_0.conda sha256: 6809031184c07280dcbaed58e15020317226a3ed234b99cb1bd98384ea5be813 md5: 61b19e9e334ddcdf8bb2422ee576549e diff --git a/pixi.toml b/pixi.toml index 965f261007..1c54878dd9 100644 --- a/pixi.toml +++ b/pixi.toml @@ -268,6 +268,7 @@ pytest = ">=9.0.2,<10" pytest-rerunfailures = ">=16.0.1,<17" pytest-timeout = ">=2.4.0,<3" pytest-xdist = ">=3.8.0,<4" +pytest-clarity = ">=1.0.1,<2" pyyaml = ">=6.0.3,<7" rattler-build = ">=0.58.0,<0.59" rich = ">=14.1.0,<15" diff --git a/tests/integration_python/pixi_build/test_git.py b/tests/integration_python/pixi_build/test_git.py index 4b5a09f227..61fa5985de 100644 --- a/tests/integration_python/pixi_build/test_git.py +++ b/tests/integration_python/pixi_build/test_git.py @@ -57,7 +57,7 @@ def test_build_git_source_deps(pixi: Path, tmp_pixi_workspace: Path, build_data: # verify that we indeed recorded the git url with it's commit pixi_lock_file = minimal_workspace / "pixi.lock" - assert f"conda: git+{target_git_url}#{commit_hash}" in pixi_lock_file.read_text() + assert f"@ git+{target_git_url}#{commit_hash}" in pixi_lock_file.read_text() # now we update source code so we can verify that # both pixi-git will discover a new commit @@ -80,7 +80,7 @@ def test_build_git_source_deps(pixi: Path, tmp_pixi_workspace: Path, build_data: # verify that we indeed recorded the git url with it's commit pixi_lock_file = minimal_workspace / "pixi.lock" - assert f"conda: git+{target_git_url}#{new_commit_hash}" in pixi_lock_file.read_text() + assert f"@ git+{target_git_url}#{new_commit_hash}" in 
pixi_lock_file.read_text() # run the *built* script to verify that new name is used verify_cli_command( @@ -147,10 +147,7 @@ def test_build_git_source_deps_from_branch( pixi_lock_file = minimal_workspace / "pixi.lock" # verify that we recorded used the branch - assert ( - f"conda: git+{target_git_url}?branch=test-branch#{commit_hash}" - in pixi_lock_file.read_text() - ) + assert f"@ git+{target_git_url}?branch=test-branch#{commit_hash}" in pixi_lock_file.read_text() @pytest.mark.slow @@ -212,8 +209,7 @@ def test_build_git_source_deps_from_rev( # verify that we recorded used rev but also the full one assert ( - f"conda: git+{target_git_url}?rev={commit_hash[:7]}#{commit_hash}" - in pixi_lock_file.read_text() + f"@ git+{target_git_url}?rev={commit_hash[:7]}#{commit_hash}" in pixi_lock_file.read_text() ) @@ -271,7 +267,4 @@ def test_build_git_source_deps_from_tag( pixi_lock_file = minimal_workspace / "pixi.lock" # verify that we recorded used rev but also the full one - assert ( - f"conda: git+{target_git_dir.as_uri()}?tag=v1.0.0#{commit_hash}" - in pixi_lock_file.read_text() - ) + assert f"@ git+{target_git_dir.as_uri()}?tag=v1.0.0#{commit_hash}" in pixi_lock_file.read_text() diff --git a/tests/integration_python/pixi_build/test_specified_build_source/test_git.py b/tests/integration_python/pixi_build/test_specified_build_source/test_git.py index 2ca7e47adb..97764c84d6 100644 --- a/tests/integration_python/pixi_build/test_specified_build_source/test_git.py +++ b/tests/integration_python/pixi_build/test_specified_build_source/test_git.py @@ -30,7 +30,7 @@ def iter_entries() -> Any: for entry in iter_entries(): if isinstance(entry, dict): entry = cast(dict[str, Any], entry) - if entry.get("conda") == ".": + if (v := entry.get("source")) and v.endswith("@ ."): package_build_source = entry.get("package_build_source") if package_build_source is not None: serialized_sources.append(package_build_source) @@ -413,7 +413,7 @@ def test_git_path_lock_detects_manual_rev_change( 
def mutate(node: Any) -> None: if isinstance(node, dict): node = cast(dict[str, Any], node) - if node.get("conda") == "." and "package_build_source" in node: + if (v := node.get("source")) and v.endswith("@ ."): node["package_build_source"]["rev"] = local_cpp_git_repo.other_feature_rev for value in node.values(): mutate(value) From ed069d9651f754b823a67d42dba889556866af21 Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Thu, 5 Mar 2026 16:55:30 +0100 Subject: [PATCH 06/15] tests and fixes for dynamic versions --- .../src/lock_file/records_by_name.rs | 138 ++++++++++++++++-- .../pixi_core/src/lock_file/resolve/pypi.rs | 8 +- .../src/lock_file/satisfiability/mod.rs | 95 +++++++++++- ...g_satisfiability@pypi-missing-version.snap | 8 + ...satisfiability@pypi-no-longer-dynamic.snap | 9 ++ .../pypi-missing-version/pixi.lock | 29 ++++ .../pypi-missing-version/pixi.toml | 13 ++ .../dynamic-dep/dynamic_dep/__init__.py | 0 .../dynamic-dep/pyproject.toml | 10 ++ .../dynamic-dep/setup.py | 2 + .../pypi-no-longer-dynamic/pixi.lock | 26 ++++ .../pypi-no-longer-dynamic/pixi.toml | 10 ++ .../dynamic-dep/dynamic_dep/__init__.py | 0 .../dynamic-dep/pyproject.toml | 7 + .../pypi-dynamic-version/dynamic-dep/setup.py | 2 + .../pypi-dynamic-version/pixi.lock | 22 +++ .../pypi-dynamic-version/pixi.toml | 10 ++ 17 files changed, 372 insertions(+), 17 deletions(-) create mode 100644 crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-missing-version.snap create mode 100644 crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-no-longer-dynamic.snap create mode 100644 tests/data/non-satisfiability/pypi-missing-version/pixi.lock create mode 100644 tests/data/non-satisfiability/pypi-missing-version/pixi.toml create mode 100644 
tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/dynamic_dep/__init__.py create mode 100644 tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/pyproject.toml create mode 100644 tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/setup.py create mode 100644 tests/data/non-satisfiability/pypi-no-longer-dynamic/pixi.lock create mode 100644 tests/data/non-satisfiability/pypi-no-longer-dynamic/pixi.toml create mode 100644 tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/dynamic_dep/__init__.py create mode 100644 tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/pyproject.toml create mode 100644 tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/setup.py create mode 100644 tests/data/satisfiability/pypi-dynamic-version/pixi.lock create mode 100644 tests/data/satisfiability/pypi-dynamic-version/pixi.toml diff --git a/crates/pixi_core/src/lock_file/records_by_name.rs b/crates/pixi_core/src/lock_file/records_by_name.rs index 93447482b8..7169dbd055 100644 --- a/crates/pixi_core/src/lock_file/records_by_name.rs +++ b/crates/pixi_core/src/lock_file/records_by_name.rs @@ -20,8 +20,9 @@ pub trait HasNameVersion { /// Returns the name of the dependency fn name(&self) -> &Self::N; - /// Returns the version of the dependency - fn version(&self) -> &Self::V; + /// Returns the version of the dependency, or `None` if the version is + /// unknown (e.g. a pypi source dependency with a dynamic version). 
+ fn version(&self) -> Option<&Self::V>; } impl HasNameVersion for PypiPackageData { @@ -31,10 +32,8 @@ impl HasNameVersion for PypiPackageData { fn name(&self) -> &pep508_rs::PackageName { &self.name } - fn version(&self) -> &Self::V { - self.version - .as_ref() - .expect("pypi record dedup requires a version") + fn version(&self) -> Option<&Self::V> { + self.version.as_ref() } } @@ -45,8 +44,8 @@ impl HasNameVersion for RepoDataRecord { fn name(&self) -> &rattler_conda_types::PackageName { &self.package_record.name } - fn version(&self) -> &Self::V { - &self.package_record.version + fn version(&self) -> Option<&Self::V> { + Some(&self.package_record.version) } } @@ -58,8 +57,8 @@ impl HasNameVersion for PixiRecord { &self.package_record().name } - fn version(&self) -> &Self::V { - &self.package_record().version + fn version(&self) -> Option<&Self::V> { + Some(&self.package_record().version) } } @@ -163,10 +162,15 @@ impl DependencyRecordsByName { entry.insert(idx); } Entry::Occupied(entry) => { - // Use the entry with the highest version or otherwise the first we encounter. + // Use the entry with the highest version or otherwise the first + // we encounter. If either version is `None` (e.g. a pypi source + // dependency with a dynamic version), keep the existing entry. 
let idx = *entry.get(); - if records[idx].version() < record.version() { - records[idx] = record; + if let (Some(existing), Some(new)) = (records[idx].version(), record.version()) + { + if existing < new { + records[idx] = record; + } } } } @@ -218,3 +222,111 @@ impl PixiRecordsByName { .collect::, ConversionError>>() } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::lock_file::PypiPackageData; + use rattler_lock::{UrlOrPath, Verbatim}; + use std::str::FromStr; + + fn make_pypi_package(name: &str, version: Option<&str>) -> PypiPackageData { + PypiPackageData { + name: name.parse().unwrap(), + version: version.map(|v| pep440_rs::Version::from_str(v).unwrap()), + location: Verbatim::new(UrlOrPath::Path(format!("./{name}").into())), + hash: None, + index_url: None, + requires_dist: vec![], + requires_python: None, + } + } + + #[test] + fn from_iter_with_none_version_does_not_panic() { + // A single package with no version should work fine. + let records = vec![make_pypi_package("dynamic-dep", None)]; + let by_name = PypiRecordsByName::from_iter(records); + assert_eq!(by_name.len(), 1); + assert!(by_name.records[0].version.is_none()); + } + + #[test] + fn from_iter_dedup_keeps_first_when_both_versions_none() { + // Two packages with the same name and no version — should keep the first. + let records = vec![ + make_pypi_package("dynamic-dep", None), + make_pypi_package("dynamic-dep", None), + ]; + let by_name = PypiRecordsByName::from_iter(records); + assert_eq!(by_name.len(), 1); + assert!(by_name.records[0].version.is_none()); + } + + #[test] + fn from_iter_dedup_keeps_first_when_existing_has_no_version() { + // First entry has no version, second has a version — keeps the first + // because we can't compare None to Some. 
+ let records = vec![ + make_pypi_package("pkg", None), + make_pypi_package("pkg", Some("1.0.0")), + ]; + let by_name = PypiRecordsByName::from_iter(records); + assert_eq!(by_name.len(), 1); + assert!(by_name.records[0].version.is_none()); + } + + #[test] + fn from_iter_dedup_keeps_first_when_new_has_no_version() { + // First entry has a version, second has no version — keeps the first. + let records = vec![ + make_pypi_package("pkg", Some("1.0.0")), + make_pypi_package("pkg", None), + ]; + let by_name = PypiRecordsByName::from_iter(records); + assert_eq!(by_name.len(), 1); + assert_eq!( + by_name.records[0].version.as_ref().unwrap().to_string(), + "1.0.0" + ); + } + + #[test] + fn from_iter_dedup_picks_higher_version() { + let records = vec![ + make_pypi_package("pkg", Some("1.0.0")), + make_pypi_package("pkg", Some("2.0.0")), + ]; + let by_name = PypiRecordsByName::from_iter(records); + assert_eq!(by_name.len(), 1); + assert_eq!( + by_name.records[0].version.as_ref().unwrap().to_string(), + "2.0.0" + ); + } + + #[test] + fn from_unique_iter_with_none_version() { + // from_unique_iter should work fine with None version (it doesn't compare versions). 
+ let records = vec![make_pypi_package("dynamic-dep", None)]; + let by_name = PypiRecordsByName::from_unique_iter(records).unwrap(); + assert_eq!(by_name.len(), 1); + assert!(by_name.records[0].version.is_none()); + } + + #[test] + fn mixed_versioned_and_dynamic_packages() { + let records = vec![ + make_pypi_package("versioned-pkg", Some("1.0.0")), + make_pypi_package("dynamic-pkg", None), + ]; + let by_name = PypiRecordsByName::from_iter(records); + assert_eq!(by_name.len(), 2); + + let versioned = by_name.by_name(&"versioned-pkg".parse().unwrap()).unwrap(); + assert_eq!(versioned.version.as_ref().unwrap().to_string(), "1.0.0"); + + let dynamic = by_name.by_name(&"dynamic-pkg".parse().unwrap()).unwrap(); + assert!(dynamic.version.is_none()); + } +} diff --git a/crates/pixi_core/src/lock_file/resolve/pypi.rs b/crates/pixi_core/src/lock_file/resolve/pypi.rs index a345d163b1..e401bfa330 100644 --- a/crates/pixi_core/src/lock_file/resolve/pypi.rs +++ b/crates/pixi_core/src/lock_file/resolve/pypi.rs @@ -427,7 +427,13 @@ pub async fn resolve_pypi( // A python-3.10.6-xxx.conda package record becomes a "==3.10.6.*" requires python specifier. let python_specifier = uv_pep440::VersionSpecifier::from_version( uv_pep440::Operator::EqualStar, - uv_pep440::Version::from_str(&python_record.version().as_str()).into_diagnostic()?, + uv_pep440::Version::from_str( + &python_record + .version() + .expect("python record always has a version") + .as_str(), + ) + .into_diagnostic()?, ) .into_diagnostic() .context("error creating version specifier for python version")?; diff --git a/crates/pixi_core/src/lock_file/satisfiability/mod.rs b/crates/pixi_core/src/lock_file/satisfiability/mod.rs index 5fa6ebd069..f88a083b42 100644 --- a/crates/pixi_core/src/lock_file/satisfiability/mod.rs +++ b/crates/pixi_core/src/lock_file/satisfiability/mod.rs @@ -1041,9 +1041,17 @@ pub(crate) fn pypi_satisfies_requirement( match &spec.source { RequirementSource::Registry { specifier, .. 
} => { - // In the old way we always satisfy based on version so let's keep it similar - // here - let version_string = locked_data.version_string(); + // If the locked package has no version (e.g. a source dependency with + // dynamic version), it cannot satisfy a registry version specifier. + let Some(locked_version) = &locked_data.version else { + return Err(PlatformUnsat::LockedPyPIVersionsMismatch { + name: spec.name.clone().to_string(), + specifiers: specifier.clone().to_string(), + version: locked_data.version_string(), + } + .into()); + }; + let version_string = locked_version.to_string(); if specifier.contains( &uv_pep440::Version::from_str(&version_string).expect("could not parse version"), ) { @@ -3025,4 +3033,85 @@ mod tests { ) .expect("check must pass"); } + + /// Test that `pypi_satisfies_requirement` works correctly when a pypi + /// package has no version (dynamic version from a source dependency). + /// Path-based requirements should still satisfy. + #[cfg(not(target_os = "windows"))] + #[test] + fn test_pypi_satisfies_path_requirement_without_version() { + let locked_data = PypiPackageData { + name: "dynamic-dep".parse().unwrap(), + version: None, + location: Verbatim::new(UrlOrPath::Path("/home/user/project/dynamic-dep".into())), + hash: None, + index_url: None, + requires_dist: vec![], + requires_python: None, + }; + + let spec = pep508_requirement_to_uv_requirement( + pep508_rs::Requirement::from_str("dynamic-dep @ file:///home/user/project/dynamic-dep") + .unwrap(), + ) + .unwrap(); + + // A path-based source dependency without a version should still satisfy + // a path-based requirement. + pypi_satisfies_requirement(&spec, &locked_data, Path::new("")).unwrap(); + } + + /// Windows variant of the path-based dynamic version test. 
+ #[cfg(target_os = "windows")] + #[test] + fn test_pypi_satisfies_path_requirement_without_version() { + let locked_data = PypiPackageData { + name: "dynamic-dep".parse().unwrap(), + version: None, + location: Verbatim::new(UrlOrPath::Path( + "C:\\Users\\user\\project\\dynamic-dep".into(), + )), + hash: None, + index_url: None, + requires_dist: vec![], + requires_python: None, + }; + + let spec = pep508_requirement_to_uv_requirement( + pep508_rs::Requirement::from_str( + "dynamic-dep @ file:///C:\\Users\\user\\project\\dynamic-dep", + ) + .unwrap(), + ) + .unwrap(); + + // A path-based source dependency without a version should still satisfy + // a path-based requirement. + pypi_satisfies_requirement(&spec, &locked_data, Path::new("")).unwrap(); + } + + /// Test that `pypi_satisfies_requirement` works with a git-based + /// requirement when the locked package has no version. + #[test] + fn test_pypi_satisfies_git_requirement_without_version() { + let locked_data = PypiPackageData { + name: "mypkg".parse().unwrap(), + version: None, + location: "git+https://github.com/mypkg.git#29932f3915935d773dc8d52c292cadd81c81071d" + .parse() + .expect("failed to parse url"), + hash: None, + index_url: None, + requires_dist: vec![], + requires_python: None, + }; + + let spec = pep508_requirement_to_uv_requirement( + pep508_rs::Requirement::from_str("mypkg @ git+https://github.com/mypkg").unwrap(), + ) + .unwrap(); + + // A git-based source dependency without a version should still satisfy. 
+ pypi_satisfies_requirement(&spec, &locked_data, Path::new("")).unwrap(); + } } diff --git a/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-missing-version.snap b/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-missing-version.snap new file mode 100644 index 0000000000..87fecd3354 --- /dev/null +++ b/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-missing-version.snap @@ -0,0 +1,8 @@ +--- +source: crates/pixi_core/src/lock_file/satisfiability/mod.rs +assertion_line: 2788 +expression: s +--- +environment 'default' does not satisfy the requirements of the project for platform 'win-64' + Diagnostic severity: error + Caused by: 'my-dep' with specifiers '>=1.0' does not match the locked version '' diff --git a/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-no-longer-dynamic.snap b/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-no-longer-dynamic.snap new file mode 100644 index 0000000000..d3b77da011 --- /dev/null +++ b/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-no-longer-dynamic.snap @@ -0,0 +1,9 @@ +--- +source: crates/pixi_core/src/lock_file/satisfiability/mod.rs +assertion_line: 2788 +expression: s +--- +environment 'default' does not satisfy the requirements of the project for platform 'win-64' + Diagnostic severity: error + Caused by: source tree hash for dynamic-dep does not match the hash in the lock-file + Caused by: the computed source tree hash is 'd7bcc86f965f14921af407a3a7cb3ea9398b3c18a9c57b796345678c4ddbfc70', but the lock-file contains 
'93b37bda9cded35069601a8e2d2cecdba7a13fe9ac356985c53784c26461911d' diff --git a/tests/data/non-satisfiability/pypi-missing-version/pixi.lock b/tests/data/non-satisfiability/pypi-missing-version/pixi.lock new file mode 100644 index 0000000000..fca9c706ab --- /dev/null +++ b/tests/data/non-satisfiability/pypi-missing-version/pixi.lock @@ -0,0 +1,29 @@ +# +# This lock-file should not satisfy the accompanying pixi.toml file. +# +# my-dep is requested as a versioned pypi package (>=1.0) but the lock-file +# entry has no version. A registry dependency always requires a version +# in the lock-file to verify that it matches the requested specifier. + +version: 7 +platforms: +- name: win-64 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + indexes: + - https://pypi.org/simple + packages: + win-64: + - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.3-h2628c8c_0_cpython.conda + - pypi: https://files.pythonhosted.org/packages/my-dep-1.0.0.whl +packages: +- conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.3-h2628c8c_0_cpython.conda + sha256: 1a95494abe572a8819c933f978df89f00bde72ea9432d46a70632599e8029ea4 + md5: f07c8c5dd98767f9a652de5d039b284e + # Faked to be empty to reduce the size of the example + depends: [] +- pypi: https://files.pythonhosted.org/packages/my-dep-1.0.0.whl + name: my-dep + sha256: 0000000000000000000000000000000000000000000000000000000000000000 diff --git a/tests/data/non-satisfiability/pypi-missing-version/pixi.toml b/tests/data/non-satisfiability/pypi-missing-version/pixi.toml new file mode 100644 index 0000000000..e17cc0570e --- /dev/null +++ b/tests/data/non-satisfiability/pypi-missing-version/pixi.toml @@ -0,0 +1,13 @@ +[workspace] +channels = ["conda-forge"] +name = "pypi-missing-version" +platforms = ["win-64"] + +[dependencies] +python = "3.12.*" + +# This is a registry dependency that requires a version to be present +# in the lock file. 
The lock file has been hand-edited to remove +# the version, which should cause the satisfiability check to fail. +[pypi-dependencies] +my-dep = ">=1.0" diff --git a/tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/dynamic_dep/__init__.py b/tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/dynamic_dep/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/pyproject.toml b/tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/pyproject.toml new file mode 100644 index 0000000000..592051ee4b --- /dev/null +++ b/tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/pyproject.toml @@ -0,0 +1,10 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "dynamic-dep" +#dynamic = ["version"] +# The version is no longer dynamic, which should cause the satisfiability +# check to fail because the lock-file doesnt declare a version. 
+version = "1.0.0" diff --git a/tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/setup.py b/tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/setup.py new file mode 100644 index 0000000000..c6c9daab65 --- /dev/null +++ b/tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/setup.py @@ -0,0 +1,2 @@ +from setuptools import setup +setup(version="42.0.0") diff --git a/tests/data/non-satisfiability/pypi-no-longer-dynamic/pixi.lock b/tests/data/non-satisfiability/pypi-no-longer-dynamic/pixi.lock new file mode 100644 index 0000000000..cec6c5f423 --- /dev/null +++ b/tests/data/non-satisfiability/pypi-no-longer-dynamic/pixi.lock @@ -0,0 +1,26 @@ +version: 7 +platforms: +- name: win-64 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + indexes: + - https://pypi.org/simple + packages: + win-64: + - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.3-h2628c8c_0_cpython.conda + - pypi: ./dynamic-dep +packages: +- conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.3-h2628c8c_0_cpython.conda + sha256: 1a95494abe572a8819c933f978df89f00bde72ea9432d46a70632599e8029ea4 + md5: f07c8c5dd98767f9a652de5d039b284e + # Faked to be empty to reduce the size of the example + depends: [] +- pypi: ./dynamic-dep + name: dynamic-dep + sha256: 93b37bda9cded35069601a8e2d2cecdba7a13fe9ac356985c53784c26461911d + # The version in the source is no longer dynamic, which should cause the satisfiability + # check to fail because the lock-file doesnt declare a version. + # TODO: This current works as expected because we use the sha256 hash. But in the future we + # should check the actual metadata instead. 
diff --git a/tests/data/non-satisfiability/pypi-no-longer-dynamic/pixi.toml b/tests/data/non-satisfiability/pypi-no-longer-dynamic/pixi.toml new file mode 100644 index 0000000000..8997ab3a75 --- /dev/null +++ b/tests/data/non-satisfiability/pypi-no-longer-dynamic/pixi.toml @@ -0,0 +1,10 @@ +[workspace] +channels = ["conda-forge"] +name = "pypi-dynamic-version" +platforms = ["win-64"] + +[dependencies] +python = "3.12.*" + +[pypi-dependencies] +dynamic-dep = { path = "./dynamic-dep" } diff --git a/tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/dynamic_dep/__init__.py b/tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/dynamic_dep/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/pyproject.toml b/tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/pyproject.toml new file mode 100644 index 0000000000..34bfcef7c3 --- /dev/null +++ b/tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/pyproject.toml @@ -0,0 +1,7 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "dynamic-dep" +dynamic = ["version"] diff --git a/tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/setup.py b/tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/setup.py new file mode 100644 index 0000000000..c6c9daab65 --- /dev/null +++ b/tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/setup.py @@ -0,0 +1,2 @@ +from setuptools import setup +setup(version="42.0.0") diff --git a/tests/data/satisfiability/pypi-dynamic-version/pixi.lock b/tests/data/satisfiability/pypi-dynamic-version/pixi.lock new file mode 100644 index 0000000000..2478aa7356 --- /dev/null +++ b/tests/data/satisfiability/pypi-dynamic-version/pixi.lock @@ -0,0 +1,22 @@ +version: 7 +platforms: +- name: win-64 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + indexes: + - https://pypi.org/simple + packages: + 
win-64: + - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.3-h2628c8c_0_cpython.conda + - pypi: ./dynamic-dep +packages: +- conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.3-h2628c8c_0_cpython.conda + sha256: 1a95494abe572a8819c933f978df89f00bde72ea9432d46a70632599e8029ea4 + md5: f07c8c5dd98767f9a652de5d039b284e + # Faked to be empty to reduce the size of the example + depends: [] +- pypi: ./dynamic-dep + name: dynamic-dep + sha256: 93b37bda9cded35069601a8e2d2cecdba7a13fe9ac356985c53784c26461911d diff --git a/tests/data/satisfiability/pypi-dynamic-version/pixi.toml b/tests/data/satisfiability/pypi-dynamic-version/pixi.toml new file mode 100644 index 0000000000..8997ab3a75 --- /dev/null +++ b/tests/data/satisfiability/pypi-dynamic-version/pixi.toml @@ -0,0 +1,10 @@ +[workspace] +channels = ["conda-forge"] +name = "pypi-dynamic-version" +platforms = ["win-64"] + +[dependencies] +python = "3.12.*" + +[pypi-dependencies] +dynamic-dep = { path = "./dynamic-dep" } From 53b74b93da8bf667057f16f4696ef8c19609c26d Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Fri, 6 Mar 2026 13:29:53 +0100 Subject: [PATCH 07/15] fix: issues with indexes --- Cargo.lock | 2 + crates/pixi_api/Cargo.toml | 2 + crates/pixi_api/src/workspace/list/package.rs | 62 ++++++++++++------- .../src/lock_file/records_by_name.rs | 5 +- .../src/lock_file/satisfiability/mod.rs | 39 ++++++++++-- ...ng_satisfiability@pypi-index-mismatch.snap | 7 +++ ...satisfiability@pypi-no-longer-dynamic.snap | 3 +- crates/pixi_install_pypi/src/conversions.rs | 25 ++++---- .../pypi-index-mismatch/pixi.lock | 31 ++++++++++ .../pypi-index-mismatch/pixi.toml | 10 +++ .../satisfiability/pypi-index-match/pixi.lock | 30 +++++++++ .../satisfiability/pypi-index-match/pixi.toml | 10 +++ 12 files changed, 180 insertions(+), 46 deletions(-) create mode 100644 
crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-index-mismatch.snap create mode 100644 tests/data/non-satisfiability/pypi-index-mismatch/pixi.lock create mode 100644 tests/data/non-satisfiability/pypi-index-mismatch/pixi.toml create mode 100644 tests/data/satisfiability/pypi-index-match/pixi.lock create mode 100644 tests/data/satisfiability/pypi-index-match/pixi.toml diff --git a/Cargo.lock b/Cargo.lock index e420550f6f..9392f95cb0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5678,8 +5678,10 @@ dependencies = [ "tracing", "url", "uv-distribution", + "uv-distribution-filename", "uv-distribution-types", "uv-normalize", + "uv-pep508", "uv-pypi-types", "uv-types", ] diff --git a/crates/pixi_api/Cargo.toml b/crates/pixi_api/Cargo.toml index 1603c90b50..69f920e1eb 100644 --- a/crates/pixi_api/Cargo.toml +++ b/crates/pixi_api/Cargo.toml @@ -36,8 +36,10 @@ tempfile = { workspace = true } tokio = { workspace = true, features = ["fs"] } tracing = { workspace = true } url = { workspace = true } +uv-pep508 = { workspace = true } uv-distribution = { workspace = true } uv-distribution-types = { workspace = true } +uv-distribution-filename = { workspace = true } uv-pypi-types = { workspace = true } uv-normalize = { workspace = true } uv-types = { workspace = true } diff --git a/crates/pixi_api/src/workspace/list/package.rs b/crates/pixi_api/src/workspace/list/package.rs index 3cc4c3b0ff..cb09a5d597 100644 --- a/crates/pixi_api/src/workspace/list/package.rs +++ b/crates/pixi_api/src/workspace/list/package.rs @@ -1,9 +1,13 @@ -use std::{borrow::Cow, collections::HashMap}; - +use pixi_consts::consts; +use pixi_uv_conversions::to_uv_version; use rattler_lock::{CondaPackageData, PypiPackageData, UrlOrPath}; use serde::Serialize; +use std::str::FromStr; +use std::{borrow::Cow, collections::HashMap}; use uv_distribution::RegistryWheelIndex; -use uv_pypi_types::HashAlgorithm; +use 
uv_distribution_filename::WheelFilename; +use uv_distribution_types::IndexUrl; +use uv_pep508::VerbatimUrl; #[derive(Debug, Clone, Serialize)] pub struct Package { @@ -28,6 +32,7 @@ pub struct Package { pub noarch: Option, pub file_name: Option, pub url: Option, + pub index_url: Option, pub requested_spec: Option, pub constrains: Vec, pub depends: Vec, @@ -73,27 +78,36 @@ impl Package { }, ), PackageExt::PyPI(p, name) => { - // Check the hash to avoid non index packages to be handled by the registry - // index as wheels - if let Some(hash) = &p.hash { - if let Some(registry_index) = registry_index { + if p.hash.is_some() { + let url = p + .index_url + .clone() + .unwrap_or_else(|| consts::DEFAULT_PYPI_INDEX_URL.clone()); + let index = IndexUrl::from(VerbatimUrl::from(url)); + let size = if let Some(registry_index) = registry_index { // Handle case where the registry index is present - let entry = registry_index.get(name).find(|i| { - i.dist.hashes.iter().any(|h| { - (h.algorithm == HashAlgorithm::Sha256 - && hash.sha256().map(|hash| format!("{hash:x}")).as_deref() - == Some(h.digest.as_ref())) - || (h.algorithm == HashAlgorithm::Md5 - && hash.md5().map(|hash| format!("{hash:x}")).as_deref() - == Some(h.digest.as_ref())) - }) + let wheel_filename = p + .location + .file_name() + .and_then(|f| WheelFilename::from_str(f).ok()); + let entry = registry_index.get(name).find(|entry| { + if entry.index.url() != &index { + return false; + } + if let Some(filename) = &wheel_filename { + &entry.dist.filename == filename + } else if let Some(version) = &p.version { + Some(&entry.dist.filename.version) + == to_uv_version(version).ok().as_ref() + } else { + false + } }); - let size = entry.and_then(|e| get_dir_size(e.dist.path.clone()).ok()); - let name = entry.map(|e| e.dist.filename.to_string()); - (size, name) + entry.and_then(|e| get_dir_size(&e.dist.path).ok()) } else { - get_pypi_location_information(&p.location) - } + get_pypi_location_information(&p.location).0 + }; + 
(size, Some(index.to_string())) } else { get_pypi_location_information(&p.location) } @@ -178,6 +192,11 @@ impl Package { ), }; + let index_url = match package { + PackageExt::PyPI(p, _) => p.index_url.as_ref().map(|u| u.to_string()), + PackageExt::Conda(_) => None, + }; + let requested_spec = requested_specs.get(&name).cloned(); let is_explicit = requested_spec.is_some(); @@ -225,6 +244,7 @@ impl Package { noarch, file_name, url, + index_url, requested_spec, constrains, depends, diff --git a/crates/pixi_core/src/lock_file/records_by_name.rs b/crates/pixi_core/src/lock_file/records_by_name.rs index 7169dbd055..00ed216793 100644 --- a/crates/pixi_core/src/lock_file/records_by_name.rs +++ b/crates/pixi_core/src/lock_file/records_by_name.rs @@ -167,10 +167,9 @@ impl DependencyRecordsByName { // dependency with a dynamic version), keep the existing entry. let idx = *entry.get(); if let (Some(existing), Some(new)) = (records[idx].version(), record.version()) + && existing < new { - if existing < new { - records[idx] = record; - } + records[idx] = record; } } } diff --git a/crates/pixi_core/src/lock_file/satisfiability/mod.rs b/crates/pixi_core/src/lock_file/satisfiability/mod.rs index f88a083b42..e6e1252942 100644 --- a/crates/pixi_core/src/lock_file/satisfiability/mod.rs +++ b/crates/pixi_core/src/lock_file/satisfiability/mod.rs @@ -395,6 +395,13 @@ pub enum PlatformUnsat { locked_path: String, }, + #[error("'{name}' requires index {expected_index} but the lock-file has {locked_index}")] + LockedPyPIIndexMismatch { + name: String, + expected_index: String, + locked_index: String, + }, + #[error("failed to convert between pep508 and uv types: {0}")] UvTypesConversionError(#[from] ConversionError), @@ -1040,7 +1047,9 @@ pub(crate) fn pypi_satisfies_requirement( } match &spec.source { - RequirementSource::Registry { specifier, .. } => { + RequirementSource::Registry { + specifier, index, .. + } => { // If the locked package has no version (e.g. 
a source dependency with // dynamic version), it cannot satisfy a registry version specifier. let Some(locked_version) = &locked_data.version else { @@ -1052,18 +1061,36 @@ pub(crate) fn pypi_satisfies_requirement( .into()); }; let version_string = locked_version.to_string(); - if specifier.contains( + if !specifier.contains( &uv_pep440::Version::from_str(&version_string).expect("could not parse version"), ) { - Ok(()) - } else { - Err(PlatformUnsat::LockedPyPIVersionsMismatch { + return Err(PlatformUnsat::LockedPyPIVersionsMismatch { name: spec.name.clone().to_string(), specifiers: specifier.clone().to_string(), version: version_string, } - .into()) + .into()); } + + // If the requirement specifies an explicit index, verify the lock-file matches + if let Some(required_index) = index { + let required_url: Url = required_index.url.url().clone().into(); + match &locked_data.index_url { + Some(locked_url) if locked_url == &required_url => {} + other => { + return Err(PlatformUnsat::LockedPyPIIndexMismatch { + name: spec.name.to_string(), + expected_index: required_url.to_string(), + locked_index: other + .as_ref() + .map_or("".to_string(), |u| u.to_string()), + } + .into()); + } + } + } + + Ok(()) } RequirementSource::Url { url: spec_url, .. 
} => { if let UrlOrPath::Url(locked_url) = &*locked_data.location { diff --git a/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-index-mismatch.snap b/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-index-mismatch.snap new file mode 100644 index 0000000000..3702db7219 --- /dev/null +++ b/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-index-mismatch.snap @@ -0,0 +1,7 @@ +--- +source: crates/pixi_core/src/lock_file/satisfiability/mod.rs +expression: s +--- +environment 'default' does not satisfy the requirements of the project for platform 'win-64' + Diagnostic severity: error + Caused by: 'my-dep' requires index https://custom.example.com/simple but the lock-file has https://other.example.com/simple diff --git a/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-no-longer-dynamic.snap b/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-no-longer-dynamic.snap index d3b77da011..a6515b6004 100644 --- a/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-no-longer-dynamic.snap +++ b/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-no-longer-dynamic.snap @@ -1,9 +1,8 @@ --- source: crates/pixi_core/src/lock_file/satisfiability/mod.rs -assertion_line: 2788 expression: s --- environment 'default' does not satisfy the requirements of the project for platform 'win-64' Diagnostic severity: error Caused by: source tree hash for dynamic-dep does not match the hash in the lock-file - Caused by: the computed source tree hash is 
'd7bcc86f965f14921af407a3a7cb3ea9398b3c18a9c57b796345678c4ddbfc70', but the lock-file contains '93b37bda9cded35069601a8e2d2cecdba7a13fe9ac356985c53784c26461911d' + Caused by: the computed source tree hash is '50db39844d97e7135f4c4f85d97e02b97f82179de450ca879c12a8fd5db456cc', but the lock-file contains '93b37bda9cded35069601a8e2d2cecdba7a13fe9ac356985c53784c26461911d' diff --git a/crates/pixi_install_pypi/src/conversions.rs b/crates/pixi_install_pypi/src/conversions.rs index 075f529eaf..dc4e0f0e16 100644 --- a/crates/pixi_install_pypi/src/conversions.rs +++ b/crates/pixi_install_pypi/src/conversions.rs @@ -1,6 +1,5 @@ use std::path::Path; use std::str::FromStr; -use std::sync::Arc; use pixi_consts::consts; use pixi_record::LockedGitUrl; @@ -19,6 +18,15 @@ use uv_pypi_types::{HashAlgorithm, HashDigest, ParsedUrl, ParsedUrlError, Verbat use super::utils::{is_direct_url, strip_direct_scheme}; +/// Build an [`IndexUrl`] from the lock-file's optional `index_url`. +/// Falls back to `DEFAULT_PYPI_INDEX_URL` when the lock-file has no stored index. 
+fn index_url_from_lock(index: Option<&Url>) -> IndexUrl { + let url = index + .cloned() + .unwrap_or_else(|| consts::DEFAULT_PYPI_INDEX_URL.clone()); + IndexUrl::from(uv_pep508::VerbatimUrl::from(url)) +} + /// Converts our locked data to a file pub fn locked_data_to_file( url: &Url, @@ -164,15 +172,7 @@ pub fn convert_to_dist( wheels: vec![RegistryBuiltWheel { filename, file: Box::new(file), - // This should be fine because currently it is only used for caching - // When upgrading uv and running into problems we would need to sort this - // out but it would require adding the indexes to - // the lock file - index: IndexUrl::Pypi(Arc::new(uv_pep508::VerbatimUrl::from_url( - uv_redacted::DisplaySafeUrl::from( - consts::DEFAULT_PYPI_INDEX_URL.clone(), - ), - ))), + index: index_url_from_lock(pkg.index_url.as_ref()), }], best_wheel_index: 0, sdist: None, @@ -188,10 +188,7 @@ pub fn convert_to_dist( name: pkg_name, version: pkg_version, file: Box::new(file), - // This should be fine because currently it is only used for caching - index: IndexUrl::Pypi(Arc::new(uv_pep508::VerbatimUrl::from_url( - uv_redacted::DisplaySafeUrl::from(consts::DEFAULT_PYPI_INDEX_URL.clone()), - ))), + index: index_url_from_lock(pkg.index_url.as_ref()), // I don't think this really matters for the install wheels: vec![], ext: SourceDistExtension::from_path(Path::new(filename_raw)).map_err(|e| { diff --git a/tests/data/non-satisfiability/pypi-index-mismatch/pixi.lock b/tests/data/non-satisfiability/pypi-index-mismatch/pixi.lock new file mode 100644 index 0000000000..6bec7b9b87 --- /dev/null +++ b/tests/data/non-satisfiability/pypi-index-mismatch/pixi.lock @@ -0,0 +1,31 @@ +# +# This lock-file should not satisfy the accompanying pixi.toml file. +# +# my-dep is requested from index https://custom.example.com/simple but the +# lock-file stores a different index (https://other.example.com/simple). +# The satisfiability check should detect this mismatch and fail. 
+ +version: 7 +platforms: + - name: win-64 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + indexes: + - https://pypi.org/simple + packages: + win-64: + - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.3-h2628c8c_0_cpython.conda + - pypi: https://other.example.com/simple/packages/my_dep-1.0.0-py3-none-any.whl +packages: + - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.3-h2628c8c_0_cpython.conda + sha256: 1a95494abe572a8819c933f978df89f00bde72ea9432d46a70632599e8029ea4 + md5: f07c8c5dd98767f9a652de5d039b284e + # Faked to be empty to reduce the size of the example + depends: [] + - pypi: https://other.example.com/simple/packages/my_dep-1.0.0-py3-none-any.whl + name: my-dep + version: 1.0.0 + sha256: 0000000000000000000000000000000000000000000000000000000000000000 + index: https://other.example.com/simple diff --git a/tests/data/non-satisfiability/pypi-index-mismatch/pixi.toml b/tests/data/non-satisfiability/pypi-index-mismatch/pixi.toml new file mode 100644 index 0000000000..615be6b82d --- /dev/null +++ b/tests/data/non-satisfiability/pypi-index-mismatch/pixi.toml @@ -0,0 +1,10 @@ +[workspace] +channels = ["conda-forge"] +name = "pypi-index-mismatch" +platforms = ["win-64"] + +[dependencies] +python = "3.12.*" + +[pypi-dependencies] +my-dep = { version = ">=1.0", index = "https://custom.example.com/simple" } diff --git a/tests/data/satisfiability/pypi-index-match/pixi.lock b/tests/data/satisfiability/pypi-index-match/pixi.lock new file mode 100644 index 0000000000..19e79ca76a --- /dev/null +++ b/tests/data/satisfiability/pypi-index-match/pixi.lock @@ -0,0 +1,30 @@ +# +# This lock-file should satisfy the accompanying pixi.toml file. +# +# my-dep is requested from index https://custom.example.com/simple and the +# lock-file stores the same index_url, so the satisfiability check should pass. 
+ +version: 7 +platforms: + - name: win-64 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + indexes: + - https://pypi.org/simple + packages: + win-64: + - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.3-h2628c8c_0_cpython.conda + - pypi: https://custom.example.com/simple/packages/my_dep-1.0.0-py3-none-any.whl +packages: + - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.3-h2628c8c_0_cpython.conda + sha256: 1a95494abe572a8819c933f978df89f00bde72ea9432d46a70632599e8029ea4 + md5: f07c8c5dd98767f9a652de5d039b284e + # Faked to be empty to reduce the size of the example + depends: [] + - pypi: https://custom.example.com/simple/packages/my_dep-1.0.0-py3-none-any.whl + name: my-dep + version: 1.0.0 + sha256: 0000000000000000000000000000000000000000000000000000000000000000 + index: https://custom.example.com/simple diff --git a/tests/data/satisfiability/pypi-index-match/pixi.toml b/tests/data/satisfiability/pypi-index-match/pixi.toml new file mode 100644 index 0000000000..640a5c8d4b --- /dev/null +++ b/tests/data/satisfiability/pypi-index-match/pixi.toml @@ -0,0 +1,10 @@ +[workspace] +channels = ["conda-forge"] +name = "pypi-index-match" +platforms = ["win-64"] + +[dependencies] +python = "3.12.*" + +[pypi-dependencies] +my-dep = { version = ">=1.0", index = "https://custom.example.com/simple" } From 6834f830e44ab9b6cc401eecba8d0522bde49ffa Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Fri, 6 Mar 2026 15:50:53 +0100 Subject: [PATCH 08/15] fix: lint issues --- Cargo.lock | 2 -- crates/pixi_api/Cargo.toml | 1 - crates/pixi_build_mojo/Cargo.toml | 7 +++---- crates/pixi_core/src/lock_file/update.rs | 11 +++++++---- .../pypi-no-longer-dynamic/dynamic-dep/setup.py | 1 + .../pypi-dynamic-version/dynamic-dep/setup.py | 1 + 6 files changed, 12 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9392f95cb0..5e3ecb64df 100644 --- 
a/Cargo.lock +++ b/Cargo.lock @@ -5682,7 +5682,6 @@ dependencies = [ "uv-distribution-types", "uv-normalize", "uv-pep508", - "uv-pypi-types", "uv-types", ] @@ -5835,7 +5834,6 @@ dependencies = [ "minijinja", "pixi_build_backend", "pixi_build_types", - "rattler_build_core", "rattler_build_jinja", "rattler_build_types", "rattler_conda_types", diff --git a/crates/pixi_api/Cargo.toml b/crates/pixi_api/Cargo.toml index 69f920e1eb..fd7cc7038a 100644 --- a/crates/pixi_api/Cargo.toml +++ b/crates/pixi_api/Cargo.toml @@ -40,6 +40,5 @@ uv-pep508 = { workspace = true } uv-distribution = { workspace = true } uv-distribution-types = { workspace = true } uv-distribution-filename = { workspace = true } -uv-pypi-types = { workspace = true } uv-normalize = { workspace = true } uv-types = { workspace = true } diff --git a/crates/pixi_build_mojo/Cargo.toml b/crates/pixi_build_mojo/Cargo.toml index 010faa7fa3..0e730d4bfb 100644 --- a/crates/pixi_build_mojo/Cargo.toml +++ b/crates/pixi_build_mojo/Cargo.toml @@ -13,8 +13,8 @@ dist = false [features] default = ["rustls-tls"] -native-tls = ["pixi_build_backend/native-tls", "rattler_build_core/native-tls"] -rustls-tls = ["pixi_build_backend/rustls-tls", "rattler_build_core/rustls-tls"] +native-tls = ["pixi_build_backend/native-tls"] +rustls-tls = ["pixi_build_backend/rustls-tls"] [dependencies] async-trait = { workspace = true } @@ -22,12 +22,10 @@ fs-err = { workspace = true } indexmap = { workspace = true } miette = { workspace = true } minijinja = { workspace = true } -rattler_build_core = { workspace = true } rattler_build_jinja = { workspace = true } rattler_build_types = { workspace = true } rattler_conda_types = { workspace = true } serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true } tempfile = { workspace = true } tokio = { workspace = true, features = ["macros"] } @@ -39,3 +37,4 @@ recipe_stage0 = { workspace = true } [dev-dependencies] insta = { workspace = true, features = ["yaml", 
"redactions", "filters"] } rstest = { workspace = true } +serde_json = { workspace = true } diff --git a/crates/pixi_core/src/lock_file/update.rs b/crates/pixi_core/src/lock_file/update.rs index 2bec59eb8d..ce0c25ad9a 100644 --- a/crates/pixi_core/src/lock_file/update.rs +++ b/crates/pixi_core/src/lock_file/update.rs @@ -713,7 +713,10 @@ impl<'p> LockFileDerivedData<'p> { ( data.clone(), pixi_install_pypi::ManifestData { - editable: is_editable_from_manifest(&manifest_pypi_deps, &data.name), + editable: is_editable_from_manifest( + &manifest_pypi_deps, + &data.name, + ), }, ) }) @@ -2778,7 +2781,7 @@ mod tests { // Simulate tool.pixi.pypi-dependencies (added first) let path_spec = PixiPypiSpec::new(pixi_pypi_spec::PixiPypiSource::Path { - path: "./requests".into(), + path: std::path::PathBuf::from("./requests").into(), editable: Some(true), }); deps.insert(name.clone(), path_spec); @@ -2823,14 +2826,14 @@ mod tests { // Higher-priority feature explicitly sets editable=false (inserted first) let non_editable_spec = PixiPypiSpec::new(pixi_pypi_spec::PixiPypiSource::Path { - path: "./requests".into(), + path: std::path::PathBuf::from("./requests").into(), editable: Some(false), }); deps.insert(name.clone(), non_editable_spec); // Lower-priority feature has editable=true (inserted second) let editable_spec = PixiPypiSpec::new(pixi_pypi_spec::PixiPypiSource::Path { - path: "./requests".into(), + path: std::path::PathBuf::from("./requests").into(), editable: Some(true), }); deps.insert(name.clone(), editable_spec); diff --git a/tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/setup.py b/tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/setup.py index c6c9daab65..2b0edaf440 100644 --- a/tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/setup.py +++ b/tests/data/non-satisfiability/pypi-no-longer-dynamic/dynamic-dep/setup.py @@ -1,2 +1,3 @@ from setuptools import setup + setup(version="42.0.0") diff --git 
a/tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/setup.py b/tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/setup.py index c6c9daab65..2b0edaf440 100644 --- a/tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/setup.py +++ b/tests/data/satisfiability/pypi-dynamic-version/dynamic-dep/setup.py @@ -1,2 +1,3 @@ from setuptools import setup + setup(version="42.0.0") From fec73b5e4a30a524f2c898e68f194e7b00a4763f Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Tue, 10 Mar 2026 10:55:18 +0100 Subject: [PATCH 09/15] feat: wire UnresolvedPixiRecord into lock-file boundary MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit All 3 lock-file read sites now use UnresolvedPixiRecord. Partial source records (from mutable path-based sources) are resolved to full PixiRecord via source_metadata() at the read boundary. No partial records propagate downstream. Write path: mutable source packages are now stored as partial records in the lock file (only name, depends, sources — no version/build/subdir). Immutable sources (git, url) remain full. Removed panicking PixiRecord::from_conda_package_data() and SourceRecord::from_conda_source_data() — all callers now go through UnresolvedPixiRecord which handles partial data safely. 
--- Cargo.lock | 22 - Cargo.toml | 38 +- crates/pixi_api/src/workspace/list/mod.rs | 7 +- crates/pixi_api/src/workspace/list/package.rs | 50 +- crates/pixi_cli/src/tree.rs | 8 +- .../src/install_pixi/mod.rs | 14 +- .../src/package_identifier.rs | 4 +- .../src/solve_conda/mod.rs | 20 +- .../solve_pixi/source_metadata_collector.rs | 8 +- .../src/source_build/mod.rs | 2 +- .../src/source_metadata/mod.rs | 9 +- crates/pixi_core/src/environment/mod.rs | 10 +- .../pixi_core/src/lock_file/install_subset.rs | 11 +- crates/pixi_core/src/lock_file/outdated.rs | 22 +- .../src/lock_file/records_by_name.rs | 2 +- .../pixi_core/src/lock_file/resolve/pypi.rs | 2 +- .../src/lock_file/satisfiability/mod.rs | 381 +++++++--- ...satisfiability@pypi-no-longer-dynamic.snap | 2 +- crates/pixi_core/src/lock_file/update.rs | 352 ++++++++- .../src/lock_file/virtual_packages.rs | 61 +- crates/pixi_core/src/workspace/mod.rs | 2 +- crates/pixi_diff/src/lib.rs | 96 ++- crates/pixi_record/src/lib.rs | 185 ++++- crates/pixi_record/src/pinned_source.rs | 12 +- ...d__tests__roundtrip_conda_source_data.snap | 12 - crates/pixi_record/src/source_record.rs | 705 ++++++++++++++---- .../test_fixtures/full_source_records.lock | 57 ++ crates/pixi_spec/src/lib.rs | 7 +- crates/pypi_modifiers/src/pypi_tags.rs | 5 + examples/pixi-build/cpp-sdl/pixi.lock | 662 ++++++++-------- .../pypi-dynamic-version/pixi.lock | 2 +- 31 files changed, 1879 insertions(+), 891 deletions(-) create mode 100644 crates/pixi_record/src/test_fixtures/full_source_records.lock diff --git a/Cargo.lock b/Cargo.lock index 5e3ecb64df..3abec9631a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1695,7 +1695,6 @@ dependencies = [ [[package]] name = "coalesced_map" version = "0.1.2" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "dashmap", "tokio", @@ -2742,7 +2741,6 @@ checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" [[package]] 
name = "file_url" version = "0.2.7" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "itertools 0.14.0", "percent-encoding", @@ -5408,7 +5406,6 @@ checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42" [[package]] name = "path_resolver" version = "0.2.7" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "fs-err", @@ -7422,7 +7419,6 @@ dependencies = [ [[package]] name = "rattler" version = "0.40.1" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "anyhow", "clap", @@ -7736,7 +7732,6 @@ dependencies = [ [[package]] name = "rattler_cache" version = "0.6.16" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "anyhow", @@ -7768,7 +7763,6 @@ dependencies = [ [[package]] name = "rattler_conda_types" version = "0.44.1" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "chrono", @@ -7810,7 +7804,6 @@ dependencies = [ [[package]] name = "rattler_config" version = "0.3.4" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "console 0.16.3", "fs-err", @@ -7827,7 +7820,6 @@ dependencies = [ [[package]] name = "rattler_digest" version = "1.2.3" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "blake2", "digest", @@ -7904,7 +7896,6 @@ dependencies = [ [[package]] name = "rattler_lock" version = "0.27.1" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ 
"ahash", "chrono", @@ -7930,7 +7921,6 @@ dependencies = [ [[package]] name = "rattler_macros" version = "1.0.12" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "quote", "syn", @@ -7939,7 +7929,6 @@ dependencies = [ [[package]] name = "rattler_menuinst" version = "0.2.51" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "chrono", "configparser", @@ -7969,7 +7958,6 @@ dependencies = [ [[package]] name = "rattler_networking" version = "0.26.4" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "anyhow", "async-once-cell", @@ -8002,7 +7990,6 @@ dependencies = [ [[package]] name = "rattler_package_streaming" version = "0.24.4" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "astral-tokio-tar 0.6.0", "astral_async_zip", @@ -8054,7 +8041,6 @@ dependencies = [ [[package]] name = "rattler_pty" version = "0.2.9" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "libc", "nix 0.30.1", @@ -8065,7 +8051,6 @@ dependencies = [ [[package]] name = "rattler_redaction" version = "0.1.13" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "reqwest 0.12.28", "reqwest-middleware", @@ -8075,7 +8060,6 @@ dependencies = [ [[package]] name = "rattler_repodata_gateway" version = "0.27.1" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "anyhow", @@ -8137,7 +8121,6 @@ dependencies = [ [[package]] name = "rattler_s3" version = "0.1.27" -source = 
"git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "aws-config", "aws-credential-types", @@ -8153,7 +8136,6 @@ dependencies = [ [[package]] name = "rattler_shell" version = "0.26.4" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "anyhow", "enum_dispatch", @@ -8173,7 +8155,6 @@ dependencies = [ [[package]] name = "rattler_solve" version = "5.0.1" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "chrono", "futures", @@ -8191,7 +8172,6 @@ dependencies = [ [[package]] name = "rattler_upload" version = "0.5.1" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "base64 0.22.1", "clap", @@ -8227,7 +8207,6 @@ dependencies = [ [[package]] name = "rattler_virtual_packages" version = "2.3.13" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "archspec", "libloading", @@ -9895,7 +9874,6 @@ dependencies = [ [[package]] name = "simple_spawn_blocking" version = "1.1.0" -source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "tokio", ] diff --git a/Cargo.toml b/Cargo.toml index 98781749d3..960efde26e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -256,25 +256,25 @@ reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "7650ed76215a962a96d94a79be71c27bffde7ab2" } version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "d8efd77673c9a90792da9da31b6c0da7ea8a324b" } -coalesced_map = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -file_url = { git 
= "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_cache = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_conda_types = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_config = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_digest = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_lock = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_menuinst = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_networking = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_package_streaming = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_repodata_gateway = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_shell = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_solve = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_upload = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_redaction = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_virtual_packages = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -rattler_s3 = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } -simple_spawn_blocking = { git = "https://github.com/conda/rattler", branch = "feature/lockfile-v7" } +coalesced_map = { path = "F:/projects/rattler/crates/coalesced_map" } +file_url = { path = "F:/projects/rattler/crates/file_url" } +rattler = { path = "F:/projects/rattler/crates/rattler" } +rattler_cache = { path = "F:/projects/rattler/crates/rattler_cache" } 
+rattler_conda_types = { path = "F:/projects/rattler/crates/rattler_conda_types" } +rattler_config = { path = "F:/projects/rattler/crates/rattler_config" } +rattler_digest = { path = "F:/projects/rattler/crates/rattler_digest" } +rattler_lock = { path = "F:/projects/rattler/crates/rattler_lock" } +rattler_menuinst = { path = "F:/projects/rattler/crates/rattler_menuinst" } +rattler_networking = { path = "F:/projects/rattler/crates/rattler_networking" } +rattler_package_streaming = { path = "F:/projects/rattler/crates/rattler_package_streaming" } +rattler_repodata_gateway = { path = "F:/projects/rattler/crates/rattler_repodata_gateway" } +rattler_shell = { path = "F:/projects/rattler/crates/rattler_shell" } +rattler_solve = { path = "F:/projects/rattler/crates/rattler_solve" } +rattler_upload = { path = "F:/projects/rattler/crates/rattler_upload" } +rattler_redaction = { path = "F:/projects/rattler/crates/rattler_redaction" } +rattler_virtual_packages = { path = "F:/projects/rattler/crates/rattler_virtual_packages" } +rattler_s3 = { path = "F:/projects/rattler/crates/rattler_s3" } +simple_spawn_blocking = { path = "F:/projects/rattler/crates/simple_spawn_blocking" } [patch."https://github.com/prefix-dev/rattler-build"] #rattler-build = { path = "/var/home/tobias/src/rattler-build" } diff --git a/crates/pixi_api/src/workspace/list/mod.rs b/crates/pixi_api/src/workspace/list/mod.rs index a3f2591a49..6cd2192191 100644 --- a/crates/pixi_api/src/workspace/list/mod.rs +++ b/crates/pixi_api/src/workspace/list/mod.rs @@ -7,7 +7,7 @@ use pixi_core::{ }; use pixi_manifest::FeaturesExt; use pixi_uv_conversions::{ConversionError, pypi_options_to_index_locations, to_uv_normalize}; -use pypi_modifiers::pypi_tags::{get_pypi_tags, is_python_record}; +use pypi_modifiers::pypi_tags::{get_pypi_tags, is_python_package_name}; use rattler_conda_types::Platform; use rattler_lock::LockedPackageRef; use uv_distribution::RegistryWheelIndex; @@ -71,7 +71,7 @@ pub async fn list( let mut 
conda_records = locked_deps_ext.iter().filter_map(|d| d.as_conda()); // Construct the registry index if we have a python record - let python_record = conda_records.find(|r| is_python_record(r)); + let python_record = conda_records.find(|r| is_python_package_name(r.name())); let tags; let uv_context; let index_locations; @@ -87,10 +87,11 @@ pub async fn list( index_locations = pypi_options_to_index_locations(&environment.pypi_options(), workspace.root()) .into_diagnostic()?; + let record = python_record.record().expect("python record should have full metadata"); tags = get_pypi_tags( platform, &environment.system_requirements(), - python_record.record(), + record, )?; Some(RegistryWheelIndex::new( &uv_context.cache, diff --git a/crates/pixi_api/src/workspace/list/package.rs b/crates/pixi_api/src/workspace/list/package.rs index cb09a5d597..14c22feb3e 100644 --- a/crates/pixi_api/src/workspace/list/package.rs +++ b/crates/pixi_api/src/workspace/list/package.rs @@ -57,18 +57,18 @@ impl Package { let kind = PackageKind::from(package); let build = match package { - PackageExt::Conda(pkg) => Some(pkg.record().build.clone()), + PackageExt::Conda(pkg) => pkg.record().map(|r| r.build.clone()), PackageExt::PyPI(_, _) => None, }; let build_number = match package { - PackageExt::Conda(pkg) => Some(pkg.record().build_number), + PackageExt::Conda(pkg) => pkg.record().map(|r| r.build_number), PackageExt::PyPI(_, _) => None, }; let (size_bytes, source) = match package { PackageExt::Conda(pkg) => ( - pkg.record().size, + pkg.record().and_then(|r| r.size), match pkg { CondaPackageData::Source(source) => Some(source.location.to_string()), CondaPackageData::Binary(binary) => binary @@ -115,17 +115,17 @@ impl Package { }; let license = match package { - PackageExt::Conda(pkg) => pkg.record().license.clone(), + PackageExt::Conda(pkg) => pkg.record().and_then(|r| r.license.clone()), PackageExt::PyPI(_, _) => None, }; let license_family = match package { - PackageExt::Conda(pkg) => 
pkg.record().license_family.clone(), + PackageExt::Conda(pkg) => pkg.record().and_then(|r| r.license_family.clone()), PackageExt::PyPI(_, _) => None, }; let md5 = match package { - PackageExt::Conda(pkg) => pkg.record().md5.map(|h| format!("{h:x}")), + PackageExt::Conda(pkg) => pkg.record().and_then(|r| r.md5.map(|h| format!("{h:x}"))), PackageExt::PyPI(p, _) => p .hash .as_ref() @@ -133,7 +133,7 @@ impl Package { }; let sha256 = match package { - PackageExt::Conda(pkg) => pkg.record().sha256.map(|h| format!("{h:x}")), + PackageExt::Conda(pkg) => pkg.record().and_then(|r| r.sha256.map(|h| format!("{h:x}"))), PackageExt::PyPI(p, _) => p .hash .as_ref() @@ -141,35 +141,37 @@ impl Package { }; let arch = match package { - PackageExt::Conda(pkg) => pkg.record().arch.clone(), + PackageExt::Conda(pkg) => pkg.record().and_then(|r| r.arch.clone()), PackageExt::PyPI(_, _) => None, }; let platform = match package { - PackageExt::Conda(pkg) => pkg.record().platform.clone(), + PackageExt::Conda(pkg) => pkg.record().and_then(|r| r.platform.clone()), PackageExt::PyPI(_, _) => None, }; let subdir = match package { - PackageExt::Conda(pkg) => Some(pkg.record().subdir.clone()), + PackageExt::Conda(pkg) => pkg.record().map(|r| r.subdir.clone()), PackageExt::PyPI(_, _) => None, }; let timestamp = match package { - PackageExt::Conda(pkg) => pkg.record().timestamp.map(|ts| ts.timestamp_millis()), + PackageExt::Conda(pkg) => pkg.record().and_then(|r| r.timestamp.map(|ts| ts.timestamp_millis())), PackageExt::PyPI(_, _) => None, }; let noarch = match package { PackageExt::Conda(pkg) => { - let noarch_type = &pkg.record().noarch; - if noarch_type.is_python() { - Some("python".to_string()) - } else if noarch_type.is_generic() { - Some("generic".to_string()) - } else { - None - } + pkg.record().and_then(|r| { + let noarch_type = &r.noarch; + if noarch_type.is_python() { + Some("python".to_string()) + } else if noarch_type.is_generic() { + Some("generic".to_string()) + } else { + None + } + 
}) } PackageExt::PyPI(_, _) => None, }; @@ -209,17 +211,17 @@ impl Package { }; let constrains = match package { - PackageExt::Conda(pkg) => pkg.record().constrains.clone(), + PackageExt::Conda(pkg) => pkg.record().map(|r| r.constrains.clone()).unwrap_or_default(), PackageExt::PyPI(_, _) => Vec::new(), }; let depends = match package { - PackageExt::Conda(pkg) => pkg.record().depends.clone(), + PackageExt::Conda(pkg) => pkg.record().map(|r| r.depends.clone()).unwrap_or_default(), PackageExt::PyPI(p, _) => p.requires_dist.iter().map(|r| r.to_string()).collect(), }; let track_features = match package { - PackageExt::Conda(pkg) => pkg.record().track_features.clone(), + PackageExt::Conda(pkg) => pkg.record().map(|r| r.track_features.clone()).unwrap_or_default(), PackageExt::PyPI(_, _) => Vec::new(), }; @@ -317,7 +319,7 @@ impl PackageExt { /// Returns the name of the package. pub fn name(&self) -> Cow<'_, str> { match self { - Self::Conda(value) => value.record().name.as_normalized().into(), + Self::Conda(value) => value.name().as_normalized().into(), Self::PyPI(value, _) => value.name.as_dist_info_name(), } } @@ -325,7 +327,7 @@ impl PackageExt { /// Returns the version string of the package pub fn version(&self) -> Option { match self { - Self::Conda(value) => Some(value.record().version.to_string()), + Self::Conda(value) => value.record().map(|r| r.version.to_string()), Self::PyPI(value, _) => value.version.as_ref().map(|v| v.to_string()), } } diff --git a/crates/pixi_cli/src/tree.rs b/crates/pixi_cli/src/tree.rs index fd15cc8380..9799bf5e39 100644 --- a/crates/pixi_cli/src/tree.rs +++ b/crates/pixi_cli/src/tree.rs @@ -123,11 +123,9 @@ pub(crate) fn extract_package_info( package: rattler_lock::LockedPackageRef<'_>, ) -> Option { if let Some(conda_package) = package.as_conda() { - let name = conda_package.record().name.as_normalized().to_string(); + let name = conda_package.name().as_normalized().to_string(); - let dependencies: Vec = conda_package - .record() - 
.depends + let dependencies: Vec = conda_package.depends() .iter() .map(|d| { d.split_once(' ') @@ -182,7 +180,7 @@ pub fn generate_dependency_map(locked_deps: &[LockedPackageRef<'_>]) -> HashMap< name: package_info.name, version: match package { LockedPackageRef::Conda(conda_data) => { - conda_data.record().version.to_string() + conda_data.record().map(|r| r.version.to_string()).unwrap_or_default() } LockedPackageRef::Pypi(pypi_data) => pypi_data.version_string(), }, diff --git a/crates/pixi_command_dispatcher/src/install_pixi/mod.rs b/crates/pixi_command_dispatcher/src/install_pixi/mod.rs index bc34e96b59..1c4a953ede 100644 --- a/crates/pixi_command_dispatcher/src/install_pixi/mod.rs +++ b/crates/pixi_command_dispatcher/src/install_pixi/mod.rs @@ -140,7 +140,7 @@ impl InstallPixiEnvironmentSpec { if self .ignore_packages .as_ref() - .is_some_and(|ignore| ignore.contains(&source_record.package_record.name)) + .is_some_and(|ignore| ignore.contains(&source_record.package_record().name)) { continue; } @@ -213,13 +213,13 @@ impl InstallPixiEnvironmentSpec { // Verify if we need to force the build even if the cache is up to date. 
let force = self .force_reinstall - .contains(&source_record.package_record.name); + .contains(&source_record.package_record().name); let built_source = command_dispatcher .source_build(SourceBuildSpec { source: PinnedSourceCodeLocation::new( - source_record.manifest_source.clone(), - source_record.build_source.clone(), + source_record.manifest_source().clone(), + source_record.build_source().cloned(), ), package: source_record.into(), channel_config: self.channel_config.clone(), @@ -227,7 +227,7 @@ impl InstallPixiEnvironmentSpec { build_environment: self.build_environment.clone(), variant_configuration: self.variant_configuration.clone(), variant_files: self.variant_files.clone(), - variants: source_record.variants.clone(), + variants: source_record.variants().clone(), enabled_protocols: self.enabled_protocols.clone(), output_directory: None, work_directory: None, @@ -253,8 +253,8 @@ pub enum InstallPixiEnvironmentError { Installer(InstallerError), #[error("failed to build '{}' from '{}'", - .0.package_record.name.as_source(), - .0.manifest_source)] + .0.package_record().name.as_source(), + .0.manifest_source())] BuildSourceError( Box, #[diagnostic_source] diff --git a/crates/pixi_command_dispatcher/src/package_identifier.rs b/crates/pixi_command_dispatcher/src/package_identifier.rs index cd2104ca98..bf88c68497 100644 --- a/crates/pixi_command_dispatcher/src/package_identifier.rs +++ b/crates/pixi_command_dispatcher/src/package_identifier.rs @@ -59,12 +59,12 @@ impl<'a> From<&'a PackageRecord> for PackageIdentifier { impl From for PackageIdentifier { fn from(record: SourceRecord) -> Self { - record.package_record.into() + record.data.package_record.into() } } impl<'a> From<&'a SourceRecord> for PackageIdentifier { fn from(record: &'a SourceRecord) -> Self { - (&record.package_record).into() + (&record.data.package_record).into() } } diff --git a/crates/pixi_command_dispatcher/src/solve_conda/mod.rs b/crates/pixi_command_dispatcher/src/solve_conda/mod.rs index 
6dee7041fa..275c84f7f8 100644 --- a/crates/pixi_command_dispatcher/src/solve_conda/mod.rs +++ b/crates/pixi_command_dispatcher/src/solve_conda/mod.rs @@ -125,7 +125,7 @@ impl SolveCondaEnvironmentSpec { .source_repodata .iter() .flat_map(|metadata| &metadata.records) - .map(|metadata| &metadata.package_record.name) + .map(|metadata| &metadata.package_record().name) .dedup() .collect::>(); @@ -190,13 +190,13 @@ impl SolveCondaEnvironmentSpec { for record in &source_metadata.records { let url = unique_url(record); let repodata_record = RepoDataRecord { - package_record: record.package_record.clone(), + package_record: record.data.package_record.clone(), url: url.clone(), identifier: DistArchiveIdentifier { identifier: ArchiveIdentifier { - name: record.package_record.name.as_normalized().to_string(), - version: record.package_record.version.to_string(), - build_string: format!("{}_source", record.package_record.build), + name: record.package_record().name.as_normalized().to_string(), + version: record.package_record().version.to_string(), + build_string: format!("{}_source", record.package_record().build), }, archive_type: CondaArchiveType::Conda.into(), }, @@ -341,14 +341,14 @@ impl SolveCondaEnvironmentSpec { /// Generates a unique URL for a source record. fn unique_url(source: &SourceRecord) -> Url { - let mut url = source.manifest_source.identifiable_url(); + let mut url = source.manifest_source().identifiable_url(); // Add unique identifiers to the URL. 
url.query_pairs_mut() - .append_pair("name", source.package_record.name.as_source()) - .append_pair("version", &source.package_record.version.as_str()) - .append_pair("build", &source.package_record.build) - .append_pair("subdir", &source.package_record.subdir); + .append_pair("name", source.package_record().name.as_source()) + .append_pair("version", &source.package_record().version.as_str()) + .append_pair("build", &source.package_record().build) + .append_pair("subdir", &source.package_record().subdir); url } diff --git a/crates/pixi_command_dispatcher/src/solve_pixi/source_metadata_collector.rs b/crates/pixi_command_dispatcher/src/solve_pixi/source_metadata_collector.rs index 48e21a8d4a..5948787f05 100644 --- a/crates/pixi_command_dispatcher/src/solve_pixi/source_metadata_collector.rs +++ b/crates/pixi_command_dispatcher/src/solve_pixi/source_metadata_collector.rs @@ -126,10 +126,10 @@ impl SourceMetadataCollector { // Process transitive dependencies for record in &source_metadata.records { - chain.push(record.package_record.name.clone()); + chain.push(record.package_record().name.clone()); let anchor = - SourceAnchor::from(SourceLocationSpec::from(record.manifest_source.clone())); - for depend in &record.package_record.depends { + SourceAnchor::from(SourceLocationSpec::from(record.manifest_source().clone())); + for depend in &record.package_record().depends { if let Ok(spec) = MatchSpec::from_str(depend, ParseStrictness::Lenient) { let (PackageNameMatcher::Exact(name), nameless_spec) = spec.clone().into_nameless() @@ -138,7 +138,7 @@ impl SourceMetadataCollector { "non exact packages names are not supported in {depend}" ); }; - if let Some(source_location) = record.sources.get(name.as_normalized()) { + if let Some(source_location) = record.sources().get(name.as_normalized()) { // We encountered a transitive source dependency. 
let resolved_location = anchor.resolve(source_location.clone()); specs.push(( diff --git a/crates/pixi_command_dispatcher/src/source_build/mod.rs b/crates/pixi_command_dispatcher/src/source_build/mod.rs index 4274e276ac..403482998e 100644 --- a/crates/pixi_command_dispatcher/src/source_build/mod.rs +++ b/crates/pixi_command_dispatcher/src/source_build/mod.rs @@ -517,7 +517,7 @@ impl SourceBuildSpec { PixiRecord::Source(source) => { let repodata_record = prefix .resolved_source_records - .get(&source.package_record.name) + .get(&source.data.package_record.name) .cloned() .expect("the source record should be present in the result sources"); BuildHostPackage { diff --git a/crates/pixi_command_dispatcher/src/source_metadata/mod.rs b/crates/pixi_command_dispatcher/src/source_metadata/mod.rs index 6c35ce58ff..ad570b667f 100644 --- a/crates/pixi_command_dispatcher/src/source_metadata/mod.rs +++ b/crates/pixi_command_dispatcher/src/source_metadata/mod.rs @@ -10,7 +10,7 @@ use futures::TryStreamExt; use itertools::{Either, Itertools}; use miette::Diagnostic; use pixi_build_types::procedures::conda_outputs::CondaOutput; -use pixi_record::{PixiRecord, SourceRecord}; +use pixi_record::{FullSourceRecordData, PixiRecord, SourceRecord}; use pixi_spec::{BinarySpec, PixiSpec, SourceAnchor, SourceLocationSpec, SpecConversionError}; use pixi_spec_containers::DependencyMap; use rattler_conda_types::{ @@ -200,11 +200,14 @@ impl SourceMetadataSpec { variants, sources, }| SourceRecord { - package_record, + data: FullSourceRecordData { + package_record, + sources, + }, variants, - sources, manifest_source: source.manifest_source().clone(), build_source: source.build_source().cloned(), + identifier_hash: None, }, ) .collect() diff --git a/crates/pixi_core/src/environment/mod.rs b/crates/pixi_core/src/environment/mod.rs index 9b94babd27..1718330fd1 100644 --- a/crates/pixi_core/src/environment/mod.rs +++ b/crates/pixi_core/src/environment/mod.rs @@ -189,10 +189,12 @@ impl 
LockedEnvironmentHash { match package { // A select set of fields are used to hash the package LockedPackageRef::Conda(pack) => { - if let Some(sha) = pack.record().sha256 { - sha.hash(&mut hasher); - } else if let Some(md5) = pack.record().md5 { - md5.hash(&mut hasher); + if let Some(record) = pack.record() { + if let Some(sha) = record.sha256 { + sha.hash(&mut hasher); + } else if let Some(md5) = record.md5 { + md5.hash(&mut hasher); + } } } LockedPackageRef::Pypi(_) => {} diff --git a/crates/pixi_core/src/lock_file/install_subset.rs b/crates/pixi_core/src/lock_file/install_subset.rs index b2e3b42745..989e3dd610 100644 --- a/crates/pixi_core/src/lock_file/install_subset.rs +++ b/crates/pixi_core/src/lock_file/install_subset.rs @@ -37,16 +37,7 @@ impl<'a> From> for PackageNode { let dependency_names: Vec = match package_ref { LockedPackageRef::Conda(conda_data) => { // Extract dependencies from conda data and parse as MatchSpec - let depends = match conda_data { - rattler_lock::CondaPackageData::Binary(binary_data) => { - &binary_data.package_record.depends - } - rattler_lock::CondaPackageData::Source(source_data) => { - &source_data.package_record.depends - } - }; - - depends + conda_data.depends() .iter() .filter_map(|dep_spec| { // Parse as MatchSpec to get the package name diff --git a/crates/pixi_core/src/lock_file/outdated.rs b/crates/pixi_core/src/lock_file/outdated.rs index 14e8e0228c..6d3a7c78a2 100644 --- a/crates/pixi_core/src/lock_file/outdated.rs +++ b/crates/pixi_core/src/lock_file/outdated.rs @@ -9,7 +9,7 @@ use futures::FutureExt; use futures::StreamExt; use futures::stream::FuturesUnordered; use itertools::Itertools; -use pixi_command_dispatcher::CommandDispatcher; +use pixi_command_dispatcher::{CommandDispatcher, CommandDispatcherError}; use pixi_consts::consts; use pixi_manifest::FeaturesExt; use rattler_conda_types::Platform; @@ -251,7 +251,19 @@ async fn find_unsatisfiable_targets<'p>( Ok(verified_env) => { 
verified_environments.insert((environment.clone(), platform), verified_env); } - Err(unsat) if unsat.is_pypi_only() => { + Err(CommandDispatcherError::Cancelled) => { + tracing::info!( + "the dependencies of environment '{0}' for platform {platform} are out of date because the operation was cancelled", + environment.name().fancy_display() + ); + + unsatisfiable_targets + .outdated_conda + .entry(environment.clone()) + .or_default() + .insert(platform); + } + Err(CommandDispatcherError::Failed(unsat)) if unsat.is_pypi_only() => { tracing::info!( "the pypi dependencies of environment '{0}' for platform {platform} are out of date because {unsat}", environment.name().fancy_display() @@ -263,7 +275,7 @@ async fn find_unsatisfiable_targets<'p>( .or_default() .insert(platform); } - Err(unsat) => { + Err(CommandDispatcherError::Failed(unsat)) => { tracing::info!( "the dependencies of environment '{0}' for platform {platform} are out of date because {unsat}", environment.name().fancy_display() @@ -434,10 +446,10 @@ fn find_inconsistent_solve_groups<'p>( { match package { LockedPackageRef::Conda(pkg) => { - match conda_packages_by_name.get(&pkg.record().name) { + match conda_packages_by_name.get(pkg.name()) { None => { conda_packages_by_name - .insert(pkg.record().name.clone(), pkg.location().clone()); + .insert(pkg.name().clone(), pkg.location().clone()); } Some(url) if pkg.location() != url => { conda_package_mismatch = true; diff --git a/crates/pixi_core/src/lock_file/records_by_name.rs b/crates/pixi_core/src/lock_file/records_by_name.rs index 00ed216793..d0f44dedfd 100644 --- a/crates/pixi_core/src/lock_file/records_by_name.rs +++ b/crates/pixi_core/src/lock_file/records_by_name.rs @@ -207,7 +207,7 @@ impl PixiRecordsByName { .map(move |identifiers| (idx, record, identifiers)) } PixiRecord::Source(source_record) => { - PypiPackageIdentifier::from_package_record(&source_record.package_record) + PypiPackageIdentifier::from_package_record(source_record.package_record()) 
.ok() .map(move |identifiers| (idx, record, identifiers)) } diff --git a/crates/pixi_core/src/lock_file/resolve/pypi.rs b/crates/pixi_core/src/lock_file/resolve/pypi.rs index e401bfa330..6ba3d07b6c 100644 --- a/crates/pixi_core/src/lock_file/resolve/pypi.rs +++ b/crates/pixi_core/src/lock_file/resolve/pypi.rs @@ -309,7 +309,7 @@ pub async fn resolve_pypi( PypiPackageIdentifier::from_repodata_record(repodata_record) } PixiRecord::Source(source_record) => { - PypiPackageIdentifier::from_package_record(&source_record.package_record) + PypiPackageIdentifier::from_package_record(source_record.package_record()) } }; diff --git a/crates/pixi_core/src/lock_file/satisfiability/mod.rs b/crates/pixi_core/src/lock_file/satisfiability/mod.rs index e6e1252942..5148bd740b 100644 --- a/crates/pixi_core/src/lock_file/satisfiability/mod.rs +++ b/crates/pixi_core/src/lock_file/satisfiability/mod.rs @@ -26,7 +26,7 @@ use pixi_manifest::{ use pixi_pypi_spec::PixiPypiSource; use pixi_record::{ DevSourceRecord, LockedGitUrl, ParseLockFileError, PinnedBuildSourceSpec, PinnedSourceSpec, - PixiRecord, SourceMismatchError, SourceRecord, VariantValue, + PixiRecord, SourceMismatchError, SourceRecord, UnresolvedPixiRecord, VariantValue, }; use pixi_spec::{ PixiSpec, SourceAnchor, SourceLocationSpec, SourceSpec, SpecConversionError, Subdirectory, @@ -878,9 +878,10 @@ pub async fn verify_platform_satisfiability( locked_environment: rattler_lock::Environment<'_>, platform: Platform, project_root: &Path, -) -> Result> { - // Convert the lock file into a list of conda and pypi packages - let mut pixi_records: Vec = Vec::new(); +) -> Result>> { + // Convert the lock file into a list of conda and pypi packages. + // Read as UnresolvedPixiRecord first, then resolve any partial source records. 
+ let mut unresolved_records: Vec = Vec::new(); let mut pypi_packages: Vec = Vec::new(); let lock_platform = locked_environment .lock_file() @@ -893,9 +894,10 @@ pub async fn verify_platform_satisfiability( match package { LockedPackageRef::Conda(conda) => { let url = conda.location().clone(); - pixi_records.push( - PixiRecord::from_conda_package_data(conda.clone(), project_root) - .map_err(|e| PlatformUnsat::CorruptedEntry(url.to_string(), e))?, + unresolved_records.push( + UnresolvedPixiRecord::from_conda_package_data(conda.clone(), project_root) + .map_err(|e| Box::new(PlatformUnsat::CorruptedEntry(url.to_string(), e))) + .map_err(CommandDispatcherError::Failed)?, ); } LockedPackageRef::Pypi(pypi) => { @@ -904,6 +906,107 @@ pub async fn verify_platform_satisfiability( } } + // Resolve any partial source records using source_metadata(). + let pixi_records: Vec = { + let has_partials = unresolved_records.iter().any(|r| r.is_partial()); + if has_partials { + let channel_config = environment.workspace().channel_config(); + let channels: Vec = environment + .channels() + .into_iter() + .cloned() + .map(|c| c.into_base_url(&channel_config)) + .collect::, _>>() + .map_err(|e| { + CommandDispatcherError::Failed(Box::new(PlatformUnsat::InvalidChannel(e))) + })?; + let VariantConfig { + variant_configuration, + variant_files, + } = environment.workspace().variants(platform).map_err(|e| { + CommandDispatcherError::Failed(Box::new(PlatformUnsat::Variants(e))) + })?; + let virtual_packages: Vec = environment + .virtual_packages(platform) + .into_iter() + .map(GenericVirtualPackage::from) + .collect(); + + let mut resolved = Vec::with_capacity(unresolved_records.len()); + for record in unresolved_records { + match record.try_into_resolved() { + Ok(pixi_record) => resolved.push(pixi_record), + Err(partial) => { + let source = partial.as_source().expect("partial must be source"); + let spec = SourceMetadataSpec { + package: source.name().clone(), + backend_metadata: 
BuildBackendMetadataSpec { + manifest_source: source.manifest_source().clone(), + preferred_build_source: source + .build_source() + .cloned() + .map(PinnedBuildSourceSpec::into_pinned), + channel_config: channel_config.clone(), + channels: channels.clone(), + build_environment: BuildEnvironment { + host_platform: platform, + build_platform: platform, + host_virtual_packages: virtual_packages.clone(), + build_virtual_packages: virtual_packages.clone(), + }, + variant_configuration: Some(variant_configuration.clone()), + variant_files: Some(variant_files.clone()), + enabled_protocols: EnabledProtocols::default(), + }, + }; + + let partial_name = source.name().clone(); + let partial_variants = source.variants().clone(); + + let metadata = command_dispatcher + .source_metadata(spec) + .await + .map_err_with(|e| Box::new(PlatformUnsat::SourceMetadata(e)))?; + + let matched = metadata + .records + .iter() + .find(|r| { + r.name() == &partial_name + && (partial_variants.is_empty() + || r.variants() == &partial_variants) + }) + .ok_or_else(|| { + CommandDispatcherError::Failed(Box::new( + PlatformUnsat::SourcePackageNotFoundInMetadata { + package_name: partial_name.as_source().to_string(), + manifest_path: source + .manifest_source() + .as_path() + .map(|p| p.path.to_string()) + .unwrap_or_else(|| { + source.manifest_source().to_string() + }), + }, + )) + })?; + + resolved.push(PixiRecord::Source(matched.clone())); + } + } + } + resolved + } else { + unresolved_records + .into_iter() + .map(|r| { + r.try_into_resolved() + .expect("all records verified as non-partial") + }) + .collect() + } + }; + // to reflect new purls for pypi packages // we need to invalidate the locked environment // if all conda packages have empty purls @@ -915,7 +1018,9 @@ pub async fn verify_platform_satisfiability( .all(|record| record.package_record.purls.is_none()) { { - return Err(Box::new(PlatformUnsat::MissingPurls)); + return Err(CommandDispatcherError::Failed(Box::new( + 
PlatformUnsat::MissingPurls, + ))); } } @@ -924,8 +1029,10 @@ pub async fn verify_platform_satisfiability( let pixi_records_by_name = match PixiRecordsByName::from_unique_iter(pixi_records) { Ok(pixi_records) => pixi_records, Err(duplicate) => { - return Err(Box::new(PlatformUnsat::DuplicateEntry( - duplicate.package_record().name.as_source().to_string(), + return Err(CommandDispatcherError::Failed(Box::new( + PlatformUnsat::DuplicateEntry( + duplicate.package_record().name.as_source().to_string(), + ), ))); } }; @@ -935,8 +1042,8 @@ pub async fn verify_platform_satisfiability( let pypi_records_by_name = match PypiRecordsByName::from_unique_iter(pypi_packages) { Ok(pypi_packages) => pypi_packages, Err(duplicate) => { - return Err(Box::new(PlatformUnsat::DuplicateEntry( - duplicate.name.to_string(), + return Err(CommandDispatcherError::Failed(Box::new( + PlatformUnsat::DuplicateEntry(duplicate.name.to_string()), ))); } }; @@ -1294,12 +1401,12 @@ async fn verify_source_metadata( results.push(async move { // Build source metadata spec to request current package metadata let source_metadata_spec = SourceMetadataSpec { - package: source_record.package_record.name.clone(), + package: source_record.name().clone(), backend_metadata: BuildBackendMetadataSpec { - manifest_source: source_record.manifest_source.clone(), + manifest_source: source_record.manifest_source().clone(), preferred_build_source: source_record - .build_source - .clone() + .build_source() + .cloned() .map(PinnedBuildSourceSpec::into_pinned), channel_config, channels: channel_urls, @@ -1345,12 +1452,12 @@ fn verify_locked_source_record( ) -> Result<(), Box> { if current_source_metadata.records.is_empty() { return Err(Box::new(PlatformUnsat::SourcePackageNotFoundInMetadata { - package_name: source_record.package_record.name.as_source().to_string(), + package_name: source_record.name().as_source().to_string(), manifest_path: source_record - .manifest_source + .manifest_source() .as_path() .map(|p| 
p.path.to_string()) - .unwrap_or_else(|| source_record.manifest_source.to_string()), + .unwrap_or_else(|| source_record.manifest_source().to_string()), })); } @@ -1365,10 +1472,10 @@ fn verify_locked_source_record( let Some(current_record) = current_record else { let manifest_path = source_record - .manifest_source + .manifest_source() .as_path() .map(|p| p.path.to_string()) - .unwrap_or_else(|| source_record.manifest_source.to_string()); + .unwrap_or_else(|| source_record.manifest_source().to_string()); return Err(Box::new(PlatformUnsat::NoMatchingSourcePackageInMetadata { package: format_source_record(source_record), manifest_path, @@ -1381,26 +1488,24 @@ fn verify_locked_source_record( }; // Check if the build source location changed - if current_record.build_source != source_record.build_source { + if current_record.build_source() != source_record.build_source() { return Err(Box::new(PlatformUnsat::SourceBuildLocationChanged( - source_record.package_record.name.as_source().to_string(), + source_record.name().as_source().to_string(), source_record - .build_source - .as_ref() + .build_source() .map(|s| s.to_string()) .unwrap_or_default(), current_record - .build_source - .as_ref() + .build_source() .map(|s| s.to_string()) .unwrap_or_default(), ))); } // Check if the source dependencies match - let package_name = source_record.package_record.name.as_source().to_string(); - for (source_name, locked_source_spec) in &source_record.sources { - match current_record.sources.get(source_name) { + let package_name = source_record.name().as_source().to_string(); + for (source_name, locked_source_spec) in source_record.sources() { + match current_record.sources().get(source_name) { Some(current_source_spec) => { if locked_source_spec != current_source_spec { return Err(Box::new(PlatformUnsat::SourceDependencyChanged { @@ -1423,8 +1528,8 @@ fn verify_locked_source_record( } // Check if there are any new sources in current that weren't in locked - for (source_name, 
current_source_spec) in &current_record.sources { - if !source_record.sources.contains_key(source_name) { + for (source_name, current_source_spec) in current_record.sources() { + if !source_record.sources().contains_key(source_name.as_str()) { return Err(Box::new(PlatformUnsat::SourceDependencyChanged { package: package_name.clone(), dependency: source_name.clone(), @@ -1435,15 +1540,15 @@ fn verify_locked_source_record( } // Check if the package record metadata matches - let package_name = source_record.package_record.name.as_source(); + let package_name = source_record.name().as_source(); tracing::trace!( "Checking package record equality for '{}' (current vs locked)", package_name ); if let Err(reason) = package_records_are_equal( - &current_record.package_record, - &source_record.package_record, + current_record.package_record(), + source_record.package_record(), ) { return Err(Box::new(PlatformUnsat::SourcePackageMetadataChanged( package_name.to_string(), @@ -1598,16 +1703,16 @@ fn package_records_are_equal(a: &PackageRecord, b: &PackageRecord) -> Result<(), fn format_source_record(r: &SourceRecord) -> String { let variants = format!( "[{}]", - r.variants + r.variants() .iter() .format_with(", ", |(k, v), f| f(&format_args!("{k}={v}"))) ); format!( "{}/{}={}={} {}", - &r.package_record.subdir, - r.package_record.name.as_source(), - &r.package_record.version, - &r.package_record.build, + &r.package_record().subdir, + r.package_record().name.as_source(), + &r.package_record().version, + &r.package_record().build, variants, ) } @@ -1767,7 +1872,7 @@ pub(crate) async fn verify_package_platform_satisfiability( locked_pypi_environment: &PypiRecordsByName, platform: Platform, project_root: &Path, -) -> Result> { +) -> Result>> { // Determine the dependencies requested by the environment let environment_dependencies = environment .combined_dependencies(Some(platform)) @@ -1797,7 +1902,8 @@ pub(crate) async fn verify_package_platform_satisfiability( })?; Ok((uv_req.name.clone(), 
uv_req)) }) - .collect::, _>>()?; + .collect::, _>>() + .map_err(CommandDispatcherError::Failed)?; // Find the python interpreter from the list of conda packages. Note that this // refers to the locked python interpreter, it might not match the specs @@ -1823,7 +1929,7 @@ pub(crate) async fn verify_package_platform_satisfiability( let marker_environment = match marker_environment { Err(err) => { if !pypi_dependencies.is_empty() { - return Err(err); + return Err(CommandDispatcherError::Failed(err)); } else { None } @@ -1850,11 +1956,12 @@ pub(crate) async fn verify_package_platform_satisfiability( )) }) }) - .collect::, _>>()?; + .collect::, _>>() + .map_err(CommandDispatcherError::Failed)?; if pypi_requirements.is_empty() && !locked_pypi_environment.is_empty() { - return Err(Box::new(PlatformUnsat::TooManyPypiPackages( - locked_pypi_environment.names().cloned().collect(), + return Err(CommandDispatcherError::Failed(Box::new( + PlatformUnsat::TooManyPypiPackages(locked_pypi_environment.names().cloned().collect()), ))); } @@ -1877,7 +1984,8 @@ pub(crate) async fn verify_package_platform_satisfiability( .iter() .map(|c| c.clone().into_base_url(&channel_config)) .collect::, _>>() - .map_err(|e| Box::new(PlatformUnsat::InvalidChannel(e)))?; + .map_err(|e| Box::new(PlatformUnsat::InvalidChannel(e))) + .map_err(CommandDispatcherError::Failed)?; // Check that all locked conda packages satisfy the current constraints. // If a constraint is violated, the lock file needs to be re-solved. @@ -1888,8 +1996,10 @@ pub(crate) async fn verify_package_platform_satisfiability( // Source specs are not valid in [constraints]; raise an error. 
let binary_spec = match pixi_spec.into_source_or_binary() { Either::Left(_) => { - return Err(Box::new(PlatformUnsat::SourceConstraintNotSupported( - package_name.as_source().to_string(), + return Err(CommandDispatcherError::Failed(Box::new( + PlatformUnsat::SourceConstraintNotSupported( + package_name.as_source().to_string(), + ), ))); } Either::Right(binary_spec) => binary_spec, @@ -1908,10 +2018,10 @@ pub(crate) async fn verify_package_platform_satisfiability( SpecConversionError::InvalidChannel(_name, p) => p.into(), SpecConversionError::MissingName => ParseMatchSpecError::MissingPackageName, }; - Box::new(PlatformUnsat::FailedToParseMatchSpec( + CommandDispatcherError::Failed(Box::new(PlatformUnsat::FailedToParseMatchSpec( package_name.as_source().to_string(), parse_err, - )) + ))) })?; // Only check packages that are actually locked; constraints only apply // to installed packages. Source records are excluded because they are @@ -1920,11 +2030,13 @@ pub(crate) async fn verify_package_platform_satisfiability( && let Some(binary_record) = locked_record.as_binary() && !nameless_spec.matches(&binary_record.package_record) { - return Err(Box::new(PlatformUnsat::ConstraintViolated { - package: package_name.as_source().to_string(), - locked_version: binary_record.package_record.version.to_string(), - constraint: nameless_spec.to_string(), - })); + return Err(CommandDispatcherError::Failed(Box::new( + PlatformUnsat::ConstraintViolated { + package: package_name.as_source().to_string(), + locked_version: binary_record.package_record.version.to_string(), + constraint: nameless_spec.to_string(), + }, + ))); } } @@ -1935,7 +2047,7 @@ pub(crate) async fn verify_package_platform_satisfiability( } = environment .workspace() .variants(platform) - .map_err(|e| Box::new(PlatformUnsat::Variants(e)))?; + .map_err(|e| CommandDispatcherError::Failed(Box::new(PlatformUnsat::Variants(e))))?; let build_environment = BuildEnvironment::simple(platform, 
virtual_packages.values().cloned().collect()); @@ -1974,26 +2086,29 @@ pub(crate) async fn verify_package_platform_satisfiability( // dropped early. let (dev_deps_result, source_metadata_result) = futures::join!(dev_deps_future, source_metadata_future); - if let Err(CommandDispatcherError::Failed(e)) = source_metadata_result { - return Err(e); - } - let resolved_dev_dependencies = match dev_deps_result { - Ok(deps) => deps, - Err(CommandDispatcherError::Cancelled) => Vec::new(), - Err(CommandDispatcherError::Failed(e)) => return Err(e), - }; + + let resolved_dev_dependencies = match (dev_deps_result, source_metadata_result) { + // If any errored, we error. + (Err(CommandDispatcherError::Failed(e)), _) + | (_, Err(CommandDispatcherError::Failed(e))) => Err(CommandDispatcherError::Failed(e)), + // Otherwise, if any was cancelled, we return cancelled. + (Err(CommandDispatcherError::Cancelled), _) + | (_, Err(CommandDispatcherError::Cancelled)) => Err(CommandDispatcherError::Cancelled), + (Ok(resolved_dev_dependencies), _) => Ok(resolved_dev_dependencies), + }?; if (environment_dependencies.is_empty() && resolved_dev_dependencies.is_empty()) && !locked_pixi_records.is_empty() { - return Err(Box::new(PlatformUnsat::TooManyCondaPackages(Vec::new()))); + return Err(CommandDispatcherError::Failed(Box::new( + PlatformUnsat::TooManyCondaPackages(Vec::new()), + ))); } // Determine the pypi packages provided by the locked conda packages. 
let locked_conda_pypi_packages = locked_pixi_records .by_pypi_name() - .map_err(From::from) - .map_err(Box::new)?; + .map_err(|e| CommandDispatcherError::Failed(Box::new(e.into())))?; // Keep a list of all conda packages that we have already visited let mut conda_packages_visited = HashSet::new(); @@ -2026,12 +2141,8 @@ pub(crate) async fn verify_package_platform_satisfiability( let found_package = match spec.into_source_or_binary() { Either::Left(source_spec) => { expected_conda_source_dependencies.insert(name.clone()); - find_matching_source_package( - locked_pixi_records, - name, - source_spec, - source, - )? + find_matching_source_package(locked_pixi_records, name, source_spec, source) + .map_err(CommandDispatcherError::Failed)? } Either::Right(binary_spec) => { let spec = match binary_spec.try_into_nameless_match_spec(&channel_config) { @@ -2051,9 +2162,11 @@ pub(crate) async fn verify_package_platform_satisfiability( ParseMatchSpecError::MissingPackageName } }; - return Err(Box::new(PlatformUnsat::FailedToParseMatchSpec( - name.as_source().to_string(), - parse_channel_err, + return Err(CommandDispatcherError::Failed(Box::new( + PlatformUnsat::FailedToParseMatchSpec( + name.as_source().to_string(), + parse_channel_err, + ), ))); } Ok(spec) => spec, @@ -2063,7 +2176,9 @@ pub(crate) async fn verify_package_platform_satisfiability( &virtual_packages, MatchSpec::from_nameless(spec, name.into()), source, - )? { + ) + .map_err(CommandDispatcherError::Failed)? + { Some(pkg) => pkg, None => continue, } @@ -2075,7 +2190,9 @@ pub(crate) async fn verify_package_platform_satisfiability( FoundPackage::Conda(found_package) } Dependency::Conda(spec, source) => { - match find_matching_package(locked_pixi_records, &virtual_packages, spec, source)? { + match find_matching_package(locked_pixi_records, &virtual_packages, spec, source) + .map_err(CommandDispatcherError::Failed)? 
+ { Some(pkg) => { expected_conda_packages .insert(locked_pixi_records.records[pkg.0].name().clone()); @@ -2086,12 +2203,10 @@ pub(crate) async fn verify_package_platform_satisfiability( } Dependency::CondaSource(name, source_spec, source) => { expected_conda_source_dependencies.insert(name.clone()); - FoundPackage::Conda(find_matching_source_package( - locked_pixi_records, - name, - source_spec, - source, - )?) + FoundPackage::Conda( + find_matching_source_package(locked_pixi_records, name, source_spec, source) + .map_err(CommandDispatcherError::Failed)?, + ) } Dependency::PyPi(requirement, source) => { // Check if there is a pypi identifier that matches our requirement. @@ -2129,7 +2244,10 @@ pub(crate) async fn verify_package_platform_satisfiability( .cloned() .unwrap_or(requirement.clone()); - if !identifier.satisfies(&requirement_to_check)? { + if !identifier + .satisfies(&requirement_to_check) + .map_err(CommandDispatcherError::Failed)? + { // The record does not match the spec, the lock-file is inconsistent. 
delayed_pypi_error.get_or_insert_with(|| { Box::new(PlatformUnsat::CondaUnsatisfiableRequirement( @@ -2206,8 +2324,11 @@ pub(crate) async fn verify_package_platform_satisfiability( let record = &locked_pixi_records.records[idx.0]; for depends in &record.package_record().depends { - let spec = MatchSpec::from_str(depends.as_str(), Lenient) - .map_err(|e| PlatformUnsat::FailedToParseMatchSpec(depends.clone(), e))?; + let spec = MatchSpec::from_str(depends.as_str(), Lenient).map_err(|e| { + CommandDispatcherError::Failed(Box::new( + PlatformUnsat::FailedToParseMatchSpec(depends.clone(), e), + )) + })?; let (name, spec) = spec.into_nameless(); let (origin, anchor) = match record { @@ -2218,7 +2339,7 @@ pub(crate) async fn verify_package_platform_satisfiability( PixiRecord::Source(record) => ( Cow::Owned(format!( "{} @ {}", - record.package_record.name.as_source(), + record.name().as_source(), &record.manifest_source )), SourceLocationSpec::from(record.manifest_source.clone()).into(), @@ -2230,7 +2351,7 @@ pub(crate) async fn verify_package_platform_satisfiability( .as_exact() .expect("depends can only contain exact package names"); Some(( - record.sources.get(package_name.as_normalized())?, + record.sources().get(package_name.as_normalized())?, package_name, )) }) { @@ -2254,7 +2375,9 @@ pub(crate) async fn verify_package_platform_satisfiability( // If there is no marker environment there is no python version let Some(marker_environment) = marker_environment.as_ref() else { - return Err(Box::new(PlatformUnsat::MissingPythonInterpreter)); + return Err(CommandDispatcherError::Failed(Box::new( + PlatformUnsat::MissingPythonInterpreter, + ))); }; if pypi_packages_visited.insert(idx) { @@ -2357,18 +2480,20 @@ pub(crate) async fn verify_package_platform_satisfiability( // Check if all locked packages have also been visited if conda_packages_visited.len() != locked_pixi_records.len() { - return Err(Box::new(PlatformUnsat::TooManyCondaPackages( - locked_pixi_records - .names() 
- .enumerate() - .filter_map(|(idx, name)| { - if conda_packages_visited.contains(&CondaPackageIdx(idx)) { - None - } else { - Some(name.clone()) - } - }) - .collect(), + return Err(CommandDispatcherError::Failed(Box::new( + PlatformUnsat::TooManyCondaPackages( + locked_pixi_records + .names() + .enumerate() + .filter_map(|(idx, name)| { + if conda_packages_visited.contains(&CondaPackageIdx(idx)) { + None + } else { + Some(name.clone()) + } + }) + .collect(), + ), ))); } @@ -2380,9 +2505,9 @@ pub(crate) async fn verify_package_platform_satisfiability( .iter() .filter_map(PixiRecord::as_source) { - if !expected_conda_source_dependencies.contains(&record.package_record.name) { - return Err(Box::new(PlatformUnsat::RequiredBinaryIsSource( - record.package_record.name.as_source().to_string(), + if !expected_conda_source_dependencies.contains(record.name()) { + return Err(CommandDispatcherError::Failed(Box::new( + PlatformUnsat::RequiredBinaryIsSource(record.name().as_source().to_string()), ))); } } @@ -2390,22 +2515,24 @@ pub(crate) async fn verify_package_platform_satisfiability( // Now that we checked all conda requirements, check if there were any pypi // issues. 
if let Some(err) = delayed_pypi_error { - return Err(err); + return Err(CommandDispatcherError::Failed(err)); } if pypi_packages_visited.len() != locked_pypi_environment.len() { - return Err(Box::new(PlatformUnsat::TooManyPypiPackages( - locked_pypi_environment - .names() - .enumerate() - .filter_map(|(idx, name)| { - if pypi_packages_visited.contains(&PypiPackageIdx(idx)) { - None - } else { - Some(name.clone()) - } - }) - .collect(), + return Err(CommandDispatcherError::Failed(Box::new( + PlatformUnsat::TooManyPypiPackages( + locked_pypi_environment + .names() + .enumerate() + .filter_map(|(idx, name)| { + if pypi_packages_visited.contains(&PypiPackageIdx(idx)) { + None + } else { + Some(name.clone()) + } + }) + .collect(), + ), ))); } @@ -2416,7 +2543,8 @@ pub(crate) async fn verify_package_platform_satisfiability( // the same path-based package. // Verify the pixi build package's package_build_source matches the manifest. - verify_build_source_matches_manifest(environment, locked_pixi_records)?; + verify_build_source_matches_manifest(environment, locked_pixi_records) + .map_err(CommandDispatcherError::Failed)?; Ok(VerifiedIndividualEnvironment { expected_conda_packages, @@ -2631,12 +2759,12 @@ fn verify_build_source_matches_manifest( let ok = Ok(()); let error = Err(Box::new(PlatformUnsat::PackageBuildSourceMismatch( - src_record.package_record.name.as_source().to_string(), + src_record.name().as_source().to_string(), SourceMismatchError::SourceTypeMismatch, ))); let sat_err = |e| { Box::new(PlatformUnsat::PackageBuildSourceMismatch( - src_record.package_record.name.as_source().to_string(), + src_record.name().as_source().to_string(), e, )) }; @@ -2755,7 +2883,14 @@ mod tests { project.root(), ) .await - .map_err(|e| LockfileUnsat::PlatformUnsat(env.name().to_string(), platform, *e))?; + .map_err(|e| match e { + CommandDispatcherError::Failed(e) => { + LockfileUnsat::PlatformUnsat(env.name().to_string(), platform, *e) + } + CommandDispatcherError::Cancelled => 
{ + panic!("operation was cancelled which should never happen here") + } + })?; individual_verified_envs.insert((env.name(), platform), verified_env); } diff --git a/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-no-longer-dynamic.snap b/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-no-longer-dynamic.snap index a6515b6004..1c526262dc 100644 --- a/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-no-longer-dynamic.snap +++ b/crates/pixi_core/src/lock_file/satisfiability/snapshots/pixi_core__lock_file__satisfiability__tests__failing_satisfiability@pypi-no-longer-dynamic.snap @@ -5,4 +5,4 @@ expression: s environment 'default' does not satisfy the requirements of the project for platform 'win-64' Diagnostic severity: error Caused by: source tree hash for dynamic-dep does not match the hash in the lock-file - Caused by: the computed source tree hash is '50db39844d97e7135f4c4f85d97e02b97f82179de450ca879c12a8fd5db456cc', but the lock-file contains '93b37bda9cded35069601a8e2d2cecdba7a13fe9ac356985c53784c26461911d' + Caused by: the computed source tree hash is 'f63894414ac1fb1dd627b8c8bd3998d9232ae5d0b900c7a7498f9c35c3678b72', but the lock-file contains '93b37bda9cded35069601a8e2d2cecdba7a13fe9ac356985c53784c26461911d' diff --git a/crates/pixi_core/src/lock_file/update.rs b/crates/pixi_core/src/lock_file/update.rs index ce0c25ad9a..3a0672fa80 100644 --- a/crates/pixi_core/src/lock_file/update.rs +++ b/crates/pixi_core/src/lock_file/update.rs @@ -18,9 +18,11 @@ use indexmap::{IndexMap, IndexSet}; use indicatif::ProgressBar; use itertools::{Either, Itertools}; use miette::{Diagnostic, IntoDiagnostic, MietteDiagnostic, Report, WrapErr}; +use pixi_build_discovery::EnabledProtocols; use pixi_command_dispatcher::{ - BuildEnvironment, 
CommandDispatcher, CommandDispatcherError, CommandDispatcherErrorResultExt, - PixiEnvironmentSpec, SolvePixiEnvironmentError, executor::CancellationAwareFutures, + BuildBackendMetadataSpec, BuildEnvironment, CommandDispatcher, CommandDispatcherError, + CommandDispatcherErrorResultExt, PixiEnvironmentSpec, SolvePixiEnvironmentError, + SourceMetadataSpec, executor::CancellationAwareFutures, }; use pixi_consts::consts; use pixi_glob::GlobHashCache; @@ -30,7 +32,10 @@ use pixi_install_pypi::{ }; use pixi_manifest::{ChannelPriority, EnvironmentName, FeaturesExt}; use pixi_progress::global_multi_progress; -use pixi_record::{ParseLockFileError, PixiRecord}; +use pixi_record::{ + ParseLockFileError, PinnedBuildSourceSpec, PixiRecord, UnresolvedPixiRecord, + UnresolvedSourceRecord, VariantValue, +}; use pixi_utils::{prefix::Prefix, variants::VariantConfig}; use pixi_uv_context::UvResolutionContext; use pixi_uv_conversions::{ @@ -40,7 +45,9 @@ use pixi_uv_conversions::{ use pypi_mapping::{self, MappingClient}; use pypi_modifiers::pypi_marker_env::determine_marker_environment; use rattler::package_cache::PackageCache; -use rattler_conda_types::{Arch, GenericVirtualPackage, PackageName, ParseChannelError, Platform}; +use rattler_conda_types::{ + Arch, ChannelUrl, GenericVirtualPackage, PackageName, ParseChannelError, Platform, +}; use rattler_lock::{LockFile, LockedPackageRef, ParseCondaLockError}; use serde::{Deserialize, Serialize}; use thiserror::Error; @@ -694,12 +701,41 @@ impl<'p> LockFileDerivedData<'p> { let (ignored_conda, ignored_pypi): (HashSet<_>, HashSet<_>) = ignored.into_iter().partition_map(|p| match p { - LockedPackageRef::Conda(data) => Either::Left(data.record().name.clone()), + LockedPackageRef::Conda(data) => Either::Left(data.name().clone()), LockedPackageRef::Pypi(data) => Either::Right(data.name.clone()), }); - let pixi_records = - locked_packages_to_pixi_records(conda_packages, self.workspace.root())?; + let pixi_records = { + let channel_config = 
environment.channel_config(); + let channels: Vec = environment + .channels() + .into_iter() + .cloned() + .map(|c| c.into_base_url(&channel_config)) + .collect::, _>>() + .into_diagnostic()?; + let VariantConfig { + variant_configuration, + variant_files, + } = self.workspace.variants(platform).into_diagnostic()?; + let virtual_packages: Vec = environment + .virtual_packages(platform) + .into_iter() + .map(GenericVirtualPackage::from) + .collect(); + locked_packages_to_pixi_records( + conda_packages, + self.workspace.root(), + &self.command_dispatcher, + channel_config, + channels, + variant_configuration, + variant_files, + virtual_packages, + platform, + ) + .await? + }; // Get the manifest's pypi dependencies for this environment to look up editability. // The lock file always stores editable=false, so we apply the actual @@ -900,7 +936,37 @@ impl<'p> LockFileDerivedData<'p> { } else { Vec::new() }; - let records = locked_packages_to_pixi_records(packages, self.workspace.root())?; + let records = { + let channel_config = environment.channel_config(); + let channels: Vec = environment + .channels() + .into_iter() + .cloned() + .map(|c| c.into_base_url(&channel_config)) + .collect::, _>>() + .into_diagnostic()?; + let VariantConfig { + variant_configuration, + variant_files, + } = self.workspace.variants(platform).into_diagnostic()?; + let virtual_packages: Vec = environment + .virtual_packages(platform) + .into_iter() + .map(GenericVirtualPackage::from) + .collect(); + locked_packages_to_pixi_records( + packages, + self.workspace.root(), + &self.command_dispatcher, + channel_config, + channels, + variant_configuration, + variant_files, + virtual_packages, + platform, + ) + .await? 
+ }; // Update the conda prefix let CondaPrefixUpdated { @@ -989,18 +1055,166 @@ impl PackageFilterNames { } } -fn locked_packages_to_pixi_records( +fn locked_packages_to_unresolved_records( conda_packages: Vec>, workspace_root: &std::path::Path, -) -> Result, Report> { - let pixi_records = conda_packages +) -> Result, Report> { + conda_packages .into_iter() .filter_map(LockedPackageRef::as_conda) .cloned() - .map(|data| PixiRecord::from_conda_package_data(data, workspace_root)) + .map(|data| UnresolvedPixiRecord::from_conda_package_data(data, workspace_root)) .collect::, _>>() + .into_diagnostic() +} + +#[allow(clippy::too_many_arguments)] +async fn locked_packages_to_pixi_records( + conda_packages: Vec>, + workspace_root: &std::path::Path, + command_dispatcher: &CommandDispatcher, + channel_config: rattler_conda_types::ChannelConfig, + channels: Vec, + variants: std::collections::BTreeMap>, + variant_files: Vec, + virtual_packages: Vec, + platform: Platform, +) -> Result, Report> { + let unresolved = locked_packages_to_unresolved_records(conda_packages, workspace_root)?; + resolve_unresolved_records( + unresolved, + command_dispatcher, + channel_config, + channels, + variants, + variant_files, + virtual_packages, + platform, + ) + .await +} + +/// Resolve a list of `UnresolvedPixiRecord` into `PixiRecord`. +/// +/// Full records pass through directly. Partial source records are resolved +/// by calling `source_metadata()` on the command dispatcher to fetch the +/// full metadata. 
+#[allow(clippy::too_many_arguments)] +async fn resolve_unresolved_records( + records: Vec, + command_dispatcher: &CommandDispatcher, + channel_config: rattler_conda_types::ChannelConfig, + channels: Vec, + variants: std::collections::BTreeMap>, + variant_files: Vec, + virtual_packages: Vec, + platform: Platform, +) -> Result, Report> { + let mut resolved = Vec::with_capacity(records.len()); + for record in records { + match record.try_into_resolved() { + Ok(pixi_record) => resolved.push(pixi_record), + Err(partial) => { + let pixi_record = resolve_partial_record( + partial, + command_dispatcher, + channel_config.clone(), + channels.clone(), + variants.clone(), + variant_files.clone(), + virtual_packages.clone(), + platform, + ) + .await?; + resolved.push(pixi_record); + } + } + } + Ok(resolved) +} + +/// Resolve a single partial `UnresolvedPixiRecord` into a `PixiRecord` +/// by fetching source metadata from the command dispatcher. +#[allow(clippy::too_many_arguments)] +async fn resolve_partial_record( + partial: UnresolvedPixiRecord, + command_dispatcher: &CommandDispatcher, + channel_config: rattler_conda_types::ChannelConfig, + channels: Vec, + variants: std::collections::BTreeMap>, + variant_files: Vec, + virtual_packages: Vec, + platform: Platform, +) -> Result { + let source = match partial.as_source() { + Some(s) => s, + None => { + // Binary records should never end up here since try_into_resolved + // would have succeeded. Defensive fallback. + miette::bail!("unexpected binary record in resolve_partial_record"); + } + }; + + let spec = build_source_metadata_spec(source, channel_config, channels, variants, variant_files, virtual_packages, platform); + + let metadata = command_dispatcher + .source_metadata(spec) + .await .into_diagnostic()?; - Ok(pixi_records) + + let partial_name = source.name().clone(); + let partial_variants = source.variants().clone(); + + // Find the record that matches by name and variants. 
+ let matched = metadata + .records + .iter() + .find(|r| { + r.name() == &partial_name + && (partial_variants.is_empty() || r.variants() == &partial_variants) + }) + .cloned() + .ok_or_else(|| { + miette::miette!( + "could not resolve partial source record for '{}': no matching record found in source metadata", + partial_name.as_source() + ) + })?; + + Ok(PixiRecord::Source(matched)) +} + +/// Build a `SourceMetadataSpec` from an `UnresolvedSourceRecord`. +fn build_source_metadata_spec( + source: &UnresolvedSourceRecord, + channel_config: rattler_conda_types::ChannelConfig, + channels: Vec, + variants: std::collections::BTreeMap>, + variant_files: Vec, + virtual_packages: Vec, + platform: Platform, +) -> SourceMetadataSpec { + SourceMetadataSpec { + package: source.name().clone(), + backend_metadata: BuildBackendMetadataSpec { + manifest_source: source.manifest_source().clone(), + preferred_build_source: source + .build_source() + .cloned() + .map(PinnedBuildSourceSpec::into_pinned), + channel_config, + channels, + build_environment: BuildEnvironment { + host_platform: platform, + build_platform: platform, + host_virtual_packages: virtual_packages.clone(), + build_virtual_packages: virtual_packages, + }, + variant_configuration: Some(variants), + variant_files: Some(variant_files), + enabled_protocols: EnabledProtocols::default(), + }, + } } pub struct UpdateContext<'p> { @@ -1349,38 +1563,96 @@ impl<'p> UpdateContextBuilder<'p> { } }; - // Extract the current conda records from the lock-file - // TODO: Should we parallelize this? Measure please. + // Extract the current conda records from the lock-file. + // First collect as UnresolvedPixiRecord (sync), then resolve partials (async). let workspace_root = project.root(); - let locked_repodata_records = project + let command_dispatcher = &self.command_dispatcher; + + // Step 1: Collect unresolved records per environment and platform (sync). 
+ #[allow(clippy::type_complexity)] + let unresolved_by_env: Vec<( + crate::workspace::Environment<'_>, + Vec<(Platform, Vec)>, + )> = project .environments() .into_iter() - .flat_map(|env| { - lock_file - .environment(env.name().as_str()) - .into_iter() - .map(move |locked_env| { - locked_env - .conda_packages_by_platform() - .map(|(lock_platform, records)| { - let platform = lock_platform.subdir(); - records - .cloned() - .map(|data| { - PixiRecord::from_conda_package_data(data, workspace_root) - }) - .collect::, _>>() - .map(|records| { - (platform, Arc::new(PixiRecordsByName::from_iter(records))) - }) + .filter_map(|env| { + let locked_env = lock_file.environment(env.name().as_str())?; + let platforms: Result, _> = locked_env + .conda_packages_by_platform() + .map(|(lock_platform, records)| { + let platform = lock_platform.subdir(); + let unresolved = records + .cloned() + .map(|data| { + UnresolvedPixiRecord::from_conda_package_data( + data, + workspace_root, + ) }) - .collect::, _>>() - .map(|records| (env.clone(), records)) + .collect::, _>>()?; + Ok((platform, unresolved)) }) + .collect(); + Some(platforms.map(|p| (env, p))) }) - .collect::>, _>>() + .collect::, ParseLockFileError>>() .into_diagnostic()?; + // Step 2: Resolve partials async. + let mut locked_repodata_records: HashMap< + crate::workspace::Environment<'_>, + HashMap>, + > = HashMap::new(); + for (env, platform_records) in unresolved_by_env { + let mut env_map = HashMap::new(); + for (platform, unresolved) in platform_records { + // Check if any records are partial. 
+ let has_partials = unresolved.iter().any(|r| r.is_partial()); + let resolved = if has_partials { + let channel_config = env.channel_config(); + let channels: Vec = env + .channels() + .into_iter() + .cloned() + .map(|c| c.into_base_url(&channel_config)) + .collect::, _>>() + .into_diagnostic()?; + let VariantConfig { + variant_configuration, + variant_files, + } = project.variants(platform).into_diagnostic()?; + let virtual_packages: Vec = env + .virtual_packages(platform) + .into_iter() + .map(GenericVirtualPackage::from) + .collect(); + resolve_unresolved_records( + unresolved, + command_dispatcher, + channel_config, + channels, + variant_configuration, + variant_files, + virtual_packages, + platform, + ) + .await? + } else { + // All records are full — resolve without async. + unresolved + .into_iter() + .map(|r| { + r.try_into_resolved() + .expect("all records verified as non-partial") + }) + .collect() + }; + env_map.insert(platform, Arc::new(PixiRecordsByName::from_iter(resolved))); + } + locked_repodata_records.insert(env, env_map); + } + let locked_pypi_records = project .environments() .into_iter() @@ -1653,10 +1925,10 @@ impl<'p> UpdateContext<'p> { .iter() .filter_map(|r| match r { PixiRecord::Source(src) => { - let name = src.package_record.name.clone(); + let name = src.name().clone(); if targets.contains(name.as_source()) { - src.build_source - .clone() + src.build_source() + .cloned() .map(|spec| (name, spec.into_pinned())) } else { None diff --git a/crates/pixi_core/src/lock_file/virtual_packages.rs b/crates/pixi_core/src/lock_file/virtual_packages.rs index bd9f400748..a11d8c637d 100644 --- a/crates/pixi_core/src/lock_file/virtual_packages.rs +++ b/crates/pixi_core/src/lock_file/virtual_packages.rs @@ -4,7 +4,7 @@ use miette::Diagnostic; use pixi_manifest::EnvironmentName; use pypi_modifiers::pypi_tags::{PyPITagError, get_tags_from_machine, is_python_record}; use rattler_conda_types::ParseStrictness::Lenient; -use 
rattler_conda_types::{GenericVirtualPackage, MatchSpec, Matches, PackageName, PackageRecord}; +use rattler_conda_types::{GenericVirtualPackage, MatchSpec, Matches, PackageName}; use rattler_conda_types::{ParseMatchSpecError, Platform}; use rattler_lock::{CondaPackageData, ConversionError, LockFile, PypiPackageData}; use rattler_virtual_packages::{ @@ -99,22 +99,14 @@ pub enum MachineValidationError { NoPythonRecordFound(Platform), } -/// Get the required virtual packages for the given environment based on the given lock file. -pub(crate) fn get_required_virtual_packages_from_conda_records( - conda_records: &[&PackageRecord], +/// Get the required virtual packages from dependency strings. +pub(crate) fn get_required_virtual_packages_from_depends( + depends: &[&str], ) -> Result, MachineValidationError> { - // Collect all dependencies from the package records. - let virtual_dependencies = conda_records + depends .iter() - .flat_map(|record| record.depends.iter().filter(|dep| dep.starts_with("__"))) - .collect_vec(); - - // Convert the virtual dependencies into `MatchSpec`s. - virtual_dependencies - .iter() - // Lenient parsing is used here because the dependencies to avoid issues with the parsing of the dependencies. - // As the user can't do anything about the dependencies, we don't want to fail the whole process because of a parsing error. 
- .map(|dep| MatchSpec::from_str(dep.as_str(), Lenient)) + .filter(|dep| dep.starts_with("__")) + .map(|dep| MatchSpec::from_str(dep, Lenient)) .dedup() .collect::, _>>() .map_err(MachineValidationError::DependencyParsingError) @@ -169,18 +161,24 @@ pub(crate) fn validate_system_meets_environment_requirements( return Ok(true); } - // Get package records from both binary and source packages - let conda_records = conda_packages + // Get depends from all packages (binary and source, including partial) + let all_depends: Vec<&str> = conda_packages .iter() - .map(|data| match data { - CondaPackageData::Binary(binary) => &binary.package_record, - CondaPackageData::Source(source) => &source.package_record, - }) + .flat_map(|data| data.depends()) + .map(|s| s.as_str()) .collect_vec(); // Get the virtual packages required by the conda records let required_virtual_packages = - get_required_virtual_packages_from_conda_records(&conda_records)?; + get_required_virtual_packages_from_depends(&all_depends)?; + + // Find the python package record (needed for wheel tag validation below). + // This works for binary and full source packages; partial source records + // don't have a PackageRecord and are skipped. 
+ let python_record = conda_packages + .iter() + .filter_map(|data| data.record()) + .find(|record| is_python_record(record)); tracing::debug!( "Required virtual packages of environment '{}': {}", @@ -259,10 +257,7 @@ pub(crate) fn validate_system_meets_environment_requirements( if lock_platform.is_some_and(|p| environment.has_pypi_packages(p)) && let Some(pypi_packages) = lock_platform.and_then(|p| environment.pypi_packages(p)) { - // Get python record from conda packages - let python_record = conda_records - .iter() - .find(|record| is_python_record(record)) + let python_record = python_record .ok_or(MachineValidationError::NoPythonRecordFound(platform))?; // Check if all the wheel tags match the system virtual packages @@ -305,19 +300,19 @@ mod test { let platform = Platform::Linux64; let env = lockfile.default_environment().unwrap(); let lock_platform = lockfile.platform(&platform.to_string()).unwrap(); - let conda_data = env - .conda_repodata_records(lock_platform) - .map_err(MachineValidationError::RepodataConversionError) + let conda_packages = env + .conda_packages(lock_platform) .unwrap() - .unwrap(); + .collect::>(); - let conda_records: Vec<&PackageRecord> = conda_data + let all_depends: Vec<&str> = conda_packages .iter() - .map(|binding| &binding.package_record) + .flat_map(|data| data.depends()) + .map(|s| s.as_str()) .collect(); let virtual_matchspecs = - get_required_virtual_packages_from_conda_records(&conda_records).unwrap(); + get_required_virtual_packages_from_depends(&all_depends).unwrap(); assert!( virtual_matchspecs diff --git a/crates/pixi_core/src/workspace/mod.rs b/crates/pixi_core/src/workspace/mod.rs index 733f0c9af1..ccbeb4c556 100644 --- a/crates/pixi_core/src/workspace/mod.rs +++ b/crates/pixi_core/src/workspace/mod.rs @@ -762,7 +762,7 @@ impl Workspace { if affected_environments.contains(&(env.name().as_str(), platform)) { match package { LockedPackageRef::Conda(package) => { - !conda_packages.contains(&package.record().name) + 
!conda_packages.contains(package.name()) } LockedPackageRef::Pypi(package) => !pypi_packages.contains(&package.name), } diff --git a/crates/pixi_diff/src/lib.rs b/crates/pixi_diff/src/lib.rs index 7d09ba2d76..bf14cdfe3d 100644 --- a/crates/pixi_diff/src/lib.rs +++ b/crates/pixi_diff/src/lib.rs @@ -9,7 +9,7 @@ use itertools::{Either, Itertools}; use pixi_consts::consts; use pixi_manifest::{EnvironmentName, FeaturesExt}; use rattler_conda_types::Platform; -use rattler_lock::{LockFile, LockedPackage, LockedPackageRef}; +use rattler_lock::{CondaPackageData, LockFile, LockedPackage, LockedPackageRef}; use serde::Serialize; use serde_json::Value; use tabwriter::TabWriter; @@ -62,7 +62,7 @@ impl LockFileDiff { .flatten() .partition_map(|p| match p { LockedPackageRef::Conda(conda_package_data) => Either::Left(( - conda_package_data.record().name.clone(), + conda_package_data.name().clone(), conda_package_data, )), LockedPackageRef::Pypi(pypi_package_data) => { @@ -76,7 +76,7 @@ impl LockFileDiff { for package in packages { match package { LockedPackageRef::Conda(data) => { - let name = &data.record().name; + let name = data.name(); match previous_conda_packages.remove(name) { Some(previous) if previous.location() != data.location() => { diff.changed @@ -250,11 +250,20 @@ impl LockFileDiff { Changed(&'i LockedPackage, &'i LockedPackage), } + fn format_conda_identifier(p: &CondaPackageData) -> String { + match p { + CondaPackageData::Binary(b) => { + format!("{} {}", b.package_record.version.as_str(), &b.package_record.build) + } + CondaPackageData::Source(s) => { + format!("@ {}", &s.location) + } + } + } + fn format_package_identifier(package: &LockedPackage) -> String { match package { - LockedPackage::Conda(p) => { - format!("{} {}", &p.record().version.as_str(), &p.record().build) - } + LockedPackage::Conda(p) => format_conda_identifier(p), LockedPackage::Pypi(p) => p.version_string(), } } @@ -308,19 +317,56 @@ impl LockFileDiff { let name = previous.name(); let line = 
match (previous, current) { (LockedPackage::Conda(previous), LockedPackage::Conda(current)) => { - let previous = previous.record(); - let current = current.record(); - - format!( - "{} {} {}\t{} {}\t->\t{} {}", - console::style("~").yellow(), - consts::CondaEmoji, - name, - choose_style(&previous.version.as_str(), ¤t.version.as_str()), - choose_style(previous.build.as_str(), current.build.as_str()), - choose_style(¤t.version.as_str(), &previous.version.as_str()), - choose_style(current.build.as_str(), previous.build.as_str()), - ) + match (previous, current) { + (CondaPackageData::Binary(prev), CondaPackageData::Binary(curr)) => { + let prev_ver = prev.package_record.version.as_str(); + let curr_ver = curr.package_record.version.as_str(); + format!( + "{} {} {}\t{} {}\t->\t{} {}", + console::style("~").yellow(), + consts::CondaEmoji, + name, + choose_style(&prev_ver, &curr_ver), + choose_style(prev.package_record.build.as_str(), curr.package_record.build.as_str()), + choose_style(&curr_ver, &prev_ver), + choose_style(curr.package_record.build.as_str(), prev.package_record.build.as_str()), + ) + } + (CondaPackageData::Source(prev), CondaPackageData::Source(curr)) => { + let prev_loc = prev.location.to_string(); + let curr_loc = curr.location.to_string(); + format!( + "{} {} {}\t@ {}\t->\t@ {}", + console::style("~").yellow(), + consts::CondaEmoji, + name, + choose_style(&prev_loc, &curr_loc), + choose_style(&curr_loc, &prev_loc), + ) + } + (CondaPackageData::Binary(prev), CondaPackageData::Source(curr)) => { + format!( + "{} {} {}\t{} {}\t->\t@ {}", + console::style("~").yellow(), + consts::CondaEmoji, + name, + prev.package_record.version.as_str(), + prev.package_record.build, + curr.location, + ) + } + (CondaPackageData::Source(prev), CondaPackageData::Binary(curr)) => { + format!( + "{} {} {}\t@ {}\t->\t{} {}", + console::style("~").yellow(), + consts::CondaEmoji, + name, + prev.location, + curr.package_record.version.as_str(), + curr.package_record.build, + ) + 
} + } } (LockedPackage::Pypi(previous), LockedPackage::Pypi(current)) => { let prev_ver = previous.version_string(); @@ -398,13 +444,13 @@ impl LockFileJsonDiff { let add_diffs = packages_diff.added.into_iter().map(|new| match new { LockedPackage::Conda(pkg) => JsonPackageDiff { - name: pkg.record().name.as_normalized().to_string(), + name: pkg.name().as_normalized().to_string(), before: None, after: Some( serde_json::to_value(&pkg).expect("should be able to serialize"), ), ty: JsonPackageType::Conda, - explicit: conda_dependencies.contains_key(&pkg.record().name), + explicit: conda_dependencies.contains_key(pkg.name()), }, LockedPackage::Pypi(pkg) => JsonPackageDiff { name: pkg.name.as_dist_info_name().into_owned(), @@ -419,13 +465,13 @@ impl LockFileJsonDiff { let removed_diffs = packages_diff.removed.into_iter().map(|old| match old { LockedPackage::Conda(pkg) => JsonPackageDiff { - name: pkg.record().name.as_normalized().to_string(), + name: pkg.name().as_normalized().to_string(), before: Some( serde_json::to_value(&pkg).expect("should be able to serialize"), ), after: None, ty: JsonPackageType::Conda, - explicit: conda_dependencies.contains_key(&pkg.record().name), + explicit: conda_dependencies.contains_key(pkg.name()), }, LockedPackage::Pypi(pkg) => JsonPackageDiff { @@ -446,11 +492,11 @@ impl LockFileJsonDiff { let after = serde_json::to_value(&new).expect("should be able to serialize"); let (before, after) = compute_json_diff(before, after); JsonPackageDiff { - name: old.record().name.as_normalized().to_string(), + name: old.name().as_normalized().to_string(), before: Some(before), after: Some(after), ty: JsonPackageType::Conda, - explicit: conda_dependencies.contains_key(&old.record().name), + explicit: conda_dependencies.contains_key(old.name()), } } (LockedPackage::Pypi(old), LockedPackage::Pypi(new)) => { diff --git a/crates/pixi_record/src/lib.rs b/crates/pixi_record/src/lib.rs index 9846223f30..055e542591 100644 --- a/crates/pixi_record/src/lib.rs +++ 
b/crates/pixi_record/src/lib.rs @@ -18,7 +18,10 @@ use rattler_conda_types::{ }; use rattler_lock::{CondaPackageData, ConversionError, UrlOrPath}; use serde::Serialize; -pub use source_record::{PinnedBuildSourceSpec, SourceRecord}; +pub use source_record::{ + FullSourceRecord as SourceRecord, FullSourceRecordData, PinnedBuildSourceSpec, + PartialSourceRecord, PartialSourceRecordData, SourceRecordData, UnresolvedSourceRecord, +}; use thiserror::Error; /// A record of a conda package that is either something installable from a @@ -42,7 +45,7 @@ impl PixiRecord { pub fn package_record(&self) -> &PackageRecord { match self { PixiRecord::Binary(record) => &record.package_record, - PixiRecord::Source(record) => &record.package_record, + PixiRecord::Source(record) => &record.data.package_record, } } @@ -57,32 +60,6 @@ impl PixiRecord { } } - /// Create PixiRecord from CondaPackageData with paths resolved relative to workspace_root. - /// This should be used when reading from the lock file. - pub fn from_conda_package_data( - data: CondaPackageData, - workspace_root: &std::path::Path, - ) -> Result { - let record = match data { - CondaPackageData::Binary(value) => { - let location = value.location.clone(); - PixiRecord::Binary(value.try_into().map_err(|err| match err { - ConversionError::Missing(field) => ParseLockFileError::Missing(location, field), - ConversionError::LocationToUrlConversionError(err) => { - ParseLockFileError::InvalidRecordUrl(location, err) - } - ConversionError::InvalidBinaryPackageLocation => { - ParseLockFileError::InvalidArchiveFilename(location) - } - })?) - } - CondaPackageData::Source(value) => { - PixiRecord::Source(SourceRecord::from_conda_source_data(value, workspace_root)?) - } - }; - Ok(record) - } - /// Returns a reference to the binary record if it is a binary record. 
pub fn as_binary(&self) -> Option<&RepoDataRecord> { match self { @@ -137,6 +114,158 @@ impl From for PixiRecord { } } +/// A record that may contain partial source metadata (not yet resolved). +/// +/// Used at the lock-file boundary: lock-file read produces these, and they must +/// be resolved to [`PixiRecord`] before use in solving/installing. +#[allow(clippy::large_enum_variant)] +#[derive(Debug, Clone)] +pub enum UnresolvedPixiRecord { + Binary(RepoDataRecord), + Source(UnresolvedSourceRecord), +} + +impl UnresolvedPixiRecord { + /// The name of the package. + pub fn name(&self) -> &PackageName { + match self { + UnresolvedPixiRecord::Binary(record) => &record.package_record.name, + UnresolvedPixiRecord::Source(record) => record.name(), + } + } + + /// Run-time dependencies. + pub fn depends(&self) -> &[String] { + match self { + UnresolvedPixiRecord::Binary(record) => &record.package_record.depends, + UnresolvedPixiRecord::Source(record) => record.depends(), + } + } + + /// Source dependency locations. Empty for binary records. + pub fn sources(&self) -> &std::collections::HashMap { + static EMPTY: std::sync::LazyLock< + std::collections::HashMap, + > = std::sync::LazyLock::new(std::collections::HashMap::new); + match self { + UnresolvedPixiRecord::Binary(_) => &EMPTY, + UnresolvedPixiRecord::Source(record) => record.sources(), + } + } + + /// Returns a reference to the binary record if it is one. + pub fn as_binary(&self) -> Option<&RepoDataRecord> { + match self { + UnresolvedPixiRecord::Binary(record) => Some(record), + UnresolvedPixiRecord::Source(_) => None, + } + } + + /// Returns a reference to the source record if it is one. + pub fn as_source(&self) -> Option<&UnresolvedSourceRecord> { + match self { + UnresolvedPixiRecord::Binary(_) => None, + UnresolvedPixiRecord::Source(record) => Some(record), + } + } + + /// Returns the full package record if available (binary or full source). 
+ pub fn package_record(&self) -> Option<&PackageRecord> { + match self { + UnresolvedPixiRecord::Binary(record) => Some(&record.package_record), + UnresolvedPixiRecord::Source(record) => match &record.data { + SourceRecordData::Full(full) => Some(&full.package_record), + SourceRecordData::Partial(_) => None, + }, + } + } + + /// Returns true if this is a partial source record. + pub fn is_partial(&self) -> bool { + matches!( + self, + UnresolvedPixiRecord::Source(s) if s.data.is_partial() + ) + } + + /// Create from lock-file `CondaPackageData`. + pub fn from_conda_package_data( + data: CondaPackageData, + workspace_root: &std::path::Path, + ) -> Result { + match data { + CondaPackageData::Binary(value) => { + let location = value.location.clone(); + Ok(UnresolvedPixiRecord::Binary( + value.try_into().map_err(|err| match err { + ConversionError::Missing(field) => { + ParseLockFileError::Missing(location, field) + } + ConversionError::LocationToUrlConversionError(err) => { + ParseLockFileError::InvalidRecordUrl(location, err) + } + ConversionError::InvalidBinaryPackageLocation => { + ParseLockFileError::InvalidArchiveFilename(location) + } + })?, + )) + } + CondaPackageData::Source(value) => Ok(UnresolvedPixiRecord::Source( + UnresolvedSourceRecord::from_conda_source_data(value, workspace_root)?, + )), + } + } + + /// Convert to `CondaPackageData` for lock-file write. + pub fn into_conda_package_data(self, workspace_root: &Path) -> CondaPackageData { + match self { + UnresolvedPixiRecord::Binary(record) => record.into(), + UnresolvedPixiRecord::Source(record) => { + CondaPackageData::Source(record.into_conda_source_data(workspace_root)) + } + } + } + + /// Try to convert into a fully resolved `PixiRecord`. + /// + /// Returns `Ok(PixiRecord)` if this is a binary record or a full source + /// record. Returns `Err(self)` if this is a partial source record. 
+ pub fn try_into_resolved(self) -> Result { + match self { + UnresolvedPixiRecord::Binary(record) => Ok(PixiRecord::Binary(record)), + UnresolvedPixiRecord::Source(source) => match source.data { + SourceRecordData::Full(full) => { + Ok(PixiRecord::Source(source_record::SourceRecord { + data: full, + manifest_source: source.manifest_source, + build_source: source.build_source, + variants: source.variants, + identifier_hash: source.identifier_hash, + })) + } + SourceRecordData::Partial(partial) => { + Err(UnresolvedPixiRecord::Source(source_record::SourceRecord { + data: SourceRecordData::Partial(partial), + manifest_source: source.manifest_source, + build_source: source.build_source, + variants: source.variants, + identifier_hash: source.identifier_hash, + })) + } + }, + } + } +} + +impl From for UnresolvedPixiRecord { + fn from(record: PixiRecord) -> Self { + match record { + PixiRecord::Binary(r) => UnresolvedPixiRecord::Binary(r), + PixiRecord::Source(r) => UnresolvedPixiRecord::Source(r.into()), + } + } +} + #[derive(Debug, Error)] pub enum ParseLockFileError { #[error("missing field/fields '{1}' for package {0}")] diff --git a/crates/pixi_record/src/pinned_source.rs b/crates/pixi_record/src/pinned_source.rs index 3a07ec0814..f363cd8ec4 100644 --- a/crates/pixi_record/src/pinned_source.rs +++ b/crates/pixi_record/src/pinned_source.rs @@ -164,13 +164,11 @@ impl PinnedSourceSpec { /// }, /// }); /// - /// let source_spec = SourceSpec { - /// location: SourceLocationSpec::Git(GitSpec { - /// git: Url::parse("https://github.com/user/repo.git")?, - /// rev: None, - /// subdirectory: Default::default(), - /// }), - /// }; + /// let source_spec = SourceLocationSpec::Git(GitSpec { + /// git: Url::parse("https://github.com/user/repo.git")?, + /// rev: None, + /// subdirectory: Default::default(), + /// }); /// /// assert!(pinned_git.matches_source_spec(&source_spec)); /// # Ok(()) diff --git 
a/crates/pixi_record/src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap b/crates/pixi_record/src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap index 859647d89d..ac82fb7cd3 100644 --- a/crates/pixi_record/src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap +++ b/crates/pixi_record/src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap @@ -38,24 +38,12 @@ packages: package_build_source: path: ../non-nested - source: path-absolute-manifest[17e682f7] @ /workspace/absolute-recipe - version: 2.4.0 - build: h901236_0 - subdir: noarch package_build_source: path: ../src - source: path-child-test[9d062313] @ recipes/my-package - version: 2.1.0 - build: h890123_0 - subdir: noarch package_build_source: path: ../../src/lib - source: path-no-build-source[e11447f7] @ recipes/no-build - version: 2.5.0 - build: h901238_0 - subdir: noarch - source: path-sibling-test[2c2ab470] @ recipes/my-package - version: 2.0.0 - build: h789012_0 - subdir: noarch package_build_source: path: ../../other-package/src diff --git a/crates/pixi_record/src/source_record.rs b/crates/pixi_record/src/source_record.rs index c7726e2e10..7513f282de 100644 --- a/crates/pixi_record/src/source_record.rs +++ b/crates/pixi_record/src/source_record.rs @@ -1,7 +1,10 @@ use pixi_git::sha::GitSha; use pixi_spec::{GitReference, SourceLocationSpec}; -use rattler_conda_types::{MatchSpec, Matches, NamelessMatchSpec, PackageRecord}; -use rattler_lock::{CondaSourceData, GitShallowSpec, PackageBuildSource}; +use rattler_conda_types::{MatchSpec, Matches, NamelessMatchSpec, PackageName, PackageRecord}; +use rattler_lock::{ + CondaSourceData, FullSourceMetadata, GitShallowSpec, PackageBuildSource, + PartialSourceMetadata, SourceMetadata, +}; use std::fmt::{Display, Formatter}; use std::{ collections::{BTreeMap, HashMap}, @@ -67,10 +70,9 @@ impl From for PinnedSourceSpec { /// A record of a conda package that 
still requires building. #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct SourceRecord { - /// Information about the conda package. This is metadata of the package - /// after it has been build. - pub package_record: PackageRecord, +pub struct SourceRecord { + /// Information about the conda package. + pub data: D, /// Exact definition of the source of the package. pub manifest_source: PinnedSourceSpec, @@ -83,146 +85,160 @@ pub struct SourceRecord { /// The variants that uniquely identify the way this package was built. pub variants: BTreeMap, + /// The short hash that was originally parsed from the lock file (e.g. + /// the 9f3c2a7b part of numba-cuda[9f3c2a7b] @ .). + /// + /// It's useful to reuse this identifier to avoid unnecessary lock-file + /// updates. If this field is None when serializing to the lock-file, it + /// will be regenerated based on the contents of this struct itself. + pub identifier_hash: Option, +} + +/// A source record with full metadata (package record + sources). +pub type FullSourceRecord = SourceRecord; + +/// A source record with only the package name (no metadata resolved yet). +pub type PartialSourceRecord = SourceRecord; + +/// A source record that may be full or partial. +pub type UnresolvedSourceRecord = SourceRecord; + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct PartialSourceRecordData { + /// The package name of the source record. + pub name: PackageName, + + /// Dependencies on other packages (run-time requirements). + pub depends: Vec, + /// Specifies which packages are expected to be installed as source packages /// and from which location. pub sources: HashMap, } -impl SourceRecord { - /// Convert [`SourceRecord`] into lock-file compatible `CondaSourceData` - /// The `build_source` in the SourceRecord is always relative to the workspace. - /// However, when saving in the lock-file make these relative to the package manifest. 
- /// This should be used when writing to the lock file. - pub fn into_conda_source_data(self, _workspace_root: &Path) -> CondaSourceData { - let package_build_source = match self.build_source { - Some(PinnedBuildSourceSpec::Relative(path, _)) => Some(PackageBuildSource::Path { - path: Utf8TypedPathBuf::from(path), - }), - Some(PinnedBuildSourceSpec::Absolute(PinnedSourceSpec::Url(pinned_url_spec))) => { - Some(PackageBuildSource::Url { - url: pinned_url_spec.url, - sha256: pinned_url_spec.sha256, - subdir: pinned_url_spec - .subdirectory - .to_option_string() - .map(Utf8TypedPathBuf::from), - }) - } - Some(PinnedBuildSourceSpec::Absolute(PinnedSourceSpec::Git(pinned_git_spec))) => { - Some(PackageBuildSource::Git { - url: pinned_git_spec.git, - spec: to_git_shallow(&pinned_git_spec.source.reference), - rev: pinned_git_spec.source.commit.to_string(), - subdir: pinned_git_spec - .source - .subdirectory - .to_option_string() - .map(Utf8TypedPathBuf::from), - }) - } - Some(PinnedBuildSourceSpec::Absolute(PinnedSourceSpec::Path(pinned_path_spec))) => { - Some(PackageBuildSource::Path { - path: pinned_path_spec.path, - }) - } - None => None, - }; +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct FullSourceRecordData { + #[serde(flatten)] + pub package_record: PackageRecord, - CondaSourceData { - package_record: self.package_record, - location: self.manifest_source.clone().into(), - package_build_source, - sources: self - .sources - .into_iter() - .map(|(k, v)| (k, v.into())) - .collect(), - variants: self - .variants - .into_iter() - .map(|(k, v)| (k, v.into())) - .collect(), + /// Specifies which packages are expected to be installed as source packages + /// and from which location. 
+ pub sources: HashMap, +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub enum SourceRecordData { + Partial(PartialSourceRecordData), + Full(FullSourceRecordData), +} + +impl SourceRecordData { + pub fn package_name(&self) -> &PackageName { + match self { + SourceRecordData::Partial(data) => &data.name, + SourceRecordData::Full(data) => &data.package_record.name, + } + } + + pub fn as_partial(&self) -> Option<&PartialSourceRecordData> { + if let SourceRecordData::Partial(data) = self { + Some(data) + } else { + None } } - /// Create SourceRecord from CondaSourceData with paths resolved relative to workspace_root. - /// This should be used when reading from the lock file. + pub fn as_full(&self) -> Option<&FullSourceRecordData> { + if let SourceRecordData::Full(data) = self { + Some(data) + } else { + None + } + } + + pub fn is_partial(&self) -> bool { + matches!(self, SourceRecordData::Partial(_)) + } + + pub fn is_full(&self) -> bool { + matches!(self, SourceRecordData::Full(_)) + } +} + +impl SourceRecord { + /// The pinned source location from the manifest. + pub fn manifest_source(&self) -> &PinnedSourceSpec { + &self.manifest_source + } + + /// The optional pinned build source. + pub fn build_source(&self) -> Option<&PinnedBuildSourceSpec> { + self.build_source.as_ref() + } + + /// The variants that identify how this package was built. + pub fn variants(&self) -> &BTreeMap { + &self.variants + } +} + +impl SourceRecord { + /// The name of the package. + pub fn name(&self) -> &PackageName { + &self.data.package_record.name + } + + /// The full package record. + pub fn package_record(&self) -> &PackageRecord { + &self.data.package_record + } + + /// Run-time dependencies. + pub fn depends(&self) -> &[String] { + &self.data.package_record.depends + } + + /// Source dependency locations. + pub fn sources(&self) -> &HashMap { + &self.data.sources + } + + /// Returns true if either the manifest source or build source is mutable + /// (i.e. 
path-based and may change over time). + pub fn has_mutable_source(&self) -> bool { + self.manifest_source.is_mutable() + || self + .build_source + .as_ref() + .is_some_and(|bs| bs.pinned().is_mutable()) + } + + /// Convert into lock-file compatible `CondaSourceData`. /// - /// The inverse of `into_conda_source_data`: - /// - manifest_source: relative to workspace_root (or absolute) → resolve to absolute - /// - build_source: relative to manifest_source (or absolute) → resolve to absolute - pub fn from_conda_source_data( - data: CondaSourceData, - _workspace_root: &std::path::Path, - ) -> Result { - let manifest_source: PinnedSourceSpec = data.location.try_into()?; - let build_source = match data.package_build_source { - None => None, - Some(PackageBuildSource::Path { path }) if path.is_relative() => { - let pinned = manifest_source.join(path.to_path()); - Some(PinnedBuildSourceSpec::Relative(path.to_string(), pinned)) + /// If either source (manifest or build) is mutable, the record is + /// downgraded to partial metadata so the lock-file only stores minimal + /// information for packages whose metadata may change. + pub fn into_conda_source_data(self, workspace_root: &Path) -> CondaSourceData { + let has_mutable = self.has_mutable_source(); + let mut unresolved = SourceRecord::::from(self); + if has_mutable { + // Downgrade full data to partial — keep only name, depends, and sources. 
+ if let SourceRecordData::Full(full) = unresolved.data { + unresolved.data = SourceRecordData::Partial(PartialSourceRecordData { + name: full.package_record.name, + depends: full.package_record.depends, + sources: full.sources, + }); } - Some(PackageBuildSource::Path { path }) => Some(PinnedBuildSourceSpec::Absolute( - PinnedSourceSpec::Path(PinnedPathSpec { path }), - )), - Some(PackageBuildSource::Git { - url, - spec, - rev, - subdir, - }) => { - let reference = git_reference_from_shallow(spec, &rev); - Some(PinnedBuildSourceSpec::Absolute(PinnedSourceSpec::Git( - PinnedGitSpec { - git: url, - source: PinnedGitCheckout { - commit: GitSha::from_str(&rev).unwrap(), - subdirectory: subdir - .and_then(|s| pixi_spec::Subdirectory::try_from(s.to_string()).ok()) - .unwrap_or_default(), - reference, - }, - }, - ))) - } - Some(PackageBuildSource::Url { - url, - sha256, - subdir, - }) => Some(PinnedBuildSourceSpec::Absolute(PinnedSourceSpec::Url( - PinnedUrlSpec { - url, - sha256, - md5: None, - subdirectory: subdir - .and_then(|s| pixi_spec::Subdirectory::try_from(s.to_string()).ok()) - .unwrap_or_default(), - }, - ))), - }; - - Ok(Self { - package_record: data.package_record, - manifest_source, - build_source, - sources: data - .sources - .into_iter() - .map(|(k, v)| (k, SourceLocationSpec::from(v))) - .collect(), - variants: data - .variants - .into_iter() - .map(|(k, v)| (k, VariantValue::from(v))) - .collect(), - }) + } + unresolved.into_conda_source_data(workspace_root) } /// Returns true if this source record refers to the same output as the other source record. /// This is determined by comparing the package name, and either the variants (if both records have them) /// or the build, version and subdir (if variants are not present). 
- pub fn refers_to_same_output(&self, other: &SourceRecord) -> bool { - if self.package_record.name != other.package_record.name { + pub fn refers_to_same_output(&self, other: &SourceRecord) -> bool { + if self.data.package_record.name != other.data.package_record.name { return false; } @@ -234,9 +250,9 @@ impl SourceRecord { } } -impl Matches for NamelessMatchSpec { - fn matches(&self, pkg: &SourceRecord) -> bool { - if !self.matches(&pkg.package_record) { +impl Matches> for NamelessMatchSpec { + fn matches(&self, pkg: &SourceRecord) -> bool { + if !self.matches(&pkg.data.package_record) { return false; } @@ -250,9 +266,9 @@ impl Matches for NamelessMatchSpec { } } -impl Matches for MatchSpec { - fn matches(&self, pkg: &SourceRecord) -> bool { - if !self.matches(&pkg.package_record) { +impl Matches> for MatchSpec { + fn matches(&self, pkg: &SourceRecord) -> bool { + if !self.matches(&pkg.data.package_record) { return false; } @@ -266,9 +282,226 @@ impl Matches for MatchSpec { } } -impl AsRef for SourceRecord { +impl AsRef for SourceRecord { fn as_ref(&self) -> &PackageRecord { - &self.package_record + &self.data.package_record + } +} + +impl SourceRecord { + /// The name of the package. + pub fn name(&self) -> &PackageName { + &self.data.name + } + + /// Run-time dependencies. + pub fn depends(&self) -> &[String] { + &self.data.depends + } + + /// Source dependency locations. + pub fn sources(&self) -> &HashMap { + &self.data.sources + } +} + +impl SourceRecord { + /// The name of the package. + pub fn name(&self) -> &PackageName { + self.data.package_name() + } + + /// Run-time dependencies. + pub fn depends(&self) -> &[String] { + match &self.data { + SourceRecordData::Full(full) => &full.package_record.depends, + SourceRecordData::Partial(partial) => &partial.depends, + } + } + + /// Source dependency locations. 
+ pub fn sources(&self) -> &HashMap { + match &self.data { + SourceRecordData::Full(full) => &full.sources, + SourceRecordData::Partial(partial) => &partial.sources, + } + } + + /// Convert into lock-file compatible `CondaSourceData`. + pub fn into_conda_source_data(self, _workspace_root: &Path) -> CondaSourceData { + let package_build_source = build_source_to_package_build_source(self.build_source); + + let metadata = match self.data { + SourceRecordData::Full(full) => SourceMetadata::Full(Box::new(FullSourceMetadata { + package_record: full.package_record, + sources: full.sources.into_iter().map(|(k, v)| (k, v.into())).collect(), + })), + SourceRecordData::Partial(partial) => { + SourceMetadata::Partial(PartialSourceMetadata { + name: partial.name, + depends: partial.depends, + sources: partial.sources.into_iter().map(|(k, v)| (k, v.into())).collect(), + }) + } + }; + + CondaSourceData { + location: self.manifest_source.clone().into(), + package_build_source, + variants: self + .variants + .into_iter() + .map(|(k, v)| (k, v.into())) + .collect(), + identifier_hash: self.identifier_hash, + metadata, + } + } + + /// Create from lock-file `CondaSourceData`. 
+ pub fn from_conda_source_data( + data: CondaSourceData, + _workspace_root: &std::path::Path, + ) -> Result { + let manifest_source: PinnedSourceSpec = data.location.try_into()?; + let build_source = + package_build_source_to_build_source(data.package_build_source, &manifest_source)?; + + let record_data = match data.metadata { + SourceMetadata::Full(full) => SourceRecordData::Full(FullSourceRecordData { + package_record: full.package_record, + sources: full + .sources + .into_iter() + .map(|(k, v)| (k, SourceLocationSpec::from(v))) + .collect(), + }), + SourceMetadata::Partial(partial) => { + SourceRecordData::Partial(PartialSourceRecordData { + name: partial.name, + depends: partial.depends, + sources: partial.sources.into_iter().map(|(k, v)| (k, SourceLocationSpec::from(v))).collect(), + }) + } + }; + + Ok(Self { + data: record_data, + manifest_source, + build_source, + variants: data + .variants + .into_iter() + .map(|(k, v)| (k, VariantValue::from(v))) + .collect(), + identifier_hash: data.identifier_hash, + }) + } +} + +/// Upcast from full to unresolved. +impl From> for SourceRecord { + fn from(record: SourceRecord) -> Self { + Self { + data: SourceRecordData::Full(record.data), + manifest_source: record.manifest_source, + build_source: record.build_source, + variants: record.variants, + identifier_hash: record.identifier_hash, + } + } +} + +/// Convert build source to rattler's PackageBuildSource. 
+fn build_source_to_package_build_source( + build_source: Option, +) -> Option { + match build_source { + Some(PinnedBuildSourceSpec::Relative(path, _)) => Some(PackageBuildSource::Path { + path: Utf8TypedPathBuf::from(path), + }), + Some(PinnedBuildSourceSpec::Absolute(PinnedSourceSpec::Url(pinned_url_spec))) => { + Some(PackageBuildSource::Url { + url: pinned_url_spec.url, + sha256: pinned_url_spec.sha256, + subdir: pinned_url_spec + .subdirectory + .to_option_string() + .map(Utf8TypedPathBuf::from), + }) + } + Some(PinnedBuildSourceSpec::Absolute(PinnedSourceSpec::Git(pinned_git_spec))) => { + Some(PackageBuildSource::Git { + url: pinned_git_spec.git, + spec: to_git_shallow(&pinned_git_spec.source.reference), + rev: pinned_git_spec.source.commit.to_string(), + subdir: pinned_git_spec + .source + .subdirectory + .to_option_string() + .map(Utf8TypedPathBuf::from), + }) + } + Some(PinnedBuildSourceSpec::Absolute(PinnedSourceSpec::Path(pinned_path_spec))) => { + Some(PackageBuildSource::Path { + path: pinned_path_spec.path, + }) + } + None => None, + } +} + +/// Convert rattler's PackageBuildSource back to PinnedBuildSourceSpec. 
+fn package_build_source_to_build_source( + pbs: Option, + manifest_source: &PinnedSourceSpec, +) -> Result, ParseLockFileError> { + match pbs { + None => Ok(None), + Some(PackageBuildSource::Path { path }) if path.is_relative() => { + let pinned = manifest_source.join(path.to_path()); + Ok(Some(PinnedBuildSourceSpec::Relative( + path.to_string(), + pinned, + ))) + } + Some(PackageBuildSource::Path { path }) => Ok(Some(PinnedBuildSourceSpec::Absolute( + PinnedSourceSpec::Path(PinnedPathSpec { path }), + ))), + Some(PackageBuildSource::Git { + url, + spec, + rev, + subdir, + }) => { + let reference = git_reference_from_shallow(spec, &rev); + Ok(Some(PinnedBuildSourceSpec::Absolute( + PinnedSourceSpec::Git(PinnedGitSpec { + git: url, + source: PinnedGitCheckout { + commit: GitSha::from_str(&rev).unwrap(), + subdirectory: subdir + .and_then(|s| pixi_spec::Subdirectory::try_from(s.to_string()).ok()) + .unwrap_or_default(), + reference, + }, + }), + ))) + } + Some(PackageBuildSource::Url { + url, + sha256, + subdir, + }) => Ok(Some(PinnedBuildSourceSpec::Absolute( + PinnedSourceSpec::Url(PinnedUrlSpec { + url, + sha256, + md5: None, + subdirectory: subdir + .and_then(|s| pixi_spec::Subdirectory::try_from(s.to_string()).ok()) + .unwrap_or_default(), + }), + ))), } } @@ -300,12 +533,14 @@ mod tests { Channel, CondaPackageData, DEFAULT_ENVIRONMENT_NAME, LockFile, LockFileBuilder, }; + type SourceRecord = super::SourceRecord; + #[test] fn roundtrip_conda_source_data() { let workspace_root = Path::new("/workspace"); - // Load the lock file from the snapshot content (skip insta frontmatter). - let lock_source = lock_source_from_snapshot(); + // Load the lock file from a static fixture with full metadata for all records. 
+ let lock_source = lock_source_from_fixture(); let lock_file = LockFile::from_str_with_base_directory(&lock_source, Some(Path::new("/workspace"))) .expect("failed to load lock file fixture"); @@ -320,15 +555,32 @@ mod tests { .flat_map(|(_, packages)| packages.filter_map(|pkg| pkg.as_source().cloned())) .collect(); - // Convert to SourceRecords and roundtrip back to CondaSourceData. + // Convert to full SourceRecords (input fixture always has full metadata). let roundtrip_records: Vec = conda_sources .iter() .map(|conda_data| { - SourceRecord::from_conda_source_data(conda_data.clone(), workspace_root) - .expect("from_conda_source_data should succeed") + let unresolved = super::SourceRecord::::from_conda_source_data( + conda_data.clone(), + workspace_root, + ) + .expect("from_conda_source_data should succeed"); + match unresolved.data { + SourceRecordData::Full(full) => super::SourceRecord { + data: full, + manifest_source: unresolved.manifest_source, + build_source: unresolved.build_source, + variants: unresolved.variants, + identifier_hash: unresolved.identifier_hash, + }, + SourceRecordData::Partial(_) => { + panic!("fixture should only contain full source records") + } + } }) .collect(); + // Write back — mutable (path) records should become partial, + // immutable (git) records stay full. let roundtrip_lock = build_lock_from_records(&roundtrip_records, workspace_root); let mut settings = insta::Settings::clone_current(); settings.set_sort_maps(true); @@ -337,30 +589,21 @@ mod tests { }); } - /// Extract the lock file body from the snapshot by skipping the insta frontmatter. - fn lock_source_from_snapshot() -> String { - let snapshot_path = Path::new(env!("CARGO_MANIFEST_DIR")).join( - "src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap", - ); + /// Load the lock file body from a static fixture file with full metadata. 
+ fn lock_source_from_fixture() -> String { + let fixture_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("src/test_fixtures/full_source_records.lock"); #[allow(clippy::disallowed_methods)] - let snap = std::fs::read_to_string(snapshot_path).expect("failed to read snapshot file"); - // Skip insta frontmatter (two --- delimiters) and return the lock file contents - snap.splitn(3, "---") - .nth(2) - .map(|s| s.trim_start_matches('\n')) - .unwrap_or_default() - .to_string() + std::fs::read_to_string(fixture_path).expect("failed to read fixture file") } /// Build a lock file string from a set of SourceRecords. fn build_lock_from_records(records: &[SourceRecord], workspace_root: &Path) -> String { - // Collect all unique platforms from the records + // Collect all unique platforms from the records (using the package_record's subdir). let platforms: std::collections::HashSet = records .iter() .map(|r| { - let conda_data = - CondaPackageData::from(r.clone().into_conda_source_data(workspace_root)); - Platform::from_str(&conda_data.record().subdir) + Platform::from_str(&r.package_record().subdir) .expect("failed to parse platform from subdir") }) .collect(); @@ -383,11 +626,10 @@ mod tests { ); for record in records { + let platform = Platform::from_str(&record.package_record().subdir) + .expect("failed to parse platform from subdir"); let conda_data = CondaPackageData::from(record.clone().into_conda_source_data(workspace_root)); - - let platform = Platform::from_str(&conda_data.record().subdir) - .expect("failed to parse platform from subdir"); builder .add_conda_package(DEFAULT_ENVIRONMENT_NAME, &platform.to_string(), conda_data) .expect("platform was registered"); @@ -442,4 +684,143 @@ mod tests { GitReference::DefaultBranch )); } + + #[test] + fn partial_source_record_roundtrip() { + use crate::{PinnedPathSpec, PinnedSourceSpec}; + + let workspace_root = Path::new("/workspace"); + + // Create a partial source record. 
+ let partial = super::SourceRecord:: { + data: SourceRecordData::Partial(PartialSourceRecordData { + name: PackageName::from_str("my-package").unwrap(), + depends: vec!["numpy >=1.0".to_string()], + sources: HashMap::new(), + }), + manifest_source: PinnedSourceSpec::Path(PinnedPathSpec { + path: typed_path::Utf8TypedPathBuf::from("./my-package"), + }), + build_source: None, + variants: BTreeMap::from([("python".into(), crate::VariantValue::from("3.12".to_string()))]), + identifier_hash: Some("abcd1234".to_string()), + }; + + assert_eq!(partial.name().as_source(), "my-package"); + + // Roundtrip through CondaSourceData. + let conda_data = partial.into_conda_source_data(workspace_root); + let roundtripped = + super::SourceRecord::::from_conda_source_data( + conda_data, + workspace_root, + ) + .expect("from_conda_source_data should succeed"); + + assert_eq!(roundtripped.name().as_source(), "my-package"); + assert!(roundtripped.data.is_partial()); + assert_eq!( + roundtripped.variants.get("python").map(|v| v.to_string()), + Some("3.12".to_string()) + ); + assert_eq!(roundtripped.identifier_hash.as_deref(), Some("abcd1234")); + } + + #[test] + fn try_into_resolved_with_full_record() { + use crate::{PixiRecord, UnresolvedPixiRecord}; + + let workspace_root = Path::new("/workspace"); + + let lock_source = lock_source_from_fixture(); + let lock_file = + LockFile::from_str_with_base_directory(&lock_source, Some(Path::new("/workspace"))) + .expect("failed to load lock file fixture"); + + let environment = lock_file + .default_environment() + .expect("expected default environment"); + + let conda_source = environment + .conda_packages_by_platform() + .flat_map(|(_, packages)| packages.filter_map(|pkg| pkg.as_source().cloned())) + .next() + .expect("expected at least one source package"); + + let unresolved = + UnresolvedPixiRecord::from_conda_package_data( + CondaPackageData::Source(conda_source), + workspace_root, + ) + .expect("from_conda_package_data should succeed"); + + 
let resolved = unresolved.try_into_resolved(); + assert!(resolved.is_ok()); + assert!(matches!(resolved.unwrap(), PixiRecord::Source(_))); + } + + #[test] + fn try_into_resolved_with_partial_record() { + use crate::{PinnedPathSpec, PinnedSourceSpec, UnresolvedPixiRecord}; + + let partial = UnresolvedPixiRecord::Source(super::SourceRecord:: { + data: SourceRecordData::Partial(PartialSourceRecordData { + name: PackageName::from_str("partial-pkg").unwrap(), + depends: vec![], + sources: HashMap::new(), + }), + manifest_source: PinnedSourceSpec::Path(PinnedPathSpec { + path: typed_path::Utf8TypedPathBuf::from("./partial-pkg"), + }), + build_source: None, + variants: BTreeMap::new(), + identifier_hash: None, + }); + + let result = partial.try_into_resolved(); + assert!(result.is_err()); + let still_partial = result.unwrap_err(); + assert_eq!(still_partial.name().as_source(), "partial-pkg"); + } + + #[test] + fn full_upcast_roundtrip() { + let workspace_root = Path::new("/workspace"); + + // Load a full record from snapshot. + let lock_source = lock_source_from_fixture(); + let lock_file = + LockFile::from_str_with_base_directory(&lock_source, Some(Path::new("/workspace"))) + .expect("failed to load lock file fixture"); + + let environment = lock_file + .default_environment() + .expect("expected default environment"); + + let conda_source = environment + .conda_packages_by_platform() + .flat_map(|(_, packages)| packages.filter_map(|pkg| pkg.as_source().cloned())) + .next() + .expect("expected at least one source package"); + + // Parse as unresolved record (first record in fixture is git = immutable = full). + let unresolved = + super::SourceRecord::::from_conda_source_data( + conda_source, + workspace_root, + ) + .expect("from_conda_source_data should succeed"); + assert!(unresolved.data.is_full()); + + // Roundtrip through CondaSourceData. 
+ let conda_data = unresolved.into_conda_source_data(workspace_root); + let roundtripped = + super::SourceRecord::::from_conda_source_data( + conda_data, + workspace_root, + ) + .expect("roundtrip should succeed"); + + assert!(roundtripped.data.is_full()); + } } diff --git a/crates/pixi_record/src/test_fixtures/full_source_records.lock b/crates/pixi_record/src/test_fixtures/full_source_records.lock new file mode 100644 index 0000000000..af3e083bff --- /dev/null +++ b/crates/pixi_record/src/test_fixtures/full_source_records.lock @@ -0,0 +1,57 @@ +version: 7 +platforms: +- name: noarch +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + packages: + noarch: + - conda_source: git-child-test[7ed0aa73] @ git+https://github.com/example/mono-repo.git?subdirectory=recipes&branch=main#abc123def456abc123def456abc123def456abc1 + - conda_source: git-no-manifest-subdir[f0ed072f] @ git+https://github.com/example/repo.git?tag=v1.0.0#abc123def456abc123def456abc123def456abc1 + - conda_source: git-sibling-test[cb1b107e] @ git+https://github.com/example/mono-repo.git?subdirectory=recipes&branch=main#abc123def456abc123def456abc123def456abc1 + - conda_source: path-absolute-manifest[17e682f7] @ /workspace/absolute-recipe + - conda_source: path-child-test[9d062313] @ recipes/my-package + - conda_source: path-no-build-source[e11447f7] @ recipes/no-build + - conda_source: path-sibling-test[2c2ab470] @ recipes/my-package +packages: +- conda_source: git-child-test[7ed0aa73] @ git+https://github.com/example/mono-repo.git?subdirectory=recipes&branch=main#abc123def456abc123def456abc123def456abc1 + version: 1.1.0 + build: h234567_0 + subdir: noarch + source: + path: ../src +- conda_source: git-no-manifest-subdir[f0ed072f] @ git+https://github.com/example/repo.git?tag=v1.0.0#abc123def456abc123def456abc123def456abc1 + version: 3.0.0 + build: h901237_0 + subdir: noarch + source: + path: build/subdir +- conda_source: git-sibling-test[cb1b107e] @ 
git+https://github.com/example/mono-repo.git?subdirectory=recipes&branch=main#abc123def456abc123def456abc123def456abc1 + version: 1.0.0 + build: h123456_0 + subdir: noarch + source: + path: ../non-nested +- conda_source: path-absolute-manifest[17e682f7] @ /workspace/absolute-recipe + version: 2.4.0 + build: h901236_0 + subdir: noarch + source: + path: ../src +- conda_source: path-child-test[9d062313] @ recipes/my-package + version: 2.1.0 + build: h890123_0 + subdir: noarch + source: + path: ../../src/lib +- conda_source: path-no-build-source[e11447f7] @ recipes/no-build + version: 2.5.0 + build: h901238_0 + subdir: noarch +- conda_source: path-sibling-test[2c2ab470] @ recipes/my-package + version: 2.0.0 + build: h789012_0 + subdir: noarch + source: + path: ../../other-package/src diff --git a/crates/pixi_spec/src/lib.rs b/crates/pixi_spec/src/lib.rs index ef3d17de22..3e5777d905 100644 --- a/crates/pixi_spec/src/lib.rs +++ b/crates/pixi_spec/src/lib.rs @@ -682,13 +682,15 @@ impl From for rattler_lock::source::SourceLocation { #[cfg(feature = "rattler_lock")] impl From for UrlSourceSpec { fn from(value: rattler_lock::source::UrlSourceLocation) -> Self { - let rattler_lock::source::UrlSourceLocation { url, md5, sha256 } = value; + let rattler_lock::source::UrlSourceLocation { url, md5, sha256, subdirectory } = value; Self { url, md5, sha256, - subdirectory: Subdirectory::default(), + subdirectory: subdirectory + .and_then(|s| Subdirectory::try_from(s).ok()) + .unwrap_or_default(), } } } @@ -700,6 +702,7 @@ impl From for rattler_lock::source::UrlSourceLocation { url: value.url, md5: value.md5, sha256: value.sha256, + subdirectory: value.subdirectory.to_option_string(), } } } diff --git a/crates/pypi_modifiers/src/pypi_tags.rs b/crates/pypi_modifiers/src/pypi_tags.rs index c198153670..a20874dddc 100644 --- a/crates/pypi_modifiers/src/pypi_tags.rs +++ b/crates/pypi_modifiers/src/pypi_tags.rs @@ -46,6 +46,11 @@ pub fn is_python_record(record: impl AsRef) -> bool { 
package_name_is_python(&record.as_ref().name) } +/// Returns true if the specified package name refers to a version/variant of python. +pub fn is_python_package_name(name: &PackageName) -> bool { + package_name_is_python(name) +} + /// Returns true if the specified name refers to a version/variant of python. /// TODO: Add support for more variants. pub fn package_name_is_python(record: &rattler_conda_types::PackageName) -> bool { diff --git a/examples/pixi-build/cpp-sdl/pixi.lock b/examples/pixi-build/cpp-sdl/pixi.lock index 5c218eb4d9..2860d4b4ba 100644 --- a/examples/pixi-build/cpp-sdl/pixi.lock +++ b/examples/pixi-build/cpp-sdl/pixi.lock @@ -1,136 +1,129 @@ -version: 6 +version: 7 +platforms: + - name: linux-64 + - name: win-64 + - name: osx-64 + - name: osx-arm64 environments: default: channels: - url: https://prefix.dev/pixi-build-backends/ - url: https://prefix.dev/conda-forge/ - options: - pypi-prerelease-mode: if-necessary-or-explicit packages: linux-64: - - conda: https://prefix.dev/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - - conda: https://prefix.dev/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + - conda: https://prefix.dev/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda - conda: https://prefix.dev/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/bzip2-1.0.8-hda65f42_8.conda + - conda: https://prefix.dev/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda - conda: https://prefix.dev/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda - - conda: https://prefix.dev/conda-forge/linux-64/icu-78.1-h33c6efd_0.conda - conda: https://prefix.dev/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2 - conda: https://prefix.dev/conda-forge/linux-64/libcap-2.77-h3ff7636_0.conda - conda: https://prefix.dev/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda - conda: https://prefix.dev/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda - - conda: 
https://prefix.dev/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libffi-3.5.2-h9ec8514_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda - conda: https://prefix.dev/conda-forge/linux-64/libflac-1.5.0-he200343_1.conda - - conda: https://prefix.dev/conda-forge/linux-64/libgcc-15.2.0-he0feb66_16.conda - - conda: https://prefix.dev/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_16.conda + - conda: https://prefix.dev/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda + - conda: https://prefix.dev/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda - conda: https://prefix.dev/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda - - conda: https://prefix.dev/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda - conda: https://prefix.dev/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda - conda: https://prefix.dev/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda - - conda: https://prefix.dev/conda-forge/linux-64/libgomp-15.2.0-he0feb66_16.conda + - conda: https://prefix.dev/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda - conda: https://prefix.dev/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda - - conda: https://prefix.dev/conda-forge/linux-64/liblzma-5.8.1-hb9d3cd8_2.conda + - conda: https://prefix.dev/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda - conda: https://prefix.dev/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda - - conda: https://prefix.dev/conda-forge/linux-64/libopus-1.5.2-hd0c01bc_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libopus-1.6.1-h280c20c_0.conda - conda: https://prefix.dev/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda - conda: https://prefix.dev/conda-forge/linux-64/libsndfile-1.2.2-hc7d488a_2.conda - - conda: 
https://prefix.dev/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_16.conda - - conda: https://prefix.dev/conda-forge/linux-64/libsystemd0-257.10-hd0affe5_3.conda - - conda: https://prefix.dev/conda-forge/linux-64/libudev1-257.10-hd0affe5_3.conda + - conda: https://prefix.dev/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda + - conda: https://prefix.dev/conda-forge/linux-64/libsystemd0-257.10-hd0affe5_4.conda + - conda: https://prefix.dev/conda-forge/linux-64/libudev1-257.10-hd0affe5_4.conda - conda: https://prefix.dev/conda-forge/linux-64/libunwind-1.8.3-h65a8314_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/liburing-2.13-hb700be7_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/liburing-2.14-hb700be7_0.conda - conda: https://prefix.dev/conda-forge/linux-64/libusb-1.0.29-h73b1eb8_0.conda - conda: https://prefix.dev/conda-forge/linux-64/libvorbis-1.3.7-h54a6638_2.conda - - conda: https://prefix.dev/conda-forge/linux-64/libvulkan-loader-1.4.328.1-h5279c79_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda - conda: https://prefix.dev/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda - conda: https://prefix.dev/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libxml2-16-2.15.1-hca6bf5a_1.conda - - conda: https://prefix.dev/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/libxml2-16-2.15.2-hf2a90c1_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libxml2-2.15.2-h031cc0b_0.conda - conda: https://prefix.dev/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda - conda: https://prefix.dev/conda-forge/linux-64/mpg123-1.32.9-hc50e24c_0.conda - conda: https://prefix.dev/conda-forge/linux-64/pcre2-10.47-haa7fec5_0.conda - conda: https://prefix.dev/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda - conda: 
https://prefix.dev/conda-forge/linux-64/pulseaudio-client-17.0-h9a6aba3_3.conda - conda: https://prefix.dev/conda-forge/linux-64/sdl2-2.32.56-h54a6638_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/sdl3-3.2.30-h3b84278_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/sdl3-3.4.2-hdeec2a5_0.conda - conda: https://prefix.dev/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda - - conda: https://prefix.dev/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libx11-1.8.12-h4f16b4b_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb03c661_1.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxscrnsaver-1.2.4-hb9d3cd8_0.conda - - conda: . - build: hb0f4dca_0 + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda + - source: sdl_example[bd431642] @ . 
osx-64: - - conda: https://prefix.dev/conda-forge/osx-64/bzip2-1.0.8-h500dc9f_8.conda + - conda: https://prefix.dev/conda-forge/osx-64/bzip2-1.0.8-h500dc9f_9.conda - conda: https://prefix.dev/conda-forge/osx-64/dbus-1.16.2-h6e7f9a9_1.conda - conda: https://prefix.dev/conda-forge/osx-64/libcxx-22.1.0-h19cb2f5_1.conda - - conda: https://prefix.dev/conda-forge/osx-64/libexpat-2.7.3-heffb93a_0.conda - - conda: https://prefix.dev/conda-forge/osx-64/libffi-3.5.2-h750e83c_0.conda - - conda: https://prefix.dev/conda-forge/osx-64/libglib-2.86.3-hf241ffe_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/libexpat-2.7.4-h991f03e_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/libffi-3.5.2-hd1f9c09_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/libglib-2.86.4-hec30fc1_1.conda - conda: https://prefix.dev/conda-forge/osx-64/libiconv-1.18-h57a12c2_2.conda - conda: https://prefix.dev/conda-forge/osx-64/libintl-0.25.1-h3184127_1.conda - conda: https://prefix.dev/conda-forge/osx-64/libusb-1.0.29-h2287256_0.conda - - conda: https://prefix.dev/conda-forge/osx-64/libvulkan-loader-1.4.328.1-hfc0b2d5_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/libvulkan-loader-1.4.341.0-ha6bc089_0.conda - conda: https://prefix.dev/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda - conda: https://prefix.dev/conda-forge/osx-64/pcre2-10.47-h13923f0_0.conda - conda: https://prefix.dev/conda-forge/osx-64/sdl2-2.32.56-h53ec75d_0.conda - - conda: https://prefix.dev/conda-forge/osx-64/sdl3-3.2.30-h59d2431_0.conda - - conda: . - build: h0dc7051_0 + - conda: https://prefix.dev/conda-forge/osx-64/sdl3-3.4.2-hf9078ff_0.conda + - source: sdl_example[2815fc71] @ . 
osx-arm64: - - conda: https://prefix.dev/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_8.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda - conda: https://prefix.dev/conda-forge/osx-arm64/dbus-1.16.2-h3ff7a7c_1.conda - conda: https://prefix.dev/conda-forge/osx-arm64/libcxx-22.1.0-h55c6f16_1.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/libexpat-2.7.3-haf25636_0.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/libffi-3.5.2-he5f378a_0.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/libglib-2.86.3-hfe11c1f_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libexpat-2.7.4-hf6b4638_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libglib-2.86.4-he378b5c_1.conda - conda: https://prefix.dev/conda-forge/osx-arm64/libiconv-1.18-h23cfdf5_2.conda - conda: https://prefix.dev/conda-forge/osx-arm64/libintl-0.25.1-h493aca8_0.conda - conda: https://prefix.dev/conda-forge/osx-arm64/libusb-1.0.29-hbc156a2_0.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/libvulkan-loader-1.4.328.1-h49c215f_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libvulkan-loader-1.4.341.0-h3feff0a_0.conda - conda: https://prefix.dev/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda - conda: https://prefix.dev/conda-forge/osx-arm64/pcre2-10.47-h30297fc_0.conda - conda: https://prefix.dev/conda-forge/osx-arm64/sdl2-2.32.56-h248ca61_0.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/sdl3-3.2.30-h6fa9c73_0.conda - - conda: . - build: h60d57d3_0 + - conda: https://prefix.dev/conda-forge/osx-arm64/sdl3-3.4.2-h6fa9c73_0.conda + - source: sdl_example[f88715ba] @ . 
win-64: - conda: https://prefix.dev/conda-forge/win-64/libusb-1.0.29-h1839187_0.conda - - conda: https://prefix.dev/conda-forge/win-64/libvulkan-loader-1.4.328.1-h477610d_0.conda + - conda: https://prefix.dev/conda-forge/win-64/libvulkan-loader-1.4.341.0-h477610d_0.conda - conda: https://prefix.dev/conda-forge/win-64/sdl2-2.32.56-h5112557_0.conda - - conda: https://prefix.dev/conda-forge/win-64/sdl3-3.2.30-h5112557_0.conda + - conda: https://prefix.dev/conda-forge/win-64/sdl3-3.4.2-h5112557_0.conda - conda: https://prefix.dev/conda-forge/win-64/ucrt-10.0.26100.0-h57928b3_0.conda - conda: https://prefix.dev/conda-forge/win-64/vc-14.3-h41ae7f8_34.conda - conda: https://prefix.dev/conda-forge/win-64/vc14_runtime-14.44.35208-h818238b_34.conda - conda: https://prefix.dev/conda-forge/win-64/vcomp14-14.44.35208-h818238b_34.conda - - conda: . - build: h659f713_0 + - source: sdl_example[6711f831] @ . packages: -- conda: https://prefix.dev/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726 - md5: d7c89558ba9fa0495403155b64376d81 - license: None - size: 2562 - timestamp: 1578324546067 -- conda: https://prefix.dev/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - build_number: 16 - sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22 - md5: 73aaf86a425cc6e73fcf236a5a46396d - depends: - - _libgcc_mutex 0.1 conda_forge +- conda: https://prefix.dev/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda + build_number: 20 + sha256: 1dd3fffd892081df9726d7eb7e0dea6198962ba775bd88842135a4ddb4deb3c9 + md5: a9f577daf3de00bca7c3c76c0ecbd1de + depends: + - __glibc >=2.17,<3.0.a0 - libgomp >=7.5.0 constrains: - - openmp_impl 9999 + - openmp_impl <0.0a0 license: BSD-3-Clause license_family: BSD - size: 23621 - timestamp: 1650670423406 + size: 28948 + timestamp: 1770939786096 - conda: https://prefix.dev/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda sha256: 
a9c114cbfeda42a226e2db1809a538929d2f118ef855372293bd188f71711c48 md5: 791365c5f65975051e4e017b5da3abf5 @@ -141,34 +134,34 @@ packages: license_family: GPL size: 68072 timestamp: 1756738968573 -- conda: https://prefix.dev/conda-forge/linux-64/bzip2-1.0.8-hda65f42_8.conda - sha256: c30daba32ddebbb7ded490f0e371eae90f51e72db620554089103b4a6934b0d5 - md5: 51a19bba1b8ebfb60df25cde030b7ebc +- conda: https://prefix.dev/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda + sha256: 0b75d45f0bba3e95dc693336fa51f40ea28c980131fec438afb7ce6118ed05f6 + md5: d2ffd7602c02f2b316fd921d39876885 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=14 license: bzip2-1.0.6 license_family: BSD - size: 260341 - timestamp: 1757437258798 -- conda: https://prefix.dev/conda-forge/osx-64/bzip2-1.0.8-h500dc9f_8.conda - sha256: 8f50b58efb29c710f3cecf2027a8d7325ba769ab10c746eff75cea3ac050b10c - md5: 97c4b3bd8a90722104798175a1bdddbf + size: 260182 + timestamp: 1771350215188 +- conda: https://prefix.dev/conda-forge/osx-64/bzip2-1.0.8-h500dc9f_9.conda + sha256: 9f242f13537ef1ce195f93f0cc162965d6cc79da578568d6d8e50f70dd025c42 + md5: 4173ac3b19ec0a4f400b4f782910368b depends: - __osx >=10.13 license: bzip2-1.0.6 license_family: BSD - size: 132607 - timestamp: 1757437730085 -- conda: https://prefix.dev/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_8.conda - sha256: b456200636bd5fecb2bec63f7e0985ad2097cf1b83d60ce0b6968dffa6d02aa1 - md5: 58fd217444c2a5701a44244faf518206 + size: 133427 + timestamp: 1771350680709 +- conda: https://prefix.dev/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda + sha256: 540fe54be35fac0c17feefbdc3e29725cce05d7367ffedfaaa1bdda234b019df + md5: 620b85a3f45526a8bc4d23fd78fc22f0 depends: - __osx >=11.0 license: bzip2-1.0.6 license_family: BSD - size: 125061 - timestamp: 1757437486465 + size: 124834 + timestamp: 1771350416561 - conda: https://prefix.dev/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda sha256: 8bb557af1b2b7983cf56292336a1a1853f26555d9c6cecf1e5b2b96838c9da87 md5: 
ce96f2f470d39bd96ce03945af92e280 @@ -206,17 +199,6 @@ packages: license: AFL-2.1 OR GPL-2.0-or-later size: 393811 timestamp: 1764536084131 -- conda: https://prefix.dev/conda-forge/linux-64/icu-78.1-h33c6efd_0.conda - sha256: 7d6463d0be5092b2ae8f2fad34dc84de83eab8bd44cc0d4be8931881c973c48f - md5: 518e9bbbc3e3486d6a4519192ba690f8 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - libstdcxx >=14 - license: MIT - license_family: MIT - size: 12722920 - timestamp: 1766299101259 - conda: https://prefix.dev/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2 sha256: aad2a703b9d7b038c0f745b853c6bb5f122988fe1a7a096e0e606d9cbec4eaab md5: a8832b479f93521a9e7b5b743803be51 @@ -275,68 +257,68 @@ packages: license: LicenseRef-libglvnd size: 44840 timestamp: 1731330973553 -- conda: https://prefix.dev/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda - sha256: 1e1b08f6211629cbc2efe7a5bca5953f8f6b3cae0eeb04ca4dacee1bd4e2db2f - md5: 8b09ae86839581147ef2e5c5e229d164 +- conda: https://prefix.dev/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda + sha256: d78f1d3bea8c031d2f032b760f36676d87929b18146351c4464c66b0869df3f5 + md5: e7f7ce06ec24cfcfb9e36d28cf82ba57 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=14 constrains: - - expat 2.7.3.* + - expat 2.7.4.* license: MIT license_family: MIT - size: 76643 - timestamp: 1763549731408 -- conda: https://prefix.dev/conda-forge/osx-64/libexpat-2.7.3-heffb93a_0.conda - sha256: d11b3a6ce5b2e832f430fd112084533a01220597221bee16d6c7dc3947dffba6 - md5: 222e0732a1d0780a622926265bee14ef + size: 76798 + timestamp: 1771259418166 +- conda: https://prefix.dev/conda-forge/osx-64/libexpat-2.7.4-h991f03e_0.conda + sha256: 8d9d79b2de7d6f335692391f5281607221bf5d040e6724dad4c4d77cd603ce43 + md5: a684eb8a19b2aa68fde0267df172a1e3 depends: - __osx >=10.13 constrains: - - expat 2.7.3.* + - expat 2.7.4.* license: MIT license_family: MIT - size: 74058 - timestamp: 1763549886493 -- conda: https://prefix.dev/conda-forge/osx-arm64/libexpat-2.7.3-haf25636_0.conda 
- sha256: fce22610ecc95e6d149e42a42fbc3cc9d9179bd4eb6232639a60f06e080eec98 - md5: b79875dbb5b1db9a4a22a4520f918e1a + size: 74578 + timestamp: 1771260142624 +- conda: https://prefix.dev/conda-forge/osx-arm64/libexpat-2.7.4-hf6b4638_0.conda + sha256: 03887d8080d6a8fe02d75b80929271b39697ecca7628f0657d7afaea87761edf + md5: a92e310ae8dfc206ff449f362fc4217f depends: - __osx >=11.0 constrains: - - expat 2.7.3.* + - expat 2.7.4.* license: MIT license_family: MIT - size: 67800 - timestamp: 1763549994166 -- conda: https://prefix.dev/conda-forge/linux-64/libffi-3.5.2-h9ec8514_0.conda - sha256: 25cbdfa65580cfab1b8d15ee90b4c9f1e0d72128f1661449c9a999d341377d54 - md5: 35f29eec58405aaf55e01cb470d8c26a + size: 68199 + timestamp: 1771260020767 +- conda: https://prefix.dev/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda + sha256: 31f19b6a88ce40ebc0d5a992c131f57d919f73c0b92cd1617a5bec83f6e961e6 + md5: a360c33a5abe61c07959e449fa1453eb depends: - __glibc >=2.17,<3.0.a0 - libgcc >=14 license: MIT license_family: MIT - size: 57821 - timestamp: 1760295480630 -- conda: https://prefix.dev/conda-forge/osx-64/libffi-3.5.2-h750e83c_0.conda - sha256: 277dc89950f5d97f1683f26e362d6dca3c2efa16cb2f6fdb73d109effa1cd3d0 - md5: d214916b24c625bcc459b245d509f22e + size: 58592 + timestamp: 1769456073053 +- conda: https://prefix.dev/conda-forge/osx-64/libffi-3.5.2-hd1f9c09_0.conda + sha256: 951958d1792238006fdc6fce7f71f1b559534743b26cc1333497d46e5903a2d6 + md5: 66a0dc7464927d0853b590b6f53ba3ea depends: - __osx >=10.13 license: MIT license_family: MIT - size: 52573 - timestamp: 1760295626449 -- conda: https://prefix.dev/conda-forge/osx-arm64/libffi-3.5.2-he5f378a_0.conda - sha256: 9b8acdf42df61b7bfe8bdc545c016c29e61985e79748c64ad66df47dbc2e295f - md5: 411ff7cd5d1472bba0f55c0faf04453b + size: 53583 + timestamp: 1769456300951 +- conda: https://prefix.dev/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda + sha256: 6686a26466a527585e6a75cc2a242bf4a3d97d6d6c86424a441677917f28bec7 + md5: 
43c04d9cb46ef176bb2a4c77e324d599 depends: - __osx >=11.0 license: MIT license_family: MIT - size: 40251 - timestamp: 1760295839166 + size: 40979 + timestamp: 1769456747661 - conda: https://prefix.dev/conda-forge/linux-64/libflac-1.5.0-he200343_1.conda sha256: e755e234236bdda3d265ae82e5b0581d259a9279e3e5b31d745dc43251ad64fb md5: 47595b9d53054907a00d95e4d47af1d6 @@ -350,28 +332,28 @@ packages: license_family: BSD size: 424563 timestamp: 1764526740626 -- conda: https://prefix.dev/conda-forge/linux-64/libgcc-15.2.0-he0feb66_16.conda - sha256: 6eed58051c2e12b804d53ceff5994a350c61baf117ec83f5f10c953a3f311451 - md5: 6d0363467e6ed84f11435eb309f2ff06 +- conda: https://prefix.dev/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda + sha256: faf7d2017b4d718951e3a59d081eb09759152f93038479b768e3d612688f83f5 + md5: 0aa00f03f9e39fb9876085dee11a85d4 depends: - __glibc >=2.17,<3.0.a0 - _openmp_mutex >=4.5 constrains: - - libgcc-ng ==15.2.0=*_16 - - libgomp 15.2.0 he0feb66_16 + - libgcc-ng ==15.2.0=*_18 + - libgomp 15.2.0 he0feb66_18 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL - size: 1042798 - timestamp: 1765256792743 -- conda: https://prefix.dev/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_16.conda - sha256: 5f07f9317f596a201cc6e095e5fc92621afca64829785e483738d935f8cab361 - md5: 5a68259fac2da8f2ee6f7bfe49c9eb8b + size: 1041788 + timestamp: 1771378212382 +- conda: https://prefix.dev/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda + sha256: e318a711400f536c81123e753d4c797a821021fb38970cebfb3f454126016893 + md5: d5e96b1ed75ca01906b3d2469b4ce493 depends: - - libgcc 15.2.0 he0feb66_16 + - libgcc 15.2.0 he0feb66_18 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL - size: 27256 - timestamp: 1765256804124 + size: 27526 + timestamp: 1771378224552 - conda: https://prefix.dev/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda sha256: dc2752241fa3d9e40ce552c1942d0a4b5eeb93740c9723873f6fcf8d39ef8d2d md5: 928b8be80851f5d8ffb016f9c81dae7a @@ -382,9 
+364,9 @@ packages: license: LicenseRef-libglvnd size: 134712 timestamp: 1731330998354 -- conda: https://prefix.dev/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda - sha256: 82d6c2ee9f548c84220fb30fb1b231c64a53561d6e485447394f0a0eeeffe0e6 - md5: 034bea55a4feef51c98e8449938e9cee +- conda: https://prefix.dev/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda + sha256: a27e44168a1240b15659888ce0d9b938ed4bdb49e9ea68a7c1ff27bcea8b55ce + md5: bb26456332b07f68bf3b7622ed71c0da depends: - __glibc >=2.17,<3.0.a0 - libffi >=3.5.2,<3.6.0a0 @@ -393,28 +375,28 @@ packages: - libzlib >=1.3.1,<2.0a0 - pcre2 >=10.47,<10.48.0a0 constrains: - - glib 2.86.3 *_0 + - glib 2.86.4 *_1 license: LGPL-2.1-or-later - size: 3946542 - timestamp: 1765221858705 -- conda: https://prefix.dev/conda-forge/osx-64/libglib-2.86.3-hf241ffe_0.conda - sha256: d205ecdd0873dd92f7b55ac9b266b2eb09236ff5f3b26751579e435bbaed499c - md5: 584ce14b08050d3f1a25ab429b9360bc + size: 4398701 + timestamp: 1771863239578 +- conda: https://prefix.dev/conda-forge/osx-64/libglib-2.86.4-hec30fc1_1.conda + sha256: d45fd67e18e793aeb2485a7efe3e882df594601ed6136ed1863c56109e4ad9e3 + md5: b8437d8dc24f46da3565d7f0c5a96d45 depends: - - __osx >=10.13 + - __osx >=11.0 - libffi >=3.5.2,<3.6.0a0 - libiconv >=1.18,<2.0a0 - libintl >=0.25.1,<1.0a0 - libzlib >=1.3.1,<2.0a0 - pcre2 >=10.47,<10.48.0a0 constrains: - - glib 2.86.3 *_0 + - glib 2.86.4 *_1 license: LGPL-2.1-or-later - size: 3708599 - timestamp: 1765222438844 -- conda: https://prefix.dev/conda-forge/osx-arm64/libglib-2.86.3-hfe11c1f_0.conda - sha256: 801c1835aa35a4f6e45e2192ad668bd7238d95c90ef8f02c52ce859c20117285 - md5: 057c7247514048ebdaf89373b263ebee + size: 4186085 + timestamp: 1771863964173 +- conda: https://prefix.dev/conda-forge/osx-arm64/libglib-2.86.4-he378b5c_1.conda + sha256: a4254a241a96198e019ced2e0d2967e4c0ef64fac32077a45c065b32dc2b15d2 + md5: 673069f6725ed7b1073f9b96094294d1 depends: - __osx >=11.0 - libffi >=3.5.2,<3.6.0a0 @@ -423,10 +405,10 @@ packages: - 
libzlib >=1.3.1,<2.0a0 - pcre2 >=10.47,<10.48.0a0 constrains: - - glib 2.86.3 *_0 + - glib 2.86.4 *_1 license: LGPL-2.1-or-later - size: 3670602 - timestamp: 1765223125237 + size: 4108927 + timestamp: 1771864169970 - conda: https://prefix.dev/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda sha256: 1175f8a7a0c68b7f81962699751bb6574e6f07db4c9f72825f978e3016f46850 md5: 434ca7e50e40f4918ab701e3facd59a0 @@ -445,15 +427,15 @@ packages: license: LicenseRef-libglvnd size: 75504 timestamp: 1731330988898 -- conda: https://prefix.dev/conda-forge/linux-64/libgomp-15.2.0-he0feb66_16.conda - sha256: 5b3e5e4e9270ecfcd48f47e3a68f037f5ab0f529ccb223e8e5d5ac75a58fc687 - md5: 26c46f90d0e727e95c6c9498a33a09f3 +- conda: https://prefix.dev/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda + sha256: 21337ab58e5e0649d869ab168d4e609b033509de22521de1bfed0c031bfc5110 + md5: 239c5e9546c38a1e884d69effcf4c882 depends: - __glibc >=2.17,<3.0.a0 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL - size: 603284 - timestamp: 1765256703881 + size: 603262 + timestamp: 1771378117851 - conda: https://prefix.dev/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda sha256: c467851a7312765447155e071752d7bf9bf44d610a5687e32706f480aad2833f md5: 915f5995e94f60e9a4826e0b0920ee88 @@ -497,17 +479,17 @@ packages: license: LGPL-2.1-or-later size: 90957 timestamp: 1751558394144 -- conda: https://prefix.dev/conda-forge/linux-64/liblzma-5.8.1-hb9d3cd8_2.conda - sha256: f2591c0069447bbe28d4d696b7fcb0c5bd0b4ac582769b89addbcf26fb3430d8 - md5: 1a580f7796c7bf6393fddb8bbbde58dc +- conda: https://prefix.dev/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda + sha256: 755c55ebab181d678c12e49cced893598f2bab22d582fbbf4d8b83c18be207eb + md5: c7c83eecbb72d88b940c249af56c8b17 depends: - __glibc >=2.17,<3.0.a0 - - libgcc >=13 + - libgcc >=14 constrains: - - xz 5.8.1.* + - xz 5.8.2.* license: 0BSD - size: 112894 - timestamp: 1749230047870 + size: 113207 + timestamp: 1768752626120 - conda: 
https://prefix.dev/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda sha256: ffb066ddf2e76953f92e06677021c73c85536098f1c21fcd15360dbc859e22e4 md5: 68e52064ed3897463c0e958ab5c8f91b @@ -518,16 +500,16 @@ packages: license_family: BSD size: 218500 timestamp: 1745825989535 -- conda: https://prefix.dev/conda-forge/linux-64/libopus-1.5.2-hd0c01bc_0.conda - sha256: 786d43678d6d1dc5f88a6bad2d02830cfd5a0184e84a8caa45694049f0e3ea5f - md5: b64523fb87ac6f87f0790f324ad43046 +- conda: https://prefix.dev/conda-forge/linux-64/libopus-1.6.1-h280c20c_0.conda + sha256: f1061a26213b9653bbb8372bfa3f291787ca091a9a3060a10df4d5297aad74fd + md5: 2446ac1fe030c2aa6141386c1f5a6aed depends: - - libgcc >=13 - __glibc >=2.17,<3.0.a0 + - libgcc >=14 license: BSD-3-Clause license_family: BSD - size: 312472 - timestamp: 1744330953241 + size: 324993 + timestamp: 1768497114401 - conda: https://prefix.dev/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda sha256: 0bd91de9b447a2991e666f284ae8c722ffb1d84acb594dbd0c031bd656fa32b2 md5: 70e3400cbbfa03e96dcde7fc13e38c7b @@ -555,38 +537,38 @@ packages: license_family: LGPL size: 355619 timestamp: 1765181778282 -- conda: https://prefix.dev/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_16.conda - sha256: 813427918316a00c904723f1dfc3da1bbc1974c5cfe1ed1e704c6f4e0798cbc6 - md5: 68f68355000ec3f1d6f26ea13e8f525f +- conda: https://prefix.dev/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda + sha256: 78668020064fdaa27e9ab65cd2997e2c837b564ab26ce3bf0e58a2ce1a525c6e + md5: 1b08cd684f34175e4514474793d44bcb depends: - __glibc >=2.17,<3.0.a0 - - libgcc 15.2.0 he0feb66_16 + - libgcc 15.2.0 he0feb66_18 constrains: - - libstdcxx-ng ==15.2.0=*_16 + - libstdcxx-ng ==15.2.0=*_18 license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL - size: 5856456 - timestamp: 1765256838573 -- conda: https://prefix.dev/conda-forge/linux-64/libsystemd0-257.10-hd0affe5_3.conda - sha256: b3a7f89462dc95c1bba9f663210d20ff3ac5f7db458684e0f3a7ae5784f8c132 - md5: 
70d1de6301b58ed99fea01490a9802a3 + size: 5852330 + timestamp: 1771378262446 +- conda: https://prefix.dev/conda-forge/linux-64/libsystemd0-257.10-hd0affe5_4.conda + sha256: f0356bb344a684e7616fc84675cfca6401140320594e8686be30e8ac7547aed2 + md5: 1d4c18d75c51ed9d00092a891a547a7d depends: - __glibc >=2.17,<3.0.a0 - libcap >=2.77,<2.78.0a0 - libgcc >=14 license: LGPL-2.1-or-later - size: 491268 - timestamp: 1765552759709 -- conda: https://prefix.dev/conda-forge/linux-64/libudev1-257.10-hd0affe5_3.conda - sha256: 977e7e4955ea1581e441e429c2c1b498bc915767f1cac77a97b283c469d5298c - md5: 3934f4cf65a06100d526b33395fb9cd2 + size: 491953 + timestamp: 1770738638119 +- conda: https://prefix.dev/conda-forge/linux-64/libudev1-257.10-hd0affe5_4.conda + sha256: ed4d2c01fbeb1330f112f7e399408634db277d3dfb2dec1d0395f56feaa24351 + md5: 6c74fba677b61a0842cbf0f63eee683b depends: - __glibc >=2.17,<3.0.a0 - libcap >=2.77,<2.78.0a0 - libgcc >=14 license: LGPL-2.1-or-later - size: 145023 - timestamp: 1765552781358 + size: 144654 + timestamp: 1770738650966 - conda: https://prefix.dev/conda-forge/linux-64/libunwind-1.8.3-h65a8314_0.conda sha256: 71c8b9d5c72473752a0bb6e91b01dd209a03916cb71f36cc6a564e3a2a132d7a md5: e179a69edd30d75c0144d7a380b88f28 @@ -598,17 +580,17 @@ packages: license_family: MIT size: 75995 timestamp: 1757032240102 -- conda: https://prefix.dev/conda-forge/linux-64/liburing-2.13-hb700be7_0.conda - sha256: 5e4863d8cc9ccba7884f68d5b3c4b4f44a5a836ad7d3b332ac9aaaef0c0b9d45 - md5: 60adb61326a4a0072ed238f460b02029 +- conda: https://prefix.dev/conda-forge/linux-64/liburing-2.14-hb700be7_0.conda + sha256: 3d17b7aa90610afc65356e9e6149aeac0b2df19deda73a51f0a09cf04fd89286 + md5: 56f65185b520e016d29d01657ac02c0d depends: - __glibc >=2.17,<3.0.a0 - libgcc >=14 - libstdcxx >=14 license: MIT license_family: MIT - size: 132334 - timestamp: 1765872504784 + size: 154203 + timestamp: 1770566529700 - conda: https://prefix.dev/conda-forge/linux-64/libusb-1.0.29-h73b1eb8_0.conda sha256: 
89c84f5b26028a9d0f5c4014330703e7dff73ba0c98f90103e9cef6b43a5323c md5: d17e3fb595a9f24fa9e149239a33475d @@ -662,61 +644,58 @@ packages: license_family: BSD size: 285894 timestamp: 1753879378005 -- conda: https://prefix.dev/conda-forge/linux-64/libvulkan-loader-1.4.328.1-h5279c79_0.conda - sha256: bbabc5c48b63ff03f440940a11d4648296f5af81bb7630d98485405cd32ac1ce - md5: 372a62464d47d9e966b630ffae3abe73 +- conda: https://prefix.dev/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda + sha256: a68280d57dfd29e3d53400409a39d67c4b9515097eba733aa6fe00c880620e2b + md5: 31ad065eda3c2d88f8215b1289df9c89 depends: - __glibc >=2.17,<3.0.a0 - libstdcxx >=14 - libgcc >=14 - xorg-libx11 >=1.8.12,<2.0a0 - - xorg-libxrandr >=1.5.4,<2.0a0 + - xorg-libxrandr >=1.5.5,<2.0a0 constrains: - - libvulkan-headers 1.4.328.1.* + - libvulkan-headers 1.4.341.0.* license: Apache-2.0 license_family: APACHE - size: 197672 - timestamp: 1759972155030 -- conda: https://prefix.dev/conda-forge/osx-64/libvulkan-loader-1.4.328.1-hfc0b2d5_0.conda - sha256: edb4f98fd148b8e5e7a6fc8bc7dc56322a4a9e02b66239a6dd2a1e8529f0bb18 - md5: fd024b256ad86089211ceec4a757c030 + size: 199795 + timestamp: 1770077125520 +- conda: https://prefix.dev/conda-forge/osx-64/libvulkan-loader-1.4.341.0-ha6bc089_0.conda + sha256: ce9bc992ffffdefbde5f7977b0a3ad9036650f8323611e4024908755891674e0 + md5: dcce6338514e65c2b7fdf172f1264561 depends: - - libcxx >=19 - __osx >=10.13 + - libcxx >=19 constrains: - - libvulkan-headers 1.4.328.1.* + - libvulkan-headers 1.4.341.0.* license: Apache-2.0 license_family: APACHE - size: 180230 - timestamp: 1759972143485 -- conda: https://prefix.dev/conda-forge/osx-arm64/libvulkan-loader-1.4.328.1-h49c215f_0.conda - sha256: 7cdf4f61f38dad4765762d1e8f916c81e8221414911012f8aba294f5dce0e0ba - md5: 978586f8c141eed794868a8f9834e3b0 + size: 182703 + timestamp: 1770077140315 +- conda: https://prefix.dev/conda-forge/osx-arm64/libvulkan-loader-1.4.341.0-h3feff0a_0.conda + sha256: 
d2790dafc9149b1acd45b9033d02cfa3f3e9ee5af97bd61e0a5718c414a0a135 + md5: 6b4c9a5b130759136a0dde0c373cb0ea depends: - - libcxx >=19 - __osx >=11.0 + - libcxx >=19 constrains: - - libvulkan-headers 1.4.328.1.* + - libvulkan-headers 1.4.341.0.* license: Apache-2.0 license_family: APACHE - size: 177829 - timestamp: 1759972150912 -- conda: https://prefix.dev/conda-forge/win-64/libvulkan-loader-1.4.328.1-h477610d_0.conda - sha256: 934d676c445c1ea010753dfa98680b36a72f28bec87d15652f013c91a1d8d171 - md5: 4403eae6c81f448d63a7f66c0b330536 + size: 180304 + timestamp: 1770077143460 +- conda: https://prefix.dev/conda-forge/win-64/libvulkan-loader-1.4.341.0-h477610d_0.conda + sha256: 0f0965edca8b255187604fc7712c53fe9064b31a1845a7dfb2b63bf660de84a7 + md5: 804880b2674119b84277d6c16b01677d depends: - vc >=14.3,<15 - vc14_runtime >=14.44.35208 - ucrt >=10.0.20348.0 - - vc >=14.3,<15 - - vc14_runtime >=14.44.35208 - - ucrt >=10.0.20348.0 constrains: - - libvulkan-headers 1.4.328.1.* + - libvulkan-headers 1.4.341.0.* license: Apache-2.0 license_family: APACHE - size: 280488 - timestamp: 1759972163692 + size: 282251 + timestamp: 1770077165680 - conda: https://prefix.dev/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda sha256: 666c0c431b23c6cec6e492840b176dde533d48b7e6fb8883f5071223433776aa md5: 92ed62436b625154323d40d5f2f11dd7 @@ -746,37 +725,38 @@ packages: license_family: MIT size: 837922 timestamp: 1764794163823 -- conda: https://prefix.dev/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda - sha256: 047be059033c394bd32ae5de66ce389824352120b3a7c0eff980195f7ed80357 - md5: 417955234eccd8f252b86a265ccdab7f +- conda: https://prefix.dev/conda-forge/linux-64/libxml2-2.15.2-h031cc0b_0.conda + sha256: a9612f88139197b2777a00325c72d872507e70d4f4111021f65e55797f97de67 + md5: 672c49f67192f0a7c2fa55986219d197 depends: - __glibc >=2.17,<3.0.a0 - - icu >=78.1,<79.0a0 - libgcc >=14 - libiconv >=1.18,<2.0a0 - - liblzma >=5.8.1,<6.0a0 - - libxml2-16 2.15.1 hca6bf5a_1 + - liblzma >=5.8.2,<6.0a0 + - 
libxml2-16 2.15.2 hf2a90c1_0 - libzlib >=1.3.1,<2.0a0 + constrains: + - icu <0.0a0 license: MIT license_family: MIT - size: 45402 - timestamp: 1766327161688 -- conda: https://prefix.dev/conda-forge/linux-64/libxml2-16-2.15.1-hca6bf5a_1.conda - sha256: 8331284bf9ae641b70cdc0e5866502dd80055fc3b9350979c74bb1d192e8e09e - md5: 3fdd8d99683da9fe279c2f4cecd1e048 + size: 46783 + timestamp: 1772704627225 +- conda: https://prefix.dev/conda-forge/linux-64/libxml2-16-2.15.2-hf2a90c1_0.conda + sha256: 9448a9080c8c2d32964f4005a75bf9f5879e4e3163de23f8efe361f1d6234e2b + md5: e44f52764dd288c96c4676a967b7e112 depends: - __glibc >=2.17,<3.0.a0 - - icu >=78.1,<79.0a0 - libgcc >=14 - libiconv >=1.18,<2.0a0 - - liblzma >=5.8.1,<6.0a0 + - liblzma >=5.8.2,<6.0a0 - libzlib >=1.3.1,<2.0a0 constrains: - - libxml2 2.15.1 + - libxml2 2.15.2 + - icu <0.0a0 license: MIT license_family: MIT - size: 555747 - timestamp: 1766327145986 + size: 556790 + timestamp: 1772704614138 - conda: https://prefix.dev/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda sha256: d4bfe88d7cb447768e31650f06257995601f89076080e76df55e3112d4e47dc4 md5: edb0dca6bc32e4f4789199455a1dbeb8 @@ -931,94 +911,72 @@ packages: license: Zlib size: 572101 timestamp: 1757842925694 -- conda: https://prefix.dev/conda-forge/linux-64/sdl3-3.2.30-h3b84278_0.conda - sha256: baff0dc170b83d2633093e25878d51db65a5d68200f1242db894fcd64e73a9f6 - md5: e275a47f63cca221ba9da6441c976ae2 +- conda: https://prefix.dev/conda-forge/linux-64/sdl3-3.4.2-hdeec2a5_0.conda + sha256: 64b982664550e01c25f8f09333c0ee54d4764a80fe8636b8aaf881fe6e8a0dbe + md5: 88a69db027a8ff59dab972a09d69a1ab depends: - - libgcc >=14 - - libstdcxx >=14 - __glibc >=2.17,<3.0.a0 + - libstdcxx >=14 + - libgcc >=14 + - xorg-libxscrnsaver >=1.2.4,<2.0a0 + - libdrm >=2.4.125,<2.5.0a0 + - xorg-libxfixes >=6.0.2,<7.0a0 - libudev1 >=257.10 - - xorg-libxext >=1.3.6,<2.0a0 - - libxkbcommon >=1.13.1,<2.0a0 - - wayland >=1.24.0,<2.0a0 + - pulseaudio-client >=17.0,<17.1.0a0 + - xorg-libxtst 
>=1.2.5,<2.0a0 - libegl >=1.7.0,<2.0a0 - - dbus >=1.16.2,<2.0a0 - - xorg-libxfixes >=6.0.2,<7.0a0 - - libunwind >=1.8.3,<1.9.0a0 - - libvulkan-loader >=1.4.328.1,<2.0a0 + - libvulkan-loader >=1.4.341.0,<2.0a0 - xorg-libxcursor >=1.2.3,<2.0a0 - - libdrm >=2.4.125,<2.5.0a0 - - pulseaudio-client >=17.0,<17.1.0a0 - - xorg-libxscrnsaver >=1.2.4,<2.0a0 - - liburing >=2.13,<2.14.0a0 - - xorg-libx11 >=1.8.12,<2.0a0 - - libgl >=1.7.0,<2.0a0 + - xorg-libx11 >=1.8.13,<2.0a0 + - liburing >=2.14,<2.15.0a0 + - libxkbcommon >=1.13.1,<2.0a0 + - libunwind >=1.8.3,<1.9.0a0 - libusb >=1.0.29,<2.0a0 + - dbus >=1.16.2,<2.0a0 + - xorg-libxext >=1.3.7,<2.0a0 + - libgl >=1.7.0,<2.0a0 + - xorg-libxi >=1.8.2,<2.0a0 + - wayland >=1.24.0,<2.0a0 license: Zlib - size: 1938719 - timestamp: 1767236277588 -- conda: https://prefix.dev/conda-forge/osx-64/sdl3-3.2.30-h59d2431_0.conda - sha256: 0cfb0e6feebc9f23a42750347d98ca0058d42acc19cf9d1b185b4af26bdf7c26 - md5: 6313dace95d93549a1d2aa6d945cee39 + size: 2138749 + timestamp: 1771668185803 +- conda: https://prefix.dev/conda-forge/osx-64/sdl3-3.4.2-hf9078ff_0.conda + sha256: b3939796f728f52be95a0f95c89bfd890af3a613fe7f6ab17c9fb6ea477812e8 + md5: bb826b1b04460daaef656f99432a5770 depends: + - __osx >=11.0 - libcxx >=19 - - __osx >=10.13 - - libvulkan-loader >=1.4.328.1,<2.0a0 - dbus >=1.16.2,<2.0a0 - libusb >=1.0.29,<2.0a0 + - libvulkan-loader >=1.4.341.0,<2.0a0 license: Zlib - size: 1551249 - timestamp: 1767236311096 -- conda: https://prefix.dev/conda-forge/osx-arm64/sdl3-3.2.30-h6fa9c73_0.conda - sha256: 06c6f18b5e92eb0fab77066de8dd86c46df5a77b1bef087431eca49693a6e929 - md5: 0c203deff0f6d7edec03deced20bfbeb + size: 1696561 + timestamp: 1771668206362 +- conda: https://prefix.dev/conda-forge/osx-arm64/sdl3-3.4.2-h6fa9c73_0.conda + sha256: e0589f700a9e9c188ba54c7ba5482885dc2e025f01de30fab098896cd6fda0a3 + md5: 5e999442b4391dcd702f6026ac1a23f2 depends: - libcxx >=19 - __osx >=11.0 - libusb >=1.0.29,<2.0a0 + - libvulkan-loader >=1.4.341.0,<2.0a0 - dbus 
>=1.16.2,<2.0a0 - - libvulkan-loader >=1.4.328.1,<2.0a0 license: Zlib - size: 1414980 - timestamp: 1767236319820 -- conda: https://prefix.dev/conda-forge/win-64/sdl3-3.2.30-h5112557_0.conda - sha256: 6b02bf3f1924bcf3d984a0535528e0b39ba99c5edc758f0d167b39cd33545479 - md5: 79242ec5d52eee72a224c252c294db62 + size: 1556104 + timestamp: 1771668215375 +- conda: https://prefix.dev/conda-forge/win-64/sdl3-3.4.2-h5112557_0.conda + sha256: a4677774a9d542c6f4bac8779a2d7105748d38d8b7d56c8d02f36d14fba471b9 + md5: a0256884d35489e520360267e67ce3fc depends: - vc >=14.3,<15 - vc14_runtime >=14.44.35208 - ucrt >=10.0.20348.0 + - libvulkan-loader >=1.4.341.0,<2.0a0 - libusb >=1.0.29,<2.0a0 - - libvulkan-loader >=1.4.328.1,<2.0a0 license: Zlib - size: 1521101 - timestamp: 1767236315915 -- conda: . - name: sdl_example - version: 0.1.0 - build: h0dc7051_0 - subdir: osx-64 - variants: - target_platform: osx-64 - depends: - - libcxx >=22 - - sdl2 >=2.32.56,<3.0a0 -- conda: . - name: sdl_example - version: 0.1.0 - build: h60d57d3_0 - subdir: osx-arm64 - variants: - target_platform: osx-arm64 - depends: - - libcxx >=22 - - sdl2 >=2.32.56,<3.0a0 -- conda: . - name: sdl_example - version: 0.1.0 - build: h659f713_0 - subdir: win-64 + size: 1669623 + timestamp: 1771668231217 +- source: sdl_example[6711f831] @ . variants: cxx_compiler: vs2022 target_platform: win-64 @@ -1027,17 +985,25 @@ packages: - vc14_runtime >=14.44.35208 - ucrt >=10.0.20348.0 - sdl2 >=2.32.56,<3.0a0 -- conda: . - name: sdl_example - version: 0.1.0 - build: hb0f4dca_0 - subdir: linux-64 +- source: sdl_example[bd431642] @ . variants: target_platform: linux-64 depends: - libstdcxx >=15 - libgcc >=15 - sdl2 >=2.32.56,<3.0a0 +- source: sdl_example[2815fc71] @ . + variants: + target_platform: osx-64 + depends: + - libcxx >=22 + - sdl2 >=2.32.56,<3.0a0 +- source: sdl_example[f88715ba] @ . 
+ variants: + target_platform: osx-arm64 + depends: + - libcxx >=22 + - sdl2 >=2.32.56,<3.0a0 - conda: https://prefix.dev/conda-forge/win-64/ucrt-10.0.26100.0-h57928b3_0.conda sha256: 3005729dce6f3d3f5ec91dfc49fc75a0095f9cd23bab49efb899657297ac91a5 md5: 71b24316859acd00bdb8b38f5e2ce328 @@ -1094,28 +1060,28 @@ packages: license_family: MIT size: 329779 timestamp: 1761174273487 -- conda: https://prefix.dev/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda - sha256: aa03b49f402959751ccc6e21932d69db96a65a67343765672f7862332aa32834 - md5: 71ae752a748962161b4740eaff510258 +- conda: https://prefix.dev/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda + sha256: 19c2bb14bec84b0e995b56b752369775c75f1589314b43733948bb5f471a6915 + md5: b56e0c8432b56decafae7e78c5f29ba5 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=14 - - xorg-libx11 >=1.8.12,<2.0a0 + - xorg-libx11 >=1.8.13,<2.0a0 license: MIT license_family: MIT - size: 396975 - timestamp: 1759543819846 -- conda: https://prefix.dev/conda-forge/linux-64/xorg-libx11-1.8.12-h4f16b4b_0.conda - sha256: 51909270b1a6c5474ed3978628b341b4d4472cd22610e5f22b506855a5e20f67 - md5: db038ce880f100acc74dba10302b5630 + size: 399291 + timestamp: 1772021302485 +- conda: https://prefix.dev/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda + sha256: 516d4060139dbb4de49a4dcdc6317a9353fb39ebd47789c14e6fe52de0deee42 + md5: 861fb6ccbc677bb9a9fb2468430b9c6a depends: - __glibc >=2.17,<3.0.a0 - - libgcc >=13 + - libgcc >=14 - libxcb >=1.17.0,<2.0a0 license: MIT license_family: MIT - size: 835896 - timestamp: 1741901112627 + size: 839652 + timestamp: 1770819209719 - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda sha256: 6bc6ab7a90a5d8ac94c7e300cc10beb0500eeba4b99822768ca2f2ef356f731b md5: b2895afaf55bf96a8c8282a2e47a5de0 @@ -1149,17 +1115,17 @@ packages: license_family: MIT size: 20591 timestamp: 1762976546182 -- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda 
- sha256: da5dc921c017c05f38a38bd75245017463104457b63a1ce633ed41f214159c14 - md5: febbab7d15033c913d53c7a2c102309d +- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda + sha256: 79c60fc6acfd3d713d6340d3b4e296836a0f8c51602327b32794625826bd052f + md5: 34e54f03dfea3e7a2dcf1453a85f1085 depends: - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 + - libgcc >=14 + - xorg-libx11 >=1.8.12,<2.0a0 license: MIT license_family: MIT - size: 50060 - timestamp: 1727752228921 + size: 50326 + timestamp: 1769445253162 - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda sha256: 83c4c99d60b8784a611351220452a0a85b080668188dce5dfa394b723d7b64f4 md5: ba231da7fccf9ea1e768caf5c7099b84 @@ -1171,19 +1137,32 @@ packages: license_family: MIT size: 20071 timestamp: 1759282564045 -- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda - sha256: ac0f037e0791a620a69980914a77cb6bb40308e26db11698029d6708f5aa8e0d - md5: 2de7f99d6581a4a7adbff607b5c278ca +- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda + sha256: 1a724b47d98d7880f26da40e45f01728e7638e6ec69f35a3e11f92acd05f9e7a + md5: 17dcc85db3c7886650b8908b183d6876 depends: - __glibc >=2.17,<3.0.a0 - libgcc >=13 - xorg-libx11 >=1.8.10,<2.0a0 - xorg-libxext >=1.3.6,<2.0a0 - - xorg-libxrender >=0.9.11,<0.10.0a0 + - xorg-libxfixes >=6.0.1,<7.0a0 + license: MIT + license_family: MIT + size: 47179 + timestamp: 1727799254088 +- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda + sha256: 80ed047a5cb30632c3dc5804c7716131d767089f65877813d4ae855ee5c9d343 + md5: e192019153591938acf7322b6459d36e + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - xorg-libx11 >=1.8.12,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxrender >=0.9.12,<0.10.0a0 license: MIT license_family: MIT - size: 29599 - timestamp: 1727794874300 + size: 30456 + timestamp: 1769445263457 - conda: 
https://prefix.dev/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda sha256: 044c7b3153c224c6cedd4484dd91b389d2d7fd9c776ad0f4a34f099b3389f4a1 md5: 96d57aba173e878a2089d5638016dc5e @@ -1207,3 +1186,16 @@ packages: license_family: MIT size: 14412 timestamp: 1727899730073 +- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda + sha256: 752fdaac5d58ed863bbf685bb6f98092fe1a488ea8ebb7ed7b606ccfce08637a + md5: 7bbe9a0cc0df0ac5f5a8ad6d6a11af2f + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxi >=1.7.10,<2.0a0 + license: MIT + license_family: MIT + size: 32808 + timestamp: 1727964811275 diff --git a/tests/data/satisfiability/pypi-dynamic-version/pixi.lock b/tests/data/satisfiability/pypi-dynamic-version/pixi.lock index 2478aa7356..3fdee90b0f 100644 --- a/tests/data/satisfiability/pypi-dynamic-version/pixi.lock +++ b/tests/data/satisfiability/pypi-dynamic-version/pixi.lock @@ -19,4 +19,4 @@ packages: depends: [] - pypi: ./dynamic-dep name: dynamic-dep - sha256: 93b37bda9cded35069601a8e2d2cecdba7a13fe9ac356985c53784c26461911d + sha256: ec39769fce8466d4438621a1d371aad180c4e8426a5a22632b0fe46b61cedb90 From 779d214f57a7607399fa888d9a7344f847dd038e Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Thu, 12 Mar 2026 11:25:28 +0100 Subject: [PATCH 10/15] fixes from code review --- Cargo.toml | 38 +- crates/pixi_api/src/workspace/list/mod.rs | 10 +- crates/pixi_api/src/workspace/list/package.rs | 40 +- crates/pixi_cli/src/tree.rs | 10 +- .../workspace/export/conda_explicit_spec.rs | 4 +- .../pixi_core/src/lock_file/install_subset.rs | 3 +- crates/pixi_core/src/lock_file/outdated.rs | 20 +- .../src/lock_file/satisfiability/mod.rs | 11 +- crates/pixi_core/src/lock_file/update.rs | 15 +- .../src/lock_file/virtual_packages.rs | 10 +- crates/pixi_core/src/workspace/mod.rs | 4 +- crates/pixi_diff/src/lib.rs | 23 +- 
crates/pixi_record/src/lib.rs | 65 ++- crates/pixi_record/src/source_record.rs | 381 +++++++++++++++--- crates/pixi_spec/src/lib.rs | 7 +- 15 files changed, 468 insertions(+), 173 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 960efde26e..1ebad8aa21 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -256,25 +256,25 @@ reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "7650ed76215a962a96d94a79be71c27bffde7ab2" } version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "d8efd77673c9a90792da9da31b6c0da7ea8a324b" } -coalesced_map = { path = "F:/projects/rattler/crates/coalesced_map" } -file_url = { path = "F:/projects/rattler/crates/file_url" } -rattler = { path = "F:/projects/rattler/crates/rattler" } -rattler_cache = { path = "F:/projects/rattler/crates/rattler_cache" } -rattler_conda_types = { path = "F:/projects/rattler/crates/rattler_conda_types" } -rattler_config = { path = "F:/projects/rattler/crates/rattler_config" } -rattler_digest = { path = "F:/projects/rattler/crates/rattler_digest" } -rattler_lock = { path = "F:/projects/rattler/crates/rattler_lock" } -rattler_menuinst = { path = "F:/projects/rattler/crates/rattler_menuinst" } -rattler_networking = { path = "F:/projects/rattler/crates/rattler_networking" } -rattler_package_streaming = { path = "F:/projects/rattler/crates/rattler_package_streaming" } -rattler_repodata_gateway = { path = "F:/projects/rattler/crates/rattler_repodata_gateway" } -rattler_shell = { path = "F:/projects/rattler/crates/rattler_shell" } -rattler_solve = { path = "F:/projects/rattler/crates/rattler_solve" } -rattler_upload = { path = "F:/projects/rattler/crates/rattler_upload" } -rattler_redaction = { path = "F:/projects/rattler/crates/rattler_redaction" } -rattler_virtual_packages = { path = "F:/projects/rattler/crates/rattler_virtual_packages" } -rattler_s3 = { path = "F:/projects/rattler/crates/rattler_s3" 
} -simple_spawn_blocking = { path = "F:/projects/rattler/crates/simple_spawn_blocking" } +coalesced_map = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +file_url = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_cache = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_conda_types = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_config = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_digest = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_lock = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_menuinst = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_networking = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_package_streaming = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_repodata_gateway = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_shell = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_solve = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_upload = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_redaction = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_virtual_packages = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +rattler_s3 = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } +simple_spawn_blocking = { git = "https://github.com/conda/rattler", branch ="feature/lockfile-v7" } [patch."https://github.com/prefix-dev/rattler-build"] #rattler-build = { path = 
"/var/home/tobias/src/rattler-build" } diff --git a/crates/pixi_api/src/workspace/list/mod.rs b/crates/pixi_api/src/workspace/list/mod.rs index 6cd2192191..3c61e80d32 100644 --- a/crates/pixi_api/src/workspace/list/mod.rs +++ b/crates/pixi_api/src/workspace/list/mod.rs @@ -87,12 +87,10 @@ pub async fn list( index_locations = pypi_options_to_index_locations(&environment.pypi_options(), workspace.root()) .into_diagnostic()?; - let record = python_record.record().expect("python record should have full metadata"); - tags = get_pypi_tags( - platform, - &environment.system_requirements(), - record, - )?; + let record = python_record + .record() + .expect("python record should have full metadata"); + tags = get_pypi_tags(platform, &environment.system_requirements(), record)?; Some(RegistryWheelIndex::new( &uv_context.cache, &tags, diff --git a/crates/pixi_api/src/workspace/list/package.rs b/crates/pixi_api/src/workspace/list/package.rs index 14c22feb3e..4f29bca646 100644 --- a/crates/pixi_api/src/workspace/list/package.rs +++ b/crates/pixi_api/src/workspace/list/package.rs @@ -133,7 +133,9 @@ impl Package { }; let sha256 = match package { - PackageExt::Conda(pkg) => pkg.record().and_then(|r| r.sha256.map(|h| format!("{h:x}"))), + PackageExt::Conda(pkg) => pkg + .record() + .and_then(|r| r.sha256.map(|h| format!("{h:x}"))), PackageExt::PyPI(p, _) => p .hash .as_ref() @@ -156,23 +158,23 @@ impl Package { }; let timestamp = match package { - PackageExt::Conda(pkg) => pkg.record().and_then(|r| r.timestamp.map(|ts| ts.timestamp_millis())), + PackageExt::Conda(pkg) => pkg + .record() + .and_then(|r| r.timestamp.map(|ts| ts.timestamp_millis())), PackageExt::PyPI(_, _) => None, }; let noarch = match package { - PackageExt::Conda(pkg) => { - pkg.record().and_then(|r| { - let noarch_type = &r.noarch; - if noarch_type.is_python() { - Some("python".to_string()) - } else if noarch_type.is_generic() { - Some("generic".to_string()) - } else { - None - } - }) - } + PackageExt::Conda(pkg) 
=> pkg.record().and_then(|r| { + let noarch_type = &r.noarch; + if noarch_type.is_python() { + Some("python".to_string()) + } else if noarch_type.is_generic() { + Some("generic".to_string()) + } else { + None + } + }), PackageExt::PyPI(_, _) => None, }; @@ -211,7 +213,10 @@ impl Package { }; let constrains = match package { - PackageExt::Conda(pkg) => pkg.record().map(|r| r.constrains.clone()).unwrap_or_default(), + PackageExt::Conda(pkg) => pkg + .record() + .map(|r| r.constrains.clone()) + .unwrap_or_default(), PackageExt::PyPI(_, _) => Vec::new(), }; @@ -221,7 +226,10 @@ impl Package { }; let track_features = match package { - PackageExt::Conda(pkg) => pkg.record().map(|r| r.track_features.clone()).unwrap_or_default(), + PackageExt::Conda(pkg) => pkg + .record() + .map(|r| r.track_features.clone()) + .unwrap_or_default(), PackageExt::PyPI(_, _) => Vec::new(), }; diff --git a/crates/pixi_cli/src/tree.rs b/crates/pixi_cli/src/tree.rs index 9799bf5e39..dcbf4cd324 100644 --- a/crates/pixi_cli/src/tree.rs +++ b/crates/pixi_cli/src/tree.rs @@ -125,7 +125,8 @@ pub(crate) fn extract_package_info( if let Some(conda_package) = package.as_conda() { let name = conda_package.name().as_normalized().to_string(); - let dependencies: Vec = conda_package.depends() + let dependencies: Vec = conda_package + .depends() .iter() .map(|d| { d.split_once(' ') @@ -179,9 +180,10 @@ pub fn generate_dependency_map(locked_deps: &[LockedPackageRef<'_>]) -> HashMap< Package { name: package_info.name, version: match package { - LockedPackageRef::Conda(conda_data) => { - conda_data.record().map(|r| r.version.to_string()).unwrap_or_default() - } + LockedPackageRef::Conda(conda_data) => conda_data + .record() + .map(|r| r.version.to_string()) + .unwrap_or_default(), LockedPackageRef::Pypi(pypi_data) => pypi_data.version_string(), }, dependencies: package_info diff --git a/crates/pixi_cli/src/workspace/export/conda_explicit_spec.rs b/crates/pixi_cli/src/workspace/export/conda_explicit_spec.rs index 
700231db51..26b86b0fbd 100644 --- a/crates/pixi_cli/src/workspace/export/conda_explicit_spec.rs +++ b/crates/pixi_cli/src/workspace/export/conda_explicit_spec.rs @@ -150,8 +150,8 @@ fn render_env_platform( // Topologically sort packages let repodata = conda_packages_from_lockfile - .iter() - .map(|p| RepoDataRecord::try_from(p.clone())) + .into_iter() + .map(|p| RepoDataRecord::try_from(*p)) .collect::, _>>() .into_diagnostic() .with_context(|| "Failed to convert conda packages to RepoDataRecords")?; diff --git a/crates/pixi_core/src/lock_file/install_subset.rs b/crates/pixi_core/src/lock_file/install_subset.rs index 989e3dd610..3dc154e7ef 100644 --- a/crates/pixi_core/src/lock_file/install_subset.rs +++ b/crates/pixi_core/src/lock_file/install_subset.rs @@ -37,7 +37,8 @@ impl<'a> From> for PackageNode { let dependency_names: Vec = match package_ref { LockedPackageRef::Conda(conda_data) => { // Extract dependencies from conda data and parse as MatchSpec - conda_data.depends() + conda_data + .depends() .iter() .filter_map(|dep_spec| { // Parse as MatchSpec to get the package name diff --git a/crates/pixi_core/src/lock_file/outdated.rs b/crates/pixi_core/src/lock_file/outdated.rs index 6d3a7c78a2..d891371767 100644 --- a/crates/pixi_core/src/lock_file/outdated.rs +++ b/crates/pixi_core/src/lock_file/outdated.rs @@ -445,18 +445,16 @@ fn find_inconsistent_solve_groups<'p>( .flatten() { match package { - LockedPackageRef::Conda(pkg) => { - match conda_packages_by_name.get(pkg.name()) { - None => { - conda_packages_by_name - .insert(pkg.name().clone(), pkg.location().clone()); - } - Some(url) if pkg.location() != url => { - conda_package_mismatch = true; - } - _ => {} + LockedPackageRef::Conda(pkg) => match conda_packages_by_name.get(pkg.name()) { + None => { + conda_packages_by_name + .insert(pkg.name().clone(), pkg.location().clone()); } - } + Some(url) if pkg.location() != url => { + conda_package_mismatch = true; + } + _ => {} + }, LockedPackageRef::Pypi(pkg) => 
match pypi_packages_by_name.get(&pkg.name) { None => { pypi_packages_by_name.insert(pkg.name.clone(), pkg.location.clone()); diff --git a/crates/pixi_core/src/lock_file/satisfiability/mod.rs b/crates/pixi_core/src/lock_file/satisfiability/mod.rs index 5148bd740b..e5a1b28b58 100644 --- a/crates/pixi_core/src/lock_file/satisfiability/mod.rs +++ b/crates/pixi_core/src/lock_file/satisfiability/mod.rs @@ -1326,10 +1326,8 @@ pub(crate) fn pypi_satisfies_requirement( RequirementSource::Path { install_path, .. } | RequirementSource::Directory { install_path, .. } => { if let UrlOrPath::Path(locked_path) = &*locked_data.location { - eprintln!("Path from lock: {locked_path:?}"); let install_path = Utf8TypedPathBuf::from(install_path.to_string_lossy().to_string()); - eprintln!("Path from install: {install_path:?}"); let project_root = Utf8TypedPathBuf::from(project_root.to_string_lossy().to_string()); // Join relative paths with the project root @@ -1338,7 +1336,6 @@ pub(crate) fn pypi_satisfies_requirement( } else { project_root.join(locked_path.to_path()).normalize() }; - eprintln!("Path from lock (always absolute): {locked_path:?}"); if locked_path.to_path() != install_path { return Err(PlatformUnsat::LockedPyPIPathMismatch { name: spec.name.clone().to_string(), @@ -2124,7 +2121,7 @@ pub(crate) async fn verify_package_platform_satisfiability( // Iterate over all packages. First iterate over all conda matchspecs and then // over all pypi requirements. We want to ensure we always check the conda // packages first. 
- let mut conda_queue = environment_dependencies + let mut conda_stack = environment_dependencies .into_iter() .chain(resolved_dev_dependencies.into_iter()) .collect_vec(); @@ -2134,7 +2131,7 @@ pub(crate) async fn verify_package_platform_satisfiability( let mut conda_packages_used_by_pypi = HashSet::new(); let mut delayed_pypi_error = None; - while let Some(package) = conda_queue.pop().or_else(|| pypi_queue.pop()) { + while let Some(package) = conda_stack.pop().or_else(|| pypi_queue.pop()) { // Determine the package that matches the requirement of matchspec. let found_package = match package { Dependency::Input(name, spec, source) => { @@ -2357,13 +2354,13 @@ pub(crate) async fn verify_package_platform_satisfiability( }) { let anchored_location = anchor.resolve(source.clone()); let source_spec = SourceSpec::new(anchored_location, spec); - conda_queue.push(Dependency::CondaSource( + conda_stack.push(Dependency::CondaSource( package_name.clone(), source_spec, origin, )); } else { - conda_queue.push(Dependency::Conda( + conda_stack.push(Dependency::Conda( MatchSpec::from_nameless(spec, name), origin, )); diff --git a/crates/pixi_core/src/lock_file/update.rs b/crates/pixi_core/src/lock_file/update.rs index 3a0672fa80..d8a6ccb1e2 100644 --- a/crates/pixi_core/src/lock_file/update.rs +++ b/crates/pixi_core/src/lock_file/update.rs @@ -1155,7 +1155,15 @@ async fn resolve_partial_record( } }; - let spec = build_source_metadata_spec(source, channel_config, channels, variants, variant_files, virtual_packages, platform); + let spec = build_source_metadata_spec( + source, + channel_config, + channels, + variants, + variant_files, + virtual_packages, + platform, + ); let metadata = command_dispatcher .source_metadata(spec) @@ -1585,10 +1593,7 @@ impl<'p> UpdateContextBuilder<'p> { let unresolved = records .cloned() .map(|data| { - UnresolvedPixiRecord::from_conda_package_data( - data, - workspace_root, - ) + UnresolvedPixiRecord::from_conda_package_data(data, workspace_root) }) 
.collect::, _>>()?; Ok((platform, unresolved)) diff --git a/crates/pixi_core/src/lock_file/virtual_packages.rs b/crates/pixi_core/src/lock_file/virtual_packages.rs index a11d8c637d..bac28eed8b 100644 --- a/crates/pixi_core/src/lock_file/virtual_packages.rs +++ b/crates/pixi_core/src/lock_file/virtual_packages.rs @@ -169,8 +169,7 @@ pub(crate) fn validate_system_meets_environment_requirements( .collect_vec(); // Get the virtual packages required by the conda records - let required_virtual_packages = - get_required_virtual_packages_from_depends(&all_depends)?; + let required_virtual_packages = get_required_virtual_packages_from_depends(&all_depends)?; // Find the python package record (needed for wheel tag validation below). // This works for binary and full source packages; partial source records @@ -257,8 +256,8 @@ pub(crate) fn validate_system_meets_environment_requirements( if lock_platform.is_some_and(|p| environment.has_pypi_packages(p)) && let Some(pypi_packages) = lock_platform.and_then(|p| environment.pypi_packages(p)) { - let python_record = python_record - .ok_or(MachineValidationError::NoPythonRecordFound(platform))?; + let python_record = + python_record.ok_or(MachineValidationError::NoPythonRecordFound(platform))?; // Check if all the wheel tags match the system virtual packages let pypi_packages = pypi_packages.cloned().collect_vec(); @@ -311,8 +310,7 @@ mod test { .map(|s| s.as_str()) .collect(); - let virtual_matchspecs = - get_required_virtual_packages_from_depends(&all_depends).unwrap(); + let virtual_matchspecs = get_required_virtual_packages_from_depends(&all_depends).unwrap(); assert!( virtual_matchspecs diff --git a/crates/pixi_core/src/workspace/mod.rs b/crates/pixi_core/src/workspace/mod.rs index ccbeb4c556..f02049f070 100644 --- a/crates/pixi_core/src/workspace/mod.rs +++ b/crates/pixi_core/src/workspace/mod.rs @@ -761,9 +761,7 @@ impl Workspace { filter_lock_file(self, lock_file, |env, platform, package| { if 
affected_environments.contains(&(env.name().as_str(), platform)) { match package { - LockedPackageRef::Conda(package) => { - !conda_packages.contains(package.name()) - } + LockedPackageRef::Conda(package) => !conda_packages.contains(package.name()), LockedPackageRef::Pypi(package) => !pypi_packages.contains(&package.name), } } else { diff --git a/crates/pixi_diff/src/lib.rs b/crates/pixi_diff/src/lib.rs index bf14cdfe3d..494a12ecf2 100644 --- a/crates/pixi_diff/src/lib.rs +++ b/crates/pixi_diff/src/lib.rs @@ -61,10 +61,9 @@ impl LockFileDiff { .into_iter() .flatten() .partition_map(|p| match p { - LockedPackageRef::Conda(conda_package_data) => Either::Left(( - conda_package_data.name().clone(), - conda_package_data, - )), + LockedPackageRef::Conda(conda_package_data) => { + Either::Left((conda_package_data.name().clone(), conda_package_data)) + } LockedPackageRef::Pypi(pypi_package_data) => { Either::Right((pypi_package_data.name.clone(), pypi_package_data)) } @@ -253,7 +252,11 @@ impl LockFileDiff { fn format_conda_identifier(p: &CondaPackageData) -> String { match p { CondaPackageData::Binary(b) => { - format!("{} {}", b.package_record.version.as_str(), &b.package_record.build) + format!( + "{} {}", + b.package_record.version.as_str(), + &b.package_record.build + ) } CondaPackageData::Source(s) => { format!("@ {}", &s.location) @@ -327,9 +330,15 @@ impl LockFileDiff { consts::CondaEmoji, name, choose_style(&prev_ver, &curr_ver), - choose_style(prev.package_record.build.as_str(), curr.package_record.build.as_str()), + choose_style( + prev.package_record.build.as_str(), + curr.package_record.build.as_str() + ), choose_style(&curr_ver, &prev_ver), - choose_style(curr.package_record.build.as_str(), prev.package_record.build.as_str()), + choose_style( + curr.package_record.build.as_str(), + prev.package_record.build.as_str() + ), ) } (CondaPackageData::Source(prev), CondaPackageData::Source(curr)) => { diff --git a/crates/pixi_record/src/lib.rs 
b/crates/pixi_record/src/lib.rs index 055e542591..d636001a8e 100644 --- a/crates/pixi_record/src/lib.rs +++ b/crates/pixi_record/src/lib.rs @@ -18,9 +18,11 @@ use rattler_conda_types::{ }; use rattler_lock::{CondaPackageData, ConversionError, UrlOrPath}; use serde::Serialize; +// Re-export the fully-resolved type as `SourceRecord` since it is the most +// commonly used variant throughout the codebase. pub use source_record::{ - FullSourceRecord as SourceRecord, FullSourceRecordData, PinnedBuildSourceSpec, - PartialSourceRecord, PartialSourceRecordData, SourceRecordData, UnresolvedSourceRecord, + FullSourceRecord as SourceRecord, FullSourceRecordData, PartialSourceRecord, + PartialSourceRecordData, PinnedBuildSourceSpec, SourceRecordData, UnresolvedSourceRecord, }; use thiserror::Error; @@ -55,7 +57,7 @@ impl PixiRecord { match self { PixiRecord::Binary(record) => record.into(), PixiRecord::Source(record) => { - CondaPackageData::Source(record.into_conda_source_data(workspace_root)) + CondaPackageData::Source(Box::new(record.into_conda_source_data(workspace_root))) } } } @@ -116,8 +118,13 @@ impl From for PixiRecord { /// A record that may contain partial source metadata (not yet resolved). /// -/// Used at the lock-file boundary: lock-file read produces these, and they must -/// be resolved to [`PixiRecord`] before use in solving/installing. +/// Lifecycle: lock-file read produces `UnresolvedPixiRecord` values. Binary +/// records and immutable source records are already resolved; mutable source +/// records are partial and must be resolved by re-evaluating source metadata +/// before the record can be used for solving or installing. +/// +/// Call [`try_into_resolved`](Self::try_into_resolved) to attempt the +/// conversion to a fully-resolved [`PixiRecord`]. 
#[allow(clippy::large_enum_variant)] #[derive(Debug, Clone)] pub enum UnresolvedPixiRecord { @@ -196,8 +203,8 @@ impl UnresolvedPixiRecord { match data { CondaPackageData::Binary(value) => { let location = value.location.clone(); - Ok(UnresolvedPixiRecord::Binary( - value.try_into().map_err(|err| match err { + Ok(UnresolvedPixiRecord::Binary((*value).try_into().map_err( + |err| match err { ConversionError::Missing(field) => { ParseLockFileError::Missing(location, field) } @@ -207,11 +214,11 @@ impl UnresolvedPixiRecord { ConversionError::InvalidBinaryPackageLocation => { ParseLockFileError::InvalidArchiveFilename(location) } - })?, - )) + }, + )?)) } CondaPackageData::Source(value) => Ok(UnresolvedPixiRecord::Source( - UnresolvedSourceRecord::from_conda_source_data(value, workspace_root)?, + UnresolvedSourceRecord::from_conda_source_data(*value, workspace_root)?, )), } } @@ -221,38 +228,28 @@ impl UnresolvedPixiRecord { match self { UnresolvedPixiRecord::Binary(record) => record.into(), UnresolvedPixiRecord::Source(record) => { - CondaPackageData::Source(record.into_conda_source_data(workspace_root)) + CondaPackageData::Source(Box::new(record.into_conda_source_data(workspace_root))) } } } - /// Try to convert into a fully resolved `PixiRecord`. + /// Try to convert into a fully resolved [`PixiRecord`]. /// - /// Returns `Ok(PixiRecord)` if this is a binary record or a full source - /// record. Returns `Err(self)` if this is a partial source record. + /// Returns `Ok(PixiRecord)` if this is a binary record or a source record + /// with full metadata. Returns `Err(self)` if this is a partial source + /// record that still needs metadata resolution (i.e. re-evaluation of + /// the mutable source). 
+ #[allow(clippy::result_large_err)] pub fn try_into_resolved(self) -> Result { match self { UnresolvedPixiRecord::Binary(record) => Ok(PixiRecord::Binary(record)), - UnresolvedPixiRecord::Source(source) => match source.data { - SourceRecordData::Full(full) => { - Ok(PixiRecord::Source(source_record::SourceRecord { - data: full, - manifest_source: source.manifest_source, - build_source: source.build_source, - variants: source.variants, - identifier_hash: source.identifier_hash, - })) - } - SourceRecordData::Partial(partial) => { - Err(UnresolvedPixiRecord::Source(source_record::SourceRecord { - data: SourceRecordData::Partial(partial), - manifest_source: source.manifest_source, - build_source: source.build_source, - variants: source.variants, - identifier_hash: source.identifier_hash, - })) - } - }, + UnresolvedPixiRecord::Source(source) => source + .try_map_data(|data| match data { + SourceRecordData::Full(full) => Ok(full), + SourceRecordData::Partial(partial) => Err(SourceRecordData::Partial(partial)), + }) + .map(PixiRecord::Source) + .map_err(UnresolvedPixiRecord::Source), } } } diff --git a/crates/pixi_record/src/source_record.rs b/crates/pixi_record/src/source_record.rs index 7513f282de..d112a7dd6f 100644 --- a/crates/pixi_record/src/source_record.rs +++ b/crates/pixi_record/src/source_record.rs @@ -1,9 +1,38 @@ +//! Source records for conda packages that require building from source. +//! +//! # Full vs Partial vs Unresolved +//! +//! Source records exist in three states: +//! +//! - **Full** ([`FullSourceRecord`]): all metadata is available (package record, +//! dependencies, sources). Safe to use for building and installing. +//! - **Partial** ([`PartialSourceRecord`]): only minimal metadata (name, depends, +//! sources). Produced when a *mutable* (path-based) source is written to the +//! lock file, because the full metadata would be stale by the next read. +//! - **Unresolved** ([`UnresolvedSourceRecord`]): may be either full or partial. +//! 
This is what the lock file produces on read. +//! +//! # State transitions +//! +//! ```text +//! Lock-file write: FullSourceRecord ──► Partial (if mutable source) +//! ──► Full (if immutable source, e.g. git) +//! +//! Lock-file read: ──► UnresolvedSourceRecord (Full or Partial) +//! +//! Startup resolve: UnresolvedSourceRecord ──► FullSourceRecord +//! (re-evaluates source metadata for partial records) +//! ``` +//! +//! Use [`SourceRecord::map_data`] and [`SourceRecord::try_map_data`] for clean +//! state transitions without field-by-field reconstruction. + use pixi_git::sha::GitSha; use pixi_spec::{GitReference, SourceLocationSpec}; use rattler_conda_types::{MatchSpec, Matches, NamelessMatchSpec, PackageName, PackageRecord}; use rattler_lock::{ - CondaSourceData, FullSourceMetadata, GitShallowSpec, PackageBuildSource, - PartialSourceMetadata, SourceMetadata, + CondaSourceData, FullSourceMetadata, GitShallowSpec, PackageBuildSource, PartialSourceMetadata, + SourceMetadata, }; use std::fmt::{Display, Formatter}; use std::{ @@ -94,15 +123,30 @@ pub struct SourceRecord { pub identifier_hash: Option, } -/// A source record with full metadata (package record + sources). +/// A fully-resolved source record with all metadata available. +/// +/// This is the primary type used throughout the codebase for building, +/// installing, and solving. Re-exported as `SourceRecord` from the crate root. pub type FullSourceRecord = SourceRecord; -/// A source record with only the package name (no metadata resolved yet). +/// A source record with only minimal metadata (name, depends, sources). +/// +/// Produced when a mutable (path-based) source is written to the lock file. +/// Not used directly outside this crate; see [`UnresolvedSourceRecord`]. pub type PartialSourceRecord = SourceRecord; -/// A source record that may be full or partial. +/// A source record that may be either full or partial. This is the lock-file +/// boundary type. 
+/// +/// Use [`UnresolvedPixiRecord::try_into_resolved`](crate::UnresolvedPixiRecord::try_into_resolved) +/// to check whether resolution is needed. pub type UnresolvedSourceRecord = SourceRecord; +/// Minimal metadata for a source package whose full record is not yet known. +/// +/// This is what gets stored in the lock file for mutable (path-based) sources, +/// since their full metadata (version, build string, etc.) can change between +/// runs and would be stale. #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct PartialSourceRecordData { /// The package name of the source record. @@ -116,6 +160,10 @@ pub struct PartialSourceRecordData { pub sources: HashMap, } +/// Complete metadata for a fully-evaluated source package. +/// +/// Contains the full [`PackageRecord`] (version, build, dependencies, etc.) +/// plus the source dependency map. #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct FullSourceRecordData { #[serde(flatten)] @@ -126,7 +174,13 @@ pub struct FullSourceRecordData { pub sources: HashMap, } +/// Runtime-checked variant used at the lock-file boundary. +/// +/// After reading a lock file, source records may be either full (immutable +/// sources like git) or partial (mutable sources like local paths). This enum +/// captures both cases and is resolved to [`FullSourceRecordData`] at startup. #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +#[allow(clippy::large_enum_variant)] pub enum SourceRecordData { Partial(PartialSourceRecordData), Full(FullSourceRecordData), @@ -180,6 +234,55 @@ impl SourceRecord { pub fn variants(&self) -> &BTreeMap { &self.variants } + + /// Transform the data payload while preserving all shared fields. + /// + /// Useful for state transitions (e.g. Full → Unresolved, Partial → Full) + /// without field-by-field reconstruction. 
+ pub fn map_data(self, f: impl FnOnce(D) -> D2) -> SourceRecord { + SourceRecord { + data: f(self.data), + manifest_source: self.manifest_source, + build_source: self.build_source, + variants: self.variants, + identifier_hash: self.identifier_hash, + } + } + + /// Fallible version of [`map_data`](Self::map_data). + /// + /// On success the record carries the new data type; on failure the record + /// is reassembled with the error type so no information is lost. + #[allow(clippy::result_large_err)] + pub fn try_map_data( + self, + f: impl FnOnce(D) -> Result, + ) -> Result, SourceRecord> { + let SourceRecord { + data, + manifest_source, + build_source, + variants, + identifier_hash, + } = self; + let shared = (manifest_source, build_source, variants, identifier_hash); + match f(data) { + Ok(new_data) => Ok(SourceRecord { + data: new_data, + manifest_source: shared.0, + build_source: shared.1, + variants: shared.2, + identifier_hash: shared.3, + }), + Err(err_data) => Err(SourceRecord { + data: err_data, + manifest_source: shared.0, + build_source: shared.1, + variants: shared.2, + identifier_hash: shared.3, + }), + } + } } impl SourceRecord { @@ -215,14 +318,17 @@ impl SourceRecord { /// Convert into lock-file compatible `CondaSourceData`. /// - /// If either source (manifest or build) is mutable, the record is - /// downgraded to partial metadata so the lock-file only stores minimal - /// information for packages whose metadata may change. + /// If either source (manifest or build) is mutable (path-based), the + /// record is downgraded to partial metadata. This is intentional: mutable + /// sources can change between runs, so storing full metadata (version, + /// build string, hashes) would be misleading because it would appear locked + /// but could silently become stale. By keeping only name, depends, and + /// sources, we force re-evaluation at the next lock-file read. 
pub fn into_conda_source_data(self, workspace_root: &Path) -> CondaSourceData { let has_mutable = self.has_mutable_source(); let mut unresolved = SourceRecord::::from(self); if has_mutable { - // Downgrade full data to partial — keep only name, depends, and sources. + // Downgrade full data to partial: keep only name, depends, and sources. if let SourceRecordData::Full(full) = unresolved.data { unresolved.data = SourceRecordData::Partial(PartialSourceRecordData { name: full.package_record.name, @@ -334,15 +440,21 @@ impl SourceRecord { let metadata = match self.data { SourceRecordData::Full(full) => SourceMetadata::Full(Box::new(FullSourceMetadata { package_record: full.package_record, - sources: full.sources.into_iter().map(|(k, v)| (k, v.into())).collect(), + sources: full + .sources + .into_iter() + .map(|(k, v)| (k, v.into())) + .collect(), })), - SourceRecordData::Partial(partial) => { - SourceMetadata::Partial(PartialSourceMetadata { - name: partial.name, - depends: partial.depends, - sources: partial.sources.into_iter().map(|(k, v)| (k, v.into())).collect(), - }) - } + SourceRecordData::Partial(partial) => SourceMetadata::Partial(PartialSourceMetadata { + name: partial.name, + depends: partial.depends, + sources: partial + .sources + .into_iter() + .map(|(k, v)| (k, v.into())) + .collect(), + }), }; CondaSourceData { @@ -380,7 +492,11 @@ impl SourceRecord { SourceRecordData::Partial(PartialSourceRecordData { name: partial.name, depends: partial.depends, - sources: partial.sources.into_iter().map(|(k, v)| (k, SourceLocationSpec::from(v))).collect(), + sources: partial + .sources + .into_iter() + .map(|(k, v)| (k, SourceLocationSpec::from(v))) + .collect(), }) } }; @@ -402,13 +518,7 @@ impl SourceRecord { /// Upcast from full to unresolved. 
impl From> for SourceRecord { fn from(record: SourceRecord) -> Self { - Self { - data: SourceRecordData::Full(record.data), - manifest_source: record.manifest_source, - build_source: record.build_source, - variants: record.variants, - identifier_hash: record.identifier_hash, - } + record.map_data(SourceRecordData::Full) } } @@ -579,7 +689,7 @@ mod tests { }) .collect(); - // Write back — mutable (path) records should become partial, + // Write back: mutable (path) records should become partial, // immutable (git) records stay full. let roundtrip_lock = build_lock_from_records(&roundtrip_records, workspace_root); let mut settings = insta::Settings::clone_current(); @@ -702,7 +812,10 @@ mod tests { path: typed_path::Utf8TypedPathBuf::from("./my-package"), }), build_source: None, - variants: BTreeMap::from([("python".into(), crate::VariantValue::from("3.12".to_string()))]), + variants: BTreeMap::from([( + "python".into(), + crate::VariantValue::from("3.12".to_string()), + )]), identifier_hash: Some("abcd1234".to_string()), }; @@ -710,12 +823,11 @@ mod tests { // Roundtrip through CondaSourceData. 
let conda_data = partial.into_conda_source_data(workspace_root); - let roundtripped = - super::SourceRecord::::from_conda_source_data( - conda_data, - workspace_root, - ) - .expect("from_conda_source_data should succeed"); + let roundtripped = super::SourceRecord::::from_conda_source_data( + conda_data, + workspace_root, + ) + .expect("from_conda_source_data should succeed"); assert_eq!(roundtripped.name().as_source(), "my-package"); assert!(roundtripped.data.is_partial()); @@ -747,12 +859,11 @@ mod tests { .next() .expect("expected at least one source package"); - let unresolved = - UnresolvedPixiRecord::from_conda_package_data( - CondaPackageData::Source(conda_source), - workspace_root, - ) - .expect("from_conda_package_data should succeed"); + let unresolved = UnresolvedPixiRecord::from_conda_package_data( + CondaPackageData::Source(Box::new(conda_source)), + workspace_root, + ) + .expect("from_conda_package_data should succeed"); let resolved = unresolved.try_into_resolved(); assert!(resolved.is_ok()); @@ -783,6 +894,176 @@ mod tests { assert_eq!(still_partial.name().as_source(), "partial-pkg"); } + /// Helper to create a minimal full source record for testing. 
+ fn make_full_record( + name: &str, + manifest_source: PinnedSourceSpec, + build_source: Option, + variants: BTreeMap, + ) -> SourceRecord { + let mut record = PackageRecord::new( + PackageName::from_str(name).unwrap(), + "1.0.0" + .parse::() + .unwrap(), + "h1234_0".into(), + ); + record.subdir = "linux-64".into(); + record.depends = vec!["python >=3.8".into()]; + SourceRecord { + data: FullSourceRecordData { + package_record: record, + sources: HashMap::new(), + }, + manifest_source, + build_source, + variants, + identifier_hash: None, + } + } + + fn path_source(p: &str) -> PinnedSourceSpec { + PinnedSourceSpec::Path(PinnedPathSpec { + path: typed_path::Utf8TypedPathBuf::from(p), + }) + } + + fn git_source() -> PinnedSourceSpec { + PinnedSourceSpec::Git(crate::PinnedGitSpec { + git: url::Url::parse("https://github.com/example/repo.git").unwrap(), + source: crate::PinnedGitCheckout { + commit: pixi_git::sha::GitSha::from_str("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") + .unwrap(), + subdirectory: Default::default(), + reference: pixi_spec::GitReference::DefaultBranch, + }, + }) + } + + #[test] + fn path_source_is_mutable() { + let record = make_full_record("my-pkg", path_source("./my-pkg"), None, BTreeMap::new()); + assert!(record.has_mutable_source()); + } + + #[test] + fn git_source_is_not_mutable() { + let record = make_full_record("my-pkg", git_source(), None, BTreeMap::new()); + assert!(!record.has_mutable_source()); + } + + #[test] + fn mutable_build_source_triggers_mutable() { + let record = make_full_record( + "my-pkg", + git_source(), + Some(PinnedBuildSourceSpec::Absolute(path_source("./build-dir"))), + BTreeMap::new(), + ); + assert!(record.has_mutable_source()); + } + + #[test] + fn path_source_downgrades_to_partial_in_lockfile() { + let record = make_full_record("my-pkg", path_source("./my-pkg"), None, BTreeMap::new()); + let conda_data = record.into_conda_source_data(Path::new("/workspace")); + assert!( + matches!(conda_data.metadata, 
SourceMetadata::Partial(_)), + "mutable source should be downgraded to partial" + ); + } + + #[test] + fn git_source_stays_full_in_lockfile() { + let record = make_full_record("my-pkg", git_source(), None, BTreeMap::new()); + let conda_data = record.into_conda_source_data(Path::new("/workspace")); + assert!( + matches!(conda_data.metadata, SourceMetadata::Full(_)), + "immutable source should stay full" + ); + } + + #[test] + fn refers_to_same_output_same_name_same_variants() { + let variants = BTreeMap::from([( + "python".into(), + crate::VariantValue::from("3.12".to_string()), + )]); + let a = make_full_record("pkg", path_source("."), None, variants.clone()); + let b = make_full_record("pkg", path_source("."), None, variants); + assert!(a.refers_to_same_output(&b)); + } + + #[test] + fn refers_to_same_output_different_variants() { + let a = make_full_record( + "pkg", + path_source("."), + None, + BTreeMap::from([( + "python".into(), + crate::VariantValue::from("3.12".to_string()), + )]), + ); + let b = make_full_record( + "pkg", + path_source("."), + None, + BTreeMap::from([( + "python".into(), + crate::VariantValue::from("3.11".to_string()), + )]), + ); + assert!(!a.refers_to_same_output(&b)); + } + + #[test] + fn refers_to_same_output_empty_variants_is_true() { + let a = make_full_record("pkg", path_source("."), None, BTreeMap::new()); + let b = make_full_record( + "pkg", + path_source("."), + None, + BTreeMap::from([( + "python".into(), + crate::VariantValue::from("3.12".to_string()), + )]), + ); + assert!(a.refers_to_same_output(&b)); + } + + #[test] + fn refers_to_same_output_different_names() { + let variants = BTreeMap::from([( + "python".into(), + crate::VariantValue::from("3.12".to_string()), + )]); + let a = make_full_record("pkg-a", path_source("."), None, variants.clone()); + let b = make_full_record("pkg-b", path_source("."), None, variants); + assert!(!a.refers_to_same_output(&b)); + } + + #[test] + fn map_data_preserves_shared_fields() { + let 
record = make_full_record( + "my-pkg", + path_source("./my-pkg"), + None, + BTreeMap::from([( + "python".into(), + crate::VariantValue::from("3.12".to_string()), + )]), + ); + let unresolved: super::SourceRecord = + record.map_data(SourceRecordData::Full); + assert_eq!(unresolved.name().as_source(), "my-pkg"); + assert!(unresolved.data.is_full()); + assert_eq!( + unresolved.variants.get("python").map(|v| v.to_string()), + Some("3.12".to_string()) + ); + } + #[test] fn full_upcast_roundtrip() { let workspace_root = Path::new("/workspace"); @@ -804,22 +1085,20 @@ mod tests { .expect("expected at least one source package"); // Parse as unresolved record (first record in fixture is git = immutable = full). - let unresolved = - super::SourceRecord::::from_conda_source_data( - conda_source, - workspace_root, - ) - .expect("from_conda_source_data should succeed"); + let unresolved = super::SourceRecord::::from_conda_source_data( + conda_source, + workspace_root, + ) + .expect("from_conda_source_data should succeed"); assert!(unresolved.data.is_full()); // Roundtrip through CondaSourceData. 
let conda_data = unresolved.into_conda_source_data(workspace_root); - let roundtripped = - super::SourceRecord::::from_conda_source_data( - conda_data, - workspace_root, - ) - .expect("roundtrip should succeed"); + let roundtripped = super::SourceRecord::::from_conda_source_data( + conda_data, + workspace_root, + ) + .expect("roundtrip should succeed"); assert!(roundtripped.data.is_full()); } diff --git a/crates/pixi_spec/src/lib.rs b/crates/pixi_spec/src/lib.rs index 3e5777d905..2ca0bd9588 100644 --- a/crates/pixi_spec/src/lib.rs +++ b/crates/pixi_spec/src/lib.rs @@ -682,7 +682,12 @@ impl From for rattler_lock::source::SourceLocation { #[cfg(feature = "rattler_lock")] impl From for UrlSourceSpec { fn from(value: rattler_lock::source::UrlSourceLocation) -> Self { - let rattler_lock::source::UrlSourceLocation { url, md5, sha256, subdirectory } = value; + let rattler_lock::source::UrlSourceLocation { + url, + md5, + sha256, + subdirectory, + } = value; Self { url, From d81a94254724d5a588c1c6526623d5e9f99e97ac Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Thu, 12 Mar 2026 16:13:01 +0100 Subject: [PATCH 11/15] fix: issues --- .../tests/integration_rust/build_tests.rs | 6 +- .../pixi/tests/integration_rust/common/mod.rs | 2 +- .../integration_rust/solve_group_tests.rs | 12 +- crates/pixi_config/src/lib.rs | 44 + ..._config__tests__config_merge_multiple.snap | 3 + crates/pixi_config/tests/config/config_2.toml | 1 + ...d__tests__roundtrip_conda_source_data.snap | 21 +- .../pixi_config_tomls/main_config.toml | 1 + examples/pixi-build/cpp-sdl/pixi.lock | 2372 ++++++++--------- 9 files changed, 1255 insertions(+), 1207 deletions(-) diff --git a/crates/pixi/tests/integration_rust/build_tests.rs b/crates/pixi/tests/integration_rust/build_tests.rs index 7d619b0546..67cef5ad71 100644 --- a/crates/pixi/tests/integration_rust/build_tests.rs +++ b/crates/pixi/tests/integration_rust/build_tests.rs @@ -789,14 +789,14 @@ 
test-source-pkg = {{ path = "./source-package" }} "Lock file should contain the source package" ); - // Verify we can find the package with the expected version + // Verify we can find the package assert!( lock_file.contains_match_spec( consts::DEFAULT_ENVIRONMENT_NAME, Platform::current(), - "test-source-pkg ==1.2.3" + "test-source-pkg" ), - "Lock file should contain test-source-pkg with version 1.2.3" + "Lock file should contain test-source-pkg" ); // Second invocation: Load the workspace again and check if lock-file is up to date diff --git a/crates/pixi/tests/integration_rust/common/mod.rs b/crates/pixi/tests/integration_rust/common/mod.rs index 0067bffa5e..d6a2dcca83 100644 --- a/crates/pixi/tests/integration_rust/common/mod.rs +++ b/crates/pixi/tests/integration_rust/common/mod.rs @@ -159,7 +159,7 @@ impl LockFileExt for LockFile { .into_iter() .flatten() .filter_map(LockedPackageRef::as_conda) - .any(|package| package.record().name.as_normalized() == name) + .any(|package| package.name().as_normalized() == name) } fn contains_pypi_package(&self, environment: &str, platform: Platform, name: &str) -> bool { let Some(env) = self.environment(environment) else { diff --git a/crates/pixi/tests/integration_rust/solve_group_tests.rs b/crates/pixi/tests/integration_rust/solve_group_tests.rs index d3f438771d..a51ba4f01c 100644 --- a/crates/pixi/tests/integration_rust/solve_group_tests.rs +++ b/crates/pixi/tests/integration_rust/solve_group_tests.rs @@ -214,8 +214,8 @@ async fn test_purl_are_added_for_pypi() { .packages(p) .unwrap() .for_each(|dep| { - if dep.as_conda().unwrap().record().name == PackageName::from_str("boltons").unwrap() { - assert!(dep.as_conda().unwrap().record().purls.is_none()); + if dep.as_conda().unwrap().name() == &PackageName::from_str("boltons").unwrap() { + assert!(dep.as_conda().unwrap().record().unwrap().purls.is_none()); } }); @@ -237,13 +237,11 @@ async fn test_purl_are_added_for_pypi() { .packages(p) .unwrap() .for_each(|dep| { - if 
dep.as_conda().unwrap().record().name == PackageName::from_str("boltons").unwrap() { + if dep.as_conda().unwrap().name() == &PackageName::from_str("boltons").unwrap() { assert_eq!( dep.as_conda() - .unwrap() - .record() - .purls - .as_ref() + .and_then(|c| c.as_binary()) + .and_then(|c| c.package_record.purls.as_ref()) .unwrap() .first() .unwrap() diff --git a/crates/pixi_config/src/lib.rs b/crates/pixi_config/src/lib.rs index a7ab85e6b7..d5c2b573b9 100644 --- a/crates/pixi_config/src/lib.rs +++ b/crates/pixi_config/src/lib.rs @@ -285,6 +285,12 @@ impl ConfigCliActivation { #[derive(Clone, Default, Debug, Deserialize, Serialize, PartialEq, Eq)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] pub struct RepodataChannelConfig { + /// Deprecated: JLAP support has been removed. This field is kept only so + /// existing config files continue to parse. A deprecation warning is + /// emitted when set. + #[serde(alias = "disable_jlap")] // BREAK: remove to stop supporting snake_case alias + #[serde(skip_serializing_if = "Option::is_none")] + pub disable_jlap: Option, /// Disable bzip2 compression for repodata. #[serde(alias = "disable_bzip2")] // BREAK: remove to stop supporting snake_case alias #[serde(skip_serializing_if = "Option::is_none")] @@ -308,6 +314,7 @@ impl RepodataChannelConfig { pub fn merge(&self, other: Self) -> Self { Self { + disable_jlap: self.disable_jlap.or(other.disable_jlap), disable_zstd: self.disable_zstd.or(other.disable_zstd), disable_bzip2: self.disable_bzip2.or(other.disable_bzip2), disable_sharded: self.disable_sharded.or(other.disable_sharded), @@ -474,6 +481,16 @@ impl ExperimentalConfig { // default settings in the future. /// The default maximum number of concurrent solves that can be run at once. /// Defaulting to the number of CPUs available. +/// Emit a one-time deprecation warning for the `disable-jlap` config field. 
+fn warn_jlap_deprecated() { + static ONCE: std::sync::Once = std::sync::Once::new(); + ONCE.call_once(|| { + tracing::warn!( + "'disable-jlap' is deprecated and has no effect; JLAP support has been removed" + ); + }); +} + fn default_max_concurrent_solves() -> usize { std::thread::available_parallelism().map_or(1, |n| n.get()) } @@ -1243,6 +1260,11 @@ impl Config { .validate() .map_err(|e| ConfigError::ValidationError(e, path.to_path_buf()))?; + // Warn about deprecated fields (once only) + if config.repodata_config.default.disable_jlap.is_some() { + warn_jlap_deprecated(); + } + // check proxy config if config.proxy_config.https.is_none() && config.proxy_config.http.is_none() { if !config.proxy_config.non_proxy_hosts.is_empty() { @@ -1385,6 +1407,7 @@ impl Config { "pypi-config.keyring-provider", "repodata-config", "repodata-config.disable-bzip2", + "repodata-config.disable-jlap", "repodata-config.disable-sharded", "repodata-config.disable-zstd", "run-post-link-scripts", @@ -1660,6 +1683,11 @@ impl Config { let subkey = key.strip_prefix("repodata-config.").unwrap(); match subkey { + "disable-jlap" => { + self.repodata_config.default.disable_jlap = + value.map(|v| v.parse()).transpose().into_diagnostic()?; + warn_jlap_deprecated(); + } "disable-bzip2" => { self.repodata_config.default.disable_bzip2 = value.map(|v| v.parse()).transpose().into_diagnostic()?; @@ -2295,6 +2323,7 @@ UNUSED = "unused" )]), repodata_config: RepodataConfig { default: RepodataChannelConfig { + disable_jlap: Some(true), disable_bzip2: Some(true), disable_sharded: Some(true), disable_zstd: Some(true), @@ -2433,6 +2462,7 @@ UNUSED = "unused" "https://prefix.dev/conda-forge" ] [repodata_config] + disable_jlap = true disable_bzip2 = true disable_zstd = true "#; @@ -2454,6 +2484,7 @@ UNUSED = "unused" Some(&vec![Url::parse("https://prefix.dev/conda-forge").unwrap()]) ); let repodata_config = config.repodata_config; + assert_eq!(repodata_config.default.disable_jlap, Some(true)); 
assert_eq!(repodata_config.default.disable_bzip2, Some(true)); assert_eq!(repodata_config.default.disable_zstd, Some(true)); assert_eq!(repodata_config.default.disable_sharded, None); @@ -2468,6 +2499,7 @@ UNUSED = "unused" "https://prefix.dev/conda-forge" ] [repodata-config] + disable-jlap = true disable-bzip2 = true disable-zstd = true disable-sharded = true @@ -2637,6 +2669,12 @@ UNUSED = "unused" ); // Test more repodata-config options + config + .set("repodata-config.disable-jlap", Some("true".to_string())) + .unwrap(); + let repodata_config = config.repodata_config(); + assert_eq!(repodata_config.default.disable_jlap, Some(true)); + config .set("repodata-config.disable-bzip2", Some("true".to_string())) .unwrap(); @@ -2850,21 +2888,25 @@ UNUSED = "unused" fn test_repodata_config() { let toml = r#" [repodata-config] + disable-jlap = true disable-bzip2 = true disable-zstd = true disable-sharded = true [repodata-config."https://prefix.dev/conda-forge"] + disable-jlap = false disable-bzip2 = false disable-zstd = false disable-sharded = false [repodata-config."https://conda.anaconda.org/conda-forge"] + disable-jlap = false disable-bzip2 = false disable-zstd = false "#; let (config, _) = Config::from_toml(toml, None).unwrap(); let repodata_config = config.repodata_config(); + assert_eq!(repodata_config.default.disable_jlap, Some(true)); assert_eq!(repodata_config.default.disable_bzip2, Some(true)); assert_eq!(repodata_config.default.disable_zstd, Some(true)); assert_eq!(repodata_config.default.disable_sharded, Some(true)); @@ -2875,6 +2917,7 @@ UNUSED = "unused" let prefix_config = per_channel .get(&Url::from_str("https://prefix.dev/conda-forge").unwrap()) .unwrap(); + assert_eq!(prefix_config.disable_jlap, Some(false)); assert_eq!(prefix_config.disable_bzip2, Some(false)); assert_eq!(prefix_config.disable_zstd, Some(false)); assert_eq!(prefix_config.disable_sharded, Some(false)); @@ -2882,6 +2925,7 @@ UNUSED = "unused" let anaconda_config = per_channel 
.get(&Url::from_str("https://conda.anaconda.org/conda-forge").unwrap()) .unwrap(); + assert_eq!(anaconda_config.disable_jlap, Some(false)); assert_eq!(anaconda_config.disable_bzip2, Some(false)); assert_eq!(anaconda_config.disable_zstd, Some(false)); assert_eq!(anaconda_config.disable_sharded, None); diff --git a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap index c16519292a..063bcde6de 100644 --- a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap +++ b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap @@ -77,6 +77,9 @@ Config { }, repodata_config: RepodataConfig { default: RepodataChannelConfig { + disable_jlap: Some( + true, + ), disable_bzip2: None, disable_zstd: Some( true, diff --git a/crates/pixi_config/tests/config/config_2.toml b/crates/pixi_config/tests/config/config_2.toml index 94fd7d8dc8..934ed4bd2c 100644 --- a/crates/pixi_config/tests/config/config_2.toml +++ b/crates/pixi_config/tests/config/config_2.toml @@ -2,6 +2,7 @@ change_ps1 = true tls_no_verify = false [repodata_config] +disable_jlap = true disable_zstd = true [mirrors] diff --git a/crates/pixi_record/src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap b/crates/pixi_record/src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap index ac82fb7cd3..002c6f498d 100644 --- a/crates/pixi_record/src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap +++ b/crates/pixi_record/src/snapshots/pixi_record__source_record__tests__roundtrip_conda_source_data.snap @@ -1,5 +1,6 @@ --- source: crates/pixi_record/src/source_record.rs +assertion_line: 698 expression: roundtrip_lock --- version: 7 @@ -23,27 +24,27 @@ packages: version: 1.1.0 build: h234567_0 subdir: noarch - package_build_source: + source: path: ../src - conda_source: 
git-no-manifest-subdir[f0ed072f] @ git+https://github.com/example/repo.git?tag=v1.0.0#abc123def456abc123def456abc123def456abc1 version: 3.0.0 build: h901237_0 subdir: noarch - package_build_source: + source: path: build/subdir - conda_source: git-sibling-test[cb1b107e] @ git+https://github.com/example/mono-repo.git?subdirectory=recipes&branch=main#abc123def456abc123def456abc123def456abc1 version: 1.0.0 build: h123456_0 subdir: noarch - package_build_source: + source: path: ../non-nested -- source: path-absolute-manifest[17e682f7] @ /workspace/absolute-recipe - package_build_source: +- conda_source: path-absolute-manifest[17e682f7] @ /workspace/absolute-recipe + source: path: ../src -- source: path-child-test[9d062313] @ recipes/my-package - package_build_source: +- conda_source: path-child-test[9d062313] @ recipes/my-package + source: path: ../../src/lib -- source: path-no-build-source[e11447f7] @ recipes/no-build -- source: path-sibling-test[2c2ab470] @ recipes/my-package - package_build_source: +- conda_source: path-no-build-source[e11447f7] @ recipes/no-build +- conda_source: path-sibling-test[2c2ab470] @ recipes/my-package + source: path: ../../other-package/src diff --git a/docs/source_files/pixi_config_tomls/main_config.toml b/docs/source_files/pixi_config_tomls/main_config.toml index a94a93c9e1..4c2f5e17eb 100644 --- a/docs/source_files/pixi_config_tomls/main_config.toml +++ b/docs/source_files/pixi_config_tomls/main_config.toml @@ -44,6 +44,7 @@ tool-platform = "win-64" # force tools like build backends to be installed for a # This should only be used for specific old versions of artifactory and other non-compliant # servers. 
disable-bzip2 = true # don't try to download repodata.json.bz2 +disable-jlap = true # deprecated, has no effect (JLAP support removed) disable-sharded = true # don't try to download sharded repodata disable-zstd = true # don't try to download repodata.json.zst # --8<-- [end:repodata-config] diff --git a/examples/pixi-build/cpp-sdl/pixi.lock b/examples/pixi-build/cpp-sdl/pixi.lock index 2860d4b4ba..9ef536a7eb 100644 --- a/examples/pixi-build/cpp-sdl/pixi.lock +++ b/examples/pixi-build/cpp-sdl/pixi.lock @@ -7,1195 +7,1195 @@ platforms: environments: default: channels: - - url: https://prefix.dev/pixi-build-backends/ - - url: https://prefix.dev/conda-forge/ + - url: https://prefix.dev/pixi-build-backends/ + - url: https://prefix.dev/conda-forge/ packages: linux-64: - - conda: https://prefix.dev/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda - - conda: https://prefix.dev/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda - - conda: https://prefix.dev/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda - - conda: https://prefix.dev/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2 - - conda: https://prefix.dev/conda-forge/linux-64/libcap-2.77-h3ff7636_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda - - conda: https://prefix.dev/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda - - conda: https://prefix.dev/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libflac-1.5.0-he200343_1.conda - - conda: https://prefix.dev/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda - - conda: https://prefix.dev/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda - - conda: https://prefix.dev/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda - - conda: https://prefix.dev/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda 
- - conda: https://prefix.dev/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda - - conda: https://prefix.dev/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda - - conda: https://prefix.dev/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda - - conda: https://prefix.dev/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda - - conda: https://prefix.dev/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda - - conda: https://prefix.dev/conda-forge/linux-64/libopus-1.6.1-h280c20c_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libsndfile-1.2.2-hc7d488a_2.conda - - conda: https://prefix.dev/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda - - conda: https://prefix.dev/conda-forge/linux-64/libsystemd0-257.10-hd0affe5_4.conda - - conda: https://prefix.dev/conda-forge/linux-64/libudev1-257.10-hd0affe5_4.conda - - conda: https://prefix.dev/conda-forge/linux-64/libunwind-1.8.3-h65a8314_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/liburing-2.14-hb700be7_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libusb-1.0.29-h73b1eb8_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libvorbis-1.3.7-h54a6638_2.conda - - conda: https://prefix.dev/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libxml2-16-2.15.2-hf2a90c1_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libxml2-2.15.2-h031cc0b_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda - - conda: https://prefix.dev/conda-forge/linux-64/mpg123-1.32.9-hc50e24c_0.conda - - conda: 
https://prefix.dev/conda-forge/linux-64/pcre2-10.47-haa7fec5_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda - - conda: https://prefix.dev/conda-forge/linux-64/pulseaudio-client-17.0-h9a6aba3_3.conda - - conda: https://prefix.dev/conda-forge/linux-64/sdl2-2.32.56-h54a6638_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/sdl3-3.4.2-hdeec2a5_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda - - conda: https://prefix.dev/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb03c661_1.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxscrnsaver-1.2.4-hb9d3cd8_0.conda - - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda - - source: sdl_example[bd431642] @ . 
+ - conda: https://prefix.dev/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda + - conda: https://prefix.dev/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda + - conda: https://prefix.dev/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2 + - conda: https://prefix.dev/conda-forge/linux-64/libcap-2.77-h3ff7636_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda + - conda: https://prefix.dev/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libflac-1.5.0-he200343_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda + - conda: https://prefix.dev/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda + - conda: https://prefix.dev/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda + - conda: https://prefix.dev/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda + - conda: https://prefix.dev/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda + - conda: https://prefix.dev/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda + - conda: https://prefix.dev/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda + - conda: https://prefix.dev/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/libopus-1.6.1-h280c20c_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libsndfile-1.2.2-hc7d488a_2.conda + - conda: 
https://prefix.dev/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda + - conda: https://prefix.dev/conda-forge/linux-64/libsystemd0-257.10-hd0affe5_4.conda + - conda: https://prefix.dev/conda-forge/linux-64/libudev1-257.10-hd0affe5_4.conda + - conda: https://prefix.dev/conda-forge/linux-64/libunwind-1.8.3-h65a8314_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/liburing-2.14-hb700be7_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libusb-1.0.29-h73b1eb8_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libvorbis-1.3.7-h54a6638_2.conda + - conda: https://prefix.dev/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libxml2-16-2.15.2-hf2a90c1_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libxml2-2.15.2-h031cc0b_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda + - conda: https://prefix.dev/conda-forge/linux-64/mpg123-1.32.9-hc50e24c_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/pcre2-10.47-haa7fec5_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda + - conda: https://prefix.dev/conda-forge/linux-64/pulseaudio-client-17.0-h9a6aba3_3.conda + - conda: https://prefix.dev/conda-forge/linux-64/sdl2-2.32.56-h54a6638_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/sdl3-3.4.2-hdeec2a5_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda + - conda: 
https://prefix.dev/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb03c661_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxscrnsaver-1.2.4-hb9d3cd8_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda + - conda_source: sdl_example[bd431642] @ . osx-64: - - conda: https://prefix.dev/conda-forge/osx-64/bzip2-1.0.8-h500dc9f_9.conda - - conda: https://prefix.dev/conda-forge/osx-64/dbus-1.16.2-h6e7f9a9_1.conda - - conda: https://prefix.dev/conda-forge/osx-64/libcxx-22.1.0-h19cb2f5_1.conda - - conda: https://prefix.dev/conda-forge/osx-64/libexpat-2.7.4-h991f03e_0.conda - - conda: https://prefix.dev/conda-forge/osx-64/libffi-3.5.2-hd1f9c09_0.conda - - conda: https://prefix.dev/conda-forge/osx-64/libglib-2.86.4-hec30fc1_1.conda - - conda: https://prefix.dev/conda-forge/osx-64/libiconv-1.18-h57a12c2_2.conda - - conda: https://prefix.dev/conda-forge/osx-64/libintl-0.25.1-h3184127_1.conda - - conda: https://prefix.dev/conda-forge/osx-64/libusb-1.0.29-h2287256_0.conda - - conda: https://prefix.dev/conda-forge/osx-64/libvulkan-loader-1.4.341.0-ha6bc089_0.conda - - conda: https://prefix.dev/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda - - conda: https://prefix.dev/conda-forge/osx-64/pcre2-10.47-h13923f0_0.conda - - conda: https://prefix.dev/conda-forge/osx-64/sdl2-2.32.56-h53ec75d_0.conda - - conda: https://prefix.dev/conda-forge/osx-64/sdl3-3.4.2-hf9078ff_0.conda - - source: sdl_example[2815fc71] 
@ . + - conda: https://prefix.dev/conda-forge/osx-64/bzip2-1.0.8-h500dc9f_9.conda + - conda: https://prefix.dev/conda-forge/osx-64/dbus-1.16.2-h6e7f9a9_1.conda + - conda: https://prefix.dev/conda-forge/osx-64/libcxx-22.1.0-h19cb2f5_1.conda + - conda: https://prefix.dev/conda-forge/osx-64/libexpat-2.7.4-h991f03e_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/libffi-3.5.2-hd1f9c09_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/libglib-2.86.4-hec30fc1_1.conda + - conda: https://prefix.dev/conda-forge/osx-64/libiconv-1.18-h57a12c2_2.conda + - conda: https://prefix.dev/conda-forge/osx-64/libintl-0.25.1-h3184127_1.conda + - conda: https://prefix.dev/conda-forge/osx-64/libusb-1.0.29-h2287256_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/libvulkan-loader-1.4.341.0-ha6bc089_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda + - conda: https://prefix.dev/conda-forge/osx-64/pcre2-10.47-h13923f0_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/sdl2-2.32.56-h53ec75d_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/sdl3-3.4.2-hf9078ff_0.conda + - conda_source: sdl_example[2815fc71] @ . 
osx-arm64: - - conda: https://prefix.dev/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/dbus-1.16.2-h3ff7a7c_1.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/libcxx-22.1.0-h55c6f16_1.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/libexpat-2.7.4-hf6b4638_0.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/libglib-2.86.4-he378b5c_1.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/libiconv-1.18-h23cfdf5_2.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/libintl-0.25.1-h493aca8_0.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/libusb-1.0.29-hbc156a2_0.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/libvulkan-loader-1.4.341.0-h3feff0a_0.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/pcre2-10.47-h30297fc_0.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/sdl2-2.32.56-h248ca61_0.conda - - conda: https://prefix.dev/conda-forge/osx-arm64/sdl3-3.4.2-h6fa9c73_0.conda - - source: sdl_example[f88715ba] @ . 
+ - conda: https://prefix.dev/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/dbus-1.16.2-h3ff7a7c_1.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libcxx-22.1.0-h55c6f16_1.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libexpat-2.7.4-hf6b4638_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libglib-2.86.4-he378b5c_1.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libiconv-1.18-h23cfdf5_2.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libintl-0.25.1-h493aca8_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libusb-1.0.29-hbc156a2_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libvulkan-loader-1.4.341.0-h3feff0a_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/pcre2-10.47-h30297fc_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/sdl2-2.32.56-h248ca61_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/sdl3-3.4.2-h6fa9c73_0.conda + - conda_source: sdl_example[f88715ba] @ . win-64: - - conda: https://prefix.dev/conda-forge/win-64/libusb-1.0.29-h1839187_0.conda - - conda: https://prefix.dev/conda-forge/win-64/libvulkan-loader-1.4.341.0-h477610d_0.conda - - conda: https://prefix.dev/conda-forge/win-64/sdl2-2.32.56-h5112557_0.conda - - conda: https://prefix.dev/conda-forge/win-64/sdl3-3.4.2-h5112557_0.conda - - conda: https://prefix.dev/conda-forge/win-64/ucrt-10.0.26100.0-h57928b3_0.conda - - conda: https://prefix.dev/conda-forge/win-64/vc-14.3-h41ae7f8_34.conda - - conda: https://prefix.dev/conda-forge/win-64/vc14_runtime-14.44.35208-h818238b_34.conda - - conda: https://prefix.dev/conda-forge/win-64/vcomp14-14.44.35208-h818238b_34.conda - - source: sdl_example[6711f831] @ . 
+ - conda: https://prefix.dev/conda-forge/win-64/libusb-1.0.29-h1839187_0.conda + - conda: https://prefix.dev/conda-forge/win-64/libvulkan-loader-1.4.341.0-h477610d_0.conda + - conda: https://prefix.dev/conda-forge/win-64/sdl2-2.32.56-h5112557_0.conda + - conda: https://prefix.dev/conda-forge/win-64/sdl3-3.4.2-h5112557_0.conda + - conda: https://prefix.dev/conda-forge/win-64/ucrt-10.0.26100.0-h57928b3_0.conda + - conda: https://prefix.dev/conda-forge/win-64/vc-14.3-h41ae7f8_34.conda + - conda: https://prefix.dev/conda-forge/win-64/vc14_runtime-14.44.35208-h818238b_34.conda + - conda: https://prefix.dev/conda-forge/win-64/vcomp14-14.44.35208-h818238b_34.conda + - conda_source: sdl_example[6711f831] @ . packages: -- conda: https://prefix.dev/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda - build_number: 20 - sha256: 1dd3fffd892081df9726d7eb7e0dea6198962ba775bd88842135a4ddb4deb3c9 - md5: a9f577daf3de00bca7c3c76c0ecbd1de - depends: - - __glibc >=2.17,<3.0.a0 - - libgomp >=7.5.0 - constrains: - - openmp_impl <0.0a0 - license: BSD-3-Clause - license_family: BSD - size: 28948 - timestamp: 1770939786096 -- conda: https://prefix.dev/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda - sha256: a9c114cbfeda42a226e2db1809a538929d2f118ef855372293bd188f71711c48 - md5: 791365c5f65975051e4e017b5da3abf5 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: GPL-2.0-or-later - license_family: GPL - size: 68072 - timestamp: 1756738968573 -- conda: https://prefix.dev/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda - sha256: 0b75d45f0bba3e95dc693336fa51f40ea28c980131fec438afb7ce6118ed05f6 - md5: d2ffd7602c02f2b316fd921d39876885 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - license: bzip2-1.0.6 - license_family: BSD - size: 260182 - timestamp: 1771350215188 -- conda: https://prefix.dev/conda-forge/osx-64/bzip2-1.0.8-h500dc9f_9.conda - sha256: 9f242f13537ef1ce195f93f0cc162965d6cc79da578568d6d8e50f70dd025c42 - md5: 4173ac3b19ec0a4f400b4f782910368b - depends: - - 
__osx >=10.13 - license: bzip2-1.0.6 - license_family: BSD - size: 133427 - timestamp: 1771350680709 -- conda: https://prefix.dev/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda - sha256: 540fe54be35fac0c17feefbdc3e29725cce05d7367ffedfaaa1bdda234b019df - md5: 620b85a3f45526a8bc4d23fd78fc22f0 - depends: - - __osx >=11.0 - license: bzip2-1.0.6 - license_family: BSD - size: 124834 - timestamp: 1771350416561 -- conda: https://prefix.dev/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda - sha256: 8bb557af1b2b7983cf56292336a1a1853f26555d9c6cecf1e5b2b96838c9da87 - md5: ce96f2f470d39bd96ce03945af92e280 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - libstdcxx >=14 - - libzlib >=1.3.1,<2.0a0 - - libglib >=2.86.2,<3.0a0 - - libexpat >=2.7.3,<3.0a0 - license: AFL-2.1 OR GPL-2.0-or-later - size: 447649 - timestamp: 1764536047944 -- conda: https://prefix.dev/conda-forge/osx-64/dbus-1.16.2-h6e7f9a9_1.conda - sha256: 80ea0a20236ecb7006f7a89235802a34851eaac2f7f4323ca7acc094bcf7f372 - md5: cdbed7d22d4bdd74e60ce78bc7c6dd58 - depends: - - __osx >=10.13 - - libcxx >=19 - - libexpat >=2.7.3,<3.0a0 - - libglib >=2.86.2,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - license: AFL-2.1 OR GPL-2.0-or-later - size: 407670 - timestamp: 1764536068038 -- conda: https://prefix.dev/conda-forge/osx-arm64/dbus-1.16.2-h3ff7a7c_1.conda - sha256: a8207751ed261764061866880da38e4d3063e167178bfe85b6db9501432462ba - md5: 5a3506971d2d53023c1c4450e908a8da - depends: - - libcxx >=19 - - __osx >=11.0 - - libglib >=2.86.2,<3.0a0 - - libzlib >=1.3.1,<2.0a0 - - libexpat >=2.7.3,<3.0a0 - license: AFL-2.1 OR GPL-2.0-or-later - size: 393811 - timestamp: 1764536084131 -- conda: https://prefix.dev/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2 - sha256: aad2a703b9d7b038c0f745b853c6bb5f122988fe1a7a096e0e606d9cbec4eaab - md5: a8832b479f93521a9e7b5b743803be51 - depends: - - libgcc-ng >=12 - license: LGPL-2.0-only - license_family: LGPL - size: 508258 - timestamp: 1664996250081 -- conda: 
https://prefix.dev/conda-forge/linux-64/libcap-2.77-h3ff7636_0.conda - sha256: 9517cce5193144af0fcbf19b7bd67db0a329c2cc2618f28ffecaa921a1cbe9d3 - md5: 09c264d40c67b82b49a3f3b89037bd2e - depends: - - __glibc >=2.17,<3.0.a0 - - attr >=2.5.2,<2.6.0a0 - - libgcc >=14 - license: BSD-3-Clause - license_family: BSD - size: 121429 - timestamp: 1762349484074 -- conda: https://prefix.dev/conda-forge/osx-64/libcxx-22.1.0-h19cb2f5_1.conda - sha256: fa002b43752fe5860e588435525195324fe250287105ebd472ac138e97de45e6 - md5: 836389b6b9ae58f3fbcf7cafebd5c7f2 - depends: - - __osx >=11.0 - license: Apache-2.0 WITH LLVM-exception - license_family: Apache - size: 570141 - timestamp: 1772001147762 -- conda: https://prefix.dev/conda-forge/osx-arm64/libcxx-22.1.0-h55c6f16_1.conda - sha256: ce1049fa6fda9cf08ff1c50fb39573b5b0ea6958375d8ea7ccd8456ab81a0bcb - md5: e9c56daea841013e7774b5cd46f41564 - depends: - - __osx >=11.0 - license: Apache-2.0 WITH LLVM-exception - license_family: Apache - size: 568910 - timestamp: 1772001095642 -- conda: https://prefix.dev/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda - sha256: c076a213bd3676cc1ef22eeff91588826273513ccc6040d9bea68bccdc849501 - md5: 9314bc5a1fe7d1044dc9dfd3ef400535 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - libpciaccess >=0.18,<0.19.0a0 - license: MIT - license_family: MIT - size: 310785 - timestamp: 1757212153962 -- conda: https://prefix.dev/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda - sha256: 7fd5408d359d05a969133e47af580183fbf38e2235b562193d427bb9dad79723 - md5: c151d5eb730e9b7480e6d48c0fc44048 - depends: - - __glibc >=2.17,<3.0.a0 - - libglvnd 1.7.0 ha4b6fd6_2 - license: LicenseRef-libglvnd - size: 44840 - timestamp: 1731330973553 -- conda: https://prefix.dev/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda - sha256: d78f1d3bea8c031d2f032b760f36676d87929b18146351c4464c66b0869df3f5 - md5: e7f7ce06ec24cfcfb9e36d28cf82ba57 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - constrains: - - expat 2.7.4.* 
- license: MIT - license_family: MIT - size: 76798 - timestamp: 1771259418166 -- conda: https://prefix.dev/conda-forge/osx-64/libexpat-2.7.4-h991f03e_0.conda - sha256: 8d9d79b2de7d6f335692391f5281607221bf5d040e6724dad4c4d77cd603ce43 - md5: a684eb8a19b2aa68fde0267df172a1e3 - depends: - - __osx >=10.13 - constrains: - - expat 2.7.4.* - license: MIT - license_family: MIT - size: 74578 - timestamp: 1771260142624 -- conda: https://prefix.dev/conda-forge/osx-arm64/libexpat-2.7.4-hf6b4638_0.conda - sha256: 03887d8080d6a8fe02d75b80929271b39697ecca7628f0657d7afaea87761edf - md5: a92e310ae8dfc206ff449f362fc4217f - depends: - - __osx >=11.0 - constrains: - - expat 2.7.4.* - license: MIT - license_family: MIT - size: 68199 - timestamp: 1771260020767 -- conda: https://prefix.dev/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda - sha256: 31f19b6a88ce40ebc0d5a992c131f57d919f73c0b92cd1617a5bec83f6e961e6 - md5: a360c33a5abe61c07959e449fa1453eb - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - license: MIT - license_family: MIT - size: 58592 - timestamp: 1769456073053 -- conda: https://prefix.dev/conda-forge/osx-64/libffi-3.5.2-hd1f9c09_0.conda - sha256: 951958d1792238006fdc6fce7f71f1b559534743b26cc1333497d46e5903a2d6 - md5: 66a0dc7464927d0853b590b6f53ba3ea - depends: - - __osx >=10.13 - license: MIT - license_family: MIT - size: 53583 - timestamp: 1769456300951 -- conda: https://prefix.dev/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda - sha256: 6686a26466a527585e6a75cc2a242bf4a3d97d6d6c86424a441677917f28bec7 - md5: 43c04d9cb46ef176bb2a4c77e324d599 - depends: - - __osx >=11.0 - license: MIT - license_family: MIT - size: 40979 - timestamp: 1769456747661 -- conda: https://prefix.dev/conda-forge/linux-64/libflac-1.5.0-he200343_1.conda - sha256: e755e234236bdda3d265ae82e5b0581d259a9279e3e5b31d745dc43251ad64fb - md5: 47595b9d53054907a00d95e4d47af1d6 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - libiconv >=1.18,<2.0a0 - - libogg >=1.3.5,<1.4.0a0 - - libstdcxx 
>=14 - license: BSD-3-Clause - license_family: BSD - size: 424563 - timestamp: 1764526740626 -- conda: https://prefix.dev/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda - sha256: faf7d2017b4d718951e3a59d081eb09759152f93038479b768e3d612688f83f5 - md5: 0aa00f03f9e39fb9876085dee11a85d4 - depends: - - __glibc >=2.17,<3.0.a0 - - _openmp_mutex >=4.5 - constrains: - - libgcc-ng ==15.2.0=*_18 - - libgomp 15.2.0 he0feb66_18 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 1041788 - timestamp: 1771378212382 -- conda: https://prefix.dev/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda - sha256: e318a711400f536c81123e753d4c797a821021fb38970cebfb3f454126016893 - md5: d5e96b1ed75ca01906b3d2469b4ce493 - depends: - - libgcc 15.2.0 he0feb66_18 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 27526 - timestamp: 1771378224552 -- conda: https://prefix.dev/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda - sha256: dc2752241fa3d9e40ce552c1942d0a4b5eeb93740c9723873f6fcf8d39ef8d2d - md5: 928b8be80851f5d8ffb016f9c81dae7a - depends: - - __glibc >=2.17,<3.0.a0 - - libglvnd 1.7.0 ha4b6fd6_2 - - libglx 1.7.0 ha4b6fd6_2 - license: LicenseRef-libglvnd - size: 134712 - timestamp: 1731330998354 -- conda: https://prefix.dev/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda - sha256: a27e44168a1240b15659888ce0d9b938ed4bdb49e9ea68a7c1ff27bcea8b55ce - md5: bb26456332b07f68bf3b7622ed71c0da - depends: - - __glibc >=2.17,<3.0.a0 - - libffi >=3.5.2,<3.6.0a0 - - libgcc >=14 - - libiconv >=1.18,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - - pcre2 >=10.47,<10.48.0a0 - constrains: - - glib 2.86.4 *_1 - license: LGPL-2.1-or-later - size: 4398701 - timestamp: 1771863239578 -- conda: https://prefix.dev/conda-forge/osx-64/libglib-2.86.4-hec30fc1_1.conda - sha256: d45fd67e18e793aeb2485a7efe3e882df594601ed6136ed1863c56109e4ad9e3 - md5: b8437d8dc24f46da3565d7f0c5a96d45 - depends: - - __osx >=11.0 - - libffi >=3.5.2,<3.6.0a0 - - libiconv >=1.18,<2.0a0 - 
- libintl >=0.25.1,<1.0a0 - - libzlib >=1.3.1,<2.0a0 - - pcre2 >=10.47,<10.48.0a0 - constrains: - - glib 2.86.4 *_1 - license: LGPL-2.1-or-later - size: 4186085 - timestamp: 1771863964173 -- conda: https://prefix.dev/conda-forge/osx-arm64/libglib-2.86.4-he378b5c_1.conda - sha256: a4254a241a96198e019ced2e0d2967e4c0ef64fac32077a45c065b32dc2b15d2 - md5: 673069f6725ed7b1073f9b96094294d1 - depends: - - __osx >=11.0 - - libffi >=3.5.2,<3.6.0a0 - - libiconv >=1.18,<2.0a0 - - libintl >=0.25.1,<1.0a0 - - libzlib >=1.3.1,<2.0a0 - - pcre2 >=10.47,<10.48.0a0 - constrains: - - glib 2.86.4 *_1 - license: LGPL-2.1-or-later - size: 4108927 - timestamp: 1771864169970 -- conda: https://prefix.dev/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda - sha256: 1175f8a7a0c68b7f81962699751bb6574e6f07db4c9f72825f978e3016f46850 - md5: 434ca7e50e40f4918ab701e3facd59a0 - depends: - - __glibc >=2.17,<3.0.a0 - license: LicenseRef-libglvnd - size: 132463 - timestamp: 1731330968309 -- conda: https://prefix.dev/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda - sha256: 2d35a679624a93ce5b3e9dd301fff92343db609b79f0363e6d0ceb3a6478bfa7 - md5: c8013e438185f33b13814c5c488acd5c - depends: - - __glibc >=2.17,<3.0.a0 - - libglvnd 1.7.0 ha4b6fd6_2 - - xorg-libx11 >=1.8.10,<2.0a0 - license: LicenseRef-libglvnd - size: 75504 - timestamp: 1731330988898 -- conda: https://prefix.dev/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda - sha256: 21337ab58e5e0649d869ab168d4e609b033509de22521de1bfed0c031bfc5110 - md5: 239c5e9546c38a1e884d69effcf4c882 - depends: - - __glibc >=2.17,<3.0.a0 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 603262 - timestamp: 1771378117851 -- conda: https://prefix.dev/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda - sha256: c467851a7312765447155e071752d7bf9bf44d610a5687e32706f480aad2833f - md5: 915f5995e94f60e9a4826e0b0920ee88 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - license: LGPL-2.1-only - size: 790176 - timestamp: 1754908768807 
-- conda: https://prefix.dev/conda-forge/osx-64/libiconv-1.18-h57a12c2_2.conda - sha256: a1c8cecdf9966921e13f0ae921309a1f415dfbd2b791f2117cf7e8f5e61a48b6 - md5: 210a85a1119f97ea7887188d176db135 - depends: - - __osx >=10.13 - license: LGPL-2.1-only - size: 737846 - timestamp: 1754908900138 -- conda: https://prefix.dev/conda-forge/osx-arm64/libiconv-1.18-h23cfdf5_2.conda - sha256: de0336e800b2af9a40bdd694b03870ac4a848161b35c8a2325704f123f185f03 - md5: 4d5a7445f0b25b6a3ddbb56e790f5251 - depends: - - __osx >=11.0 - license: LGPL-2.1-only - size: 750379 - timestamp: 1754909073836 -- conda: https://prefix.dev/conda-forge/osx-64/libintl-0.25.1-h3184127_1.conda - sha256: 8c352744517bc62d24539d1ecc813b9fdc8a785c780197c5f0b84ec5b0dfe122 - md5: a8e54eefc65645193c46e8b180f62d22 - depends: - - __osx >=10.13 - - libiconv >=1.18,<2.0a0 - license: LGPL-2.1-or-later - size: 96909 - timestamp: 1753343977382 -- conda: https://prefix.dev/conda-forge/osx-arm64/libintl-0.25.1-h493aca8_0.conda - sha256: 99d2cebcd8f84961b86784451b010f5f0a795ed1c08f1e7c76fbb3c22abf021a - md5: 5103f6a6b210a3912faf8d7db516918c - depends: - - __osx >=11.0 - - libiconv >=1.18,<2.0a0 - license: LGPL-2.1-or-later - size: 90957 - timestamp: 1751558394144 -- conda: https://prefix.dev/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda - sha256: 755c55ebab181d678c12e49cced893598f2bab22d582fbbf4d8b83c18be207eb - md5: c7c83eecbb72d88b940c249af56c8b17 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - constrains: - - xz 5.8.2.* - license: 0BSD - size: 113207 - timestamp: 1768752626120 -- conda: https://prefix.dev/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda - sha256: ffb066ddf2e76953f92e06677021c73c85536098f1c21fcd15360dbc859e22e4 - md5: 68e52064ed3897463c0e958ab5c8f91b - depends: - - libgcc >=13 - - __glibc >=2.17,<3.0.a0 - license: BSD-3-Clause - license_family: BSD - size: 218500 - timestamp: 1745825989535 -- conda: https://prefix.dev/conda-forge/linux-64/libopus-1.6.1-h280c20c_0.conda - sha256: 
f1061a26213b9653bbb8372bfa3f291787ca091a9a3060a10df4d5297aad74fd - md5: 2446ac1fe030c2aa6141386c1f5a6aed - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - license: BSD-3-Clause - license_family: BSD - size: 324993 - timestamp: 1768497114401 -- conda: https://prefix.dev/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda - sha256: 0bd91de9b447a2991e666f284ae8c722ffb1d84acb594dbd0c031bd656fa32b2 - md5: 70e3400cbbfa03e96dcde7fc13e38c7b - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: MIT - license_family: MIT - size: 28424 - timestamp: 1749901812541 -- conda: https://prefix.dev/conda-forge/linux-64/libsndfile-1.2.2-hc7d488a_2.conda - sha256: 57cb5f92110324c04498b96563211a1bca6a74b2918b1e8df578bfed03cc32e4 - md5: 067590f061c9f6ea7e61e3b2112ed6b3 - depends: - - __glibc >=2.17,<3.0.a0 - - lame >=3.100,<3.101.0a0 - - libflac >=1.5.0,<1.6.0a0 - - libgcc >=14 - - libogg >=1.3.5,<1.4.0a0 - - libopus >=1.5.2,<2.0a0 - - libstdcxx >=14 - - libvorbis >=1.3.7,<1.4.0a0 - - mpg123 >=1.32.9,<1.33.0a0 - license: LGPL-2.1-or-later - license_family: LGPL - size: 355619 - timestamp: 1765181778282 -- conda: https://prefix.dev/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda - sha256: 78668020064fdaa27e9ab65cd2997e2c837b564ab26ce3bf0e58a2ce1a525c6e - md5: 1b08cd684f34175e4514474793d44bcb - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc 15.2.0 he0feb66_18 - constrains: - - libstdcxx-ng ==15.2.0=*_18 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL - size: 5852330 - timestamp: 1771378262446 -- conda: https://prefix.dev/conda-forge/linux-64/libsystemd0-257.10-hd0affe5_4.conda - sha256: f0356bb344a684e7616fc84675cfca6401140320594e8686be30e8ac7547aed2 - md5: 1d4c18d75c51ed9d00092a891a547a7d - depends: - - __glibc >=2.17,<3.0.a0 - - libcap >=2.77,<2.78.0a0 - - libgcc >=14 - license: LGPL-2.1-or-later - size: 491953 - timestamp: 1770738638119 -- conda: https://prefix.dev/conda-forge/linux-64/libudev1-257.10-hd0affe5_4.conda - sha256: 
ed4d2c01fbeb1330f112f7e399408634db277d3dfb2dec1d0395f56feaa24351 - md5: 6c74fba677b61a0842cbf0f63eee683b - depends: - - __glibc >=2.17,<3.0.a0 - - libcap >=2.77,<2.78.0a0 - - libgcc >=14 - license: LGPL-2.1-or-later - size: 144654 - timestamp: 1770738650966 -- conda: https://prefix.dev/conda-forge/linux-64/libunwind-1.8.3-h65a8314_0.conda - sha256: 71c8b9d5c72473752a0bb6e91b01dd209a03916cb71f36cc6a564e3a2a132d7a - md5: e179a69edd30d75c0144d7a380b88f28 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - libstdcxx >=14 - license: MIT - license_family: MIT - size: 75995 - timestamp: 1757032240102 -- conda: https://prefix.dev/conda-forge/linux-64/liburing-2.14-hb700be7_0.conda - sha256: 3d17b7aa90610afc65356e9e6149aeac0b2df19deda73a51f0a09cf04fd89286 - md5: 56f65185b520e016d29d01657ac02c0d - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - libstdcxx >=14 - license: MIT - license_family: MIT - size: 154203 - timestamp: 1770566529700 -- conda: https://prefix.dev/conda-forge/linux-64/libusb-1.0.29-h73b1eb8_0.conda - sha256: 89c84f5b26028a9d0f5c4014330703e7dff73ba0c98f90103e9cef6b43a5323c - md5: d17e3fb595a9f24fa9e149239a33475d - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libudev1 >=257.4 - license: LGPL-2.1-or-later - size: 89551 - timestamp: 1748856210075 -- conda: https://prefix.dev/conda-forge/osx-64/libusb-1.0.29-h2287256_0.conda - sha256: b46c1c71d8be2d19615a10eaa997b3547848d1aee25a7e9486ad1ca8d61626a7 - md5: e5d5fd6235a259665d7652093dc7d6f1 - depends: - - __osx >=10.13 - license: LGPL-2.1-or-later - size: 85523 - timestamp: 1748856209535 -- conda: https://prefix.dev/conda-forge/osx-arm64/libusb-1.0.29-hbc156a2_0.conda - sha256: 5eee9a2bf359e474d4548874bcfc8d29ebad0d9ba015314439c256904e40aaad - md5: f6654e9e96e9d973981b3b2f898a5bfa - depends: - - __osx >=11.0 - license: LGPL-2.1-or-later - size: 83849 - timestamp: 1748856224950 -- conda: https://prefix.dev/conda-forge/win-64/libusb-1.0.29-h1839187_0.conda - sha256: 
9837f8e8de20b6c9c033561cd33b4554cd551b217e3b8d2862b353ed2c23d8b8 - md5: a656b2c367405cd24988cf67ff2675aa - depends: - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - - ucrt >=10.0.20348.0 - license: LGPL-2.1-or-later - size: 118204 - timestamp: 1748856290542 -- conda: https://prefix.dev/conda-forge/linux-64/libvorbis-1.3.7-h54a6638_2.conda - sha256: ca494c99c7e5ecc1b4cd2f72b5584cef3d4ce631d23511184411abcbb90a21a5 - md5: b4ecbefe517ed0157c37f8182768271c - depends: - - libogg - - libgcc >=14 - - __glibc >=2.17,<3.0.a0 - - libstdcxx >=14 - - libgcc >=14 - - libogg >=1.3.5,<1.4.0a0 - license: BSD-3-Clause - license_family: BSD - size: 285894 - timestamp: 1753879378005 -- conda: https://prefix.dev/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda - sha256: a68280d57dfd29e3d53400409a39d67c4b9515097eba733aa6fe00c880620e2b - md5: 31ad065eda3c2d88f8215b1289df9c89 - depends: - - __glibc >=2.17,<3.0.a0 - - libstdcxx >=14 - - libgcc >=14 - - xorg-libx11 >=1.8.12,<2.0a0 - - xorg-libxrandr >=1.5.5,<2.0a0 - constrains: - - libvulkan-headers 1.4.341.0.* - license: Apache-2.0 - license_family: APACHE - size: 199795 - timestamp: 1770077125520 -- conda: https://prefix.dev/conda-forge/osx-64/libvulkan-loader-1.4.341.0-ha6bc089_0.conda - sha256: ce9bc992ffffdefbde5f7977b0a3ad9036650f8323611e4024908755891674e0 - md5: dcce6338514e65c2b7fdf172f1264561 - depends: - - __osx >=10.13 - - libcxx >=19 - constrains: - - libvulkan-headers 1.4.341.0.* - license: Apache-2.0 - license_family: APACHE - size: 182703 - timestamp: 1770077140315 -- conda: https://prefix.dev/conda-forge/osx-arm64/libvulkan-loader-1.4.341.0-h3feff0a_0.conda - sha256: d2790dafc9149b1acd45b9033d02cfa3f3e9ee5af97bd61e0a5718c414a0a135 - md5: 6b4c9a5b130759136a0dde0c373cb0ea - depends: - - __osx >=11.0 - - libcxx >=19 - constrains: - - libvulkan-headers 1.4.341.0.* - license: Apache-2.0 - license_family: APACHE - size: 180304 - timestamp: 
1770077143460 -- conda: https://prefix.dev/conda-forge/win-64/libvulkan-loader-1.4.341.0-h477610d_0.conda - sha256: 0f0965edca8b255187604fc7712c53fe9064b31a1845a7dfb2b63bf660de84a7 - md5: 804880b2674119b84277d6c16b01677d - depends: - - vc >=14.3,<15 - - vc14_runtime >=14.44.35208 - - ucrt >=10.0.20348.0 - constrains: - - libvulkan-headers 1.4.341.0.* - license: Apache-2.0 - license_family: APACHE - size: 282251 - timestamp: 1770077165680 -- conda: https://prefix.dev/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda - sha256: 666c0c431b23c6cec6e492840b176dde533d48b7e6fb8883f5071223433776aa - md5: 92ed62436b625154323d40d5f2f11dd7 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - pthread-stubs - - xorg-libxau >=1.0.11,<2.0a0 - - xorg-libxdmcp - license: MIT - license_family: MIT - size: 395888 - timestamp: 1727278577118 -- conda: https://prefix.dev/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda - sha256: d2195b5fbcb0af1ff7b345efdf89290c279b8d1d74f325ae0ac98148c375863c - md5: 2bca1fbb221d9c3c8e3a155784bbc2e9 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - libstdcxx >=14 - - libxcb >=1.17.0,<2.0a0 - - libxml2 - - libxml2-16 >=2.14.6 - - xkeyboard-config - - xorg-libxau >=1.0.12,<2.0a0 - license: MIT/X11 Derivative - license_family: MIT - size: 837922 - timestamp: 1764794163823 -- conda: https://prefix.dev/conda-forge/linux-64/libxml2-2.15.2-h031cc0b_0.conda - sha256: a9612f88139197b2777a00325c72d872507e70d4f4111021f65e55797f97de67 - md5: 672c49f67192f0a7c2fa55986219d197 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - libiconv >=1.18,<2.0a0 - - liblzma >=5.8.2,<6.0a0 - - libxml2-16 2.15.2 hf2a90c1_0 - - libzlib >=1.3.1,<2.0a0 - constrains: - - icu <0.0a0 - license: MIT - license_family: MIT - size: 46783 - timestamp: 1772704627225 -- conda: https://prefix.dev/conda-forge/linux-64/libxml2-16-2.15.2-hf2a90c1_0.conda - sha256: 9448a9080c8c2d32964f4005a75bf9f5879e4e3163de23f8efe361f1d6234e2b - md5: e44f52764dd288c96c4676a967b7e112 - 
depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - libiconv >=1.18,<2.0a0 - - liblzma >=5.8.2,<6.0a0 - - libzlib >=1.3.1,<2.0a0 - constrains: - - libxml2 2.15.2 - - icu <0.0a0 - license: MIT - license_family: MIT - size: 556790 - timestamp: 1772704614138 -- conda: https://prefix.dev/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda - sha256: d4bfe88d7cb447768e31650f06257995601f89076080e76df55e3112d4e47dc4 - md5: edb0dca6bc32e4f4789199455a1dbeb8 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - constrains: - - zlib 1.3.1 *_2 - license: Zlib - license_family: Other - size: 60963 - timestamp: 1727963148474 -- conda: https://prefix.dev/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda - sha256: 8412f96504fc5993a63edf1e211d042a1fd5b1d51dedec755d2058948fcced09 - md5: 003a54a4e32b02f7355b50a837e699da - depends: - - __osx >=10.13 - constrains: - - zlib 1.3.1 *_2 - license: Zlib - license_family: Other - size: 57133 - timestamp: 1727963183990 -- conda: https://prefix.dev/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda - sha256: ce34669eadaba351cd54910743e6a2261b67009624dbc7daeeafdef93616711b - md5: 369964e85dc26bfe78f41399b366c435 - depends: - - __osx >=11.0 - constrains: - - zlib 1.3.1 *_2 - license: Zlib - license_family: Other - size: 46438 - timestamp: 1727963202283 -- conda: https://prefix.dev/conda-forge/linux-64/mpg123-1.32.9-hc50e24c_0.conda - sha256: 39c4700fb3fbe403a77d8cc27352fa72ba744db487559d5d44bf8411bb4ea200 - md5: c7f302fd11eeb0987a6a5e1f3aed6a21 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - libstdcxx >=13 - license: LGPL-2.1-only - license_family: LGPL - size: 491140 - timestamp: 1730581373280 -- conda: https://prefix.dev/conda-forge/linux-64/pcre2-10.47-haa7fec5_0.conda - sha256: 5e6f7d161356fefd981948bea5139c5aa0436767751a6930cb1ca801ebb113ff - md5: 7a3bff861a6583f1889021facefc08b1 - depends: - - __glibc >=2.17,<3.0.a0 - - bzip2 >=1.0.8,<2.0a0 - - libgcc >=14 - - libzlib >=1.3.1,<2.0a0 - license: BSD-3-Clause - 
license_family: BSD - size: 1222481 - timestamp: 1763655398280 -- conda: https://prefix.dev/conda-forge/osx-64/pcre2-10.47-h13923f0_0.conda - sha256: 8d64a9d36073346542e5ea042ef8207a45a0069a2e65ce3323ee3146db78134c - md5: 08f970fb2b75f5be27678e077ebedd46 - depends: - - __osx >=10.13 - - bzip2 >=1.0.8,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - license: BSD-3-Clause - license_family: BSD - size: 1106584 - timestamp: 1763655837207 -- conda: https://prefix.dev/conda-forge/osx-arm64/pcre2-10.47-h30297fc_0.conda - sha256: 5e2e443f796f2fd92adf7978286a525fb768c34e12b1ee9ded4000a41b2894ba - md5: 9b4190c4055435ca3502070186eba53a - depends: - - __osx >=11.0 - - bzip2 >=1.0.8,<2.0a0 - - libzlib >=1.3.1,<2.0a0 - license: BSD-3-Clause - license_family: BSD - size: 850231 - timestamp: 1763655726735 -- conda: https://prefix.dev/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda - sha256: 9c88f8c64590e9567c6c80823f0328e58d3b1efb0e1c539c0315ceca764e0973 - md5: b3c17d95b5a10c6e64a21fa17573e70e - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - license: MIT - license_family: MIT - size: 8252 - timestamp: 1726802366959 -- conda: https://prefix.dev/conda-forge/linux-64/pulseaudio-client-17.0-h9a6aba3_3.conda - sha256: 0a0858c59805d627d02bdceee965dd84fde0aceab03a2f984325eec08d822096 - md5: b8ea447fdf62e3597cb8d2fae4eb1a90 - depends: - - __glibc >=2.17,<3.0.a0 - - dbus >=1.16.2,<2.0a0 - - libgcc >=14 - - libglib >=2.86.1,<3.0a0 - - libiconv >=1.18,<2.0a0 - - libsndfile >=1.2.2,<1.3.0a0 - - libsystemd0 >=257.10 - - libxcb >=1.17.0,<2.0a0 - constrains: - - pulseaudio 17.0 *_3 - license: LGPL-2.1-or-later - license_family: LGPL - size: 750785 - timestamp: 1763148198088 -- conda: https://prefix.dev/conda-forge/linux-64/sdl2-2.32.56-h54a6638_0.conda - sha256: 987ad072939fdd51c92ea8d3544b286bb240aefda329f9b03a51d9b7e777f9de - md5: cdd138897d94dc07d99afe7113a07bec - depends: - - libstdcxx >=14 - - libgcc >=14 - - __glibc >=2.17,<3.0.a0 - - libgl >=1.7.0,<2.0a0 - - sdl3 >=3.2.22,<4.0a0 - - 
libegl >=1.7.0,<2.0a0 - license: Zlib - size: 589145 - timestamp: 1757842881 -- conda: https://prefix.dev/conda-forge/osx-64/sdl2-2.32.56-h53ec75d_0.conda - sha256: 3f64f2cabdfe2f4ed8df6adf26a86bd9db07380cb8fa28d18a80040cc8b8b7d9 - md5: 0a8a18995e507da927d1f8c4b7f15ca8 - depends: - - __osx >=10.13 - - libcxx >=19 - - sdl3 >=3.2.22,<4.0a0 - license: Zlib - size: 740066 - timestamp: 1757842955775 -- conda: https://prefix.dev/conda-forge/osx-arm64/sdl2-2.32.56-h248ca61_0.conda - sha256: 704c5cae4bc839a18c70cbf3387d7789f1902828c79c6ddabcd34daf594f4103 - md5: 092c5b693dc6adf5f409d12f33295a2a - depends: - - libcxx >=19 - - __osx >=11.0 - - sdl3 >=3.2.22,<4.0a0 - license: Zlib - size: 542508 - timestamp: 1757842919681 -- conda: https://prefix.dev/conda-forge/win-64/sdl2-2.32.56-h5112557_0.conda - sha256: d17da21386bdbf32bce5daba5142916feb95eed63ef92b285808c765705bbfd2 - md5: 4cffbfebb6614a1bff3fc666527c25c7 - depends: - - vc >=14.3,<15 - - vc14_runtime >=14.44.35208 - - ucrt >=10.0.20348.0 - - vc >=14.3,<15 - - vc14_runtime >=14.44.35208 - - ucrt >=10.0.20348.0 - - sdl3 >=3.2.22,<4.0a0 - license: Zlib - size: 572101 - timestamp: 1757842925694 -- conda: https://prefix.dev/conda-forge/linux-64/sdl3-3.4.2-hdeec2a5_0.conda - sha256: 64b982664550e01c25f8f09333c0ee54d4764a80fe8636b8aaf881fe6e8a0dbe - md5: 88a69db027a8ff59dab972a09d69a1ab - depends: - - __glibc >=2.17,<3.0.a0 - - libstdcxx >=14 - - libgcc >=14 - - xorg-libxscrnsaver >=1.2.4,<2.0a0 - - libdrm >=2.4.125,<2.5.0a0 - - xorg-libxfixes >=6.0.2,<7.0a0 - - libudev1 >=257.10 - - pulseaudio-client >=17.0,<17.1.0a0 - - xorg-libxtst >=1.2.5,<2.0a0 - - libegl >=1.7.0,<2.0a0 - - libvulkan-loader >=1.4.341.0,<2.0a0 - - xorg-libxcursor >=1.2.3,<2.0a0 - - xorg-libx11 >=1.8.13,<2.0a0 - - liburing >=2.14,<2.15.0a0 - - libxkbcommon >=1.13.1,<2.0a0 - - libunwind >=1.8.3,<1.9.0a0 - - libusb >=1.0.29,<2.0a0 - - dbus >=1.16.2,<2.0a0 - - xorg-libxext >=1.3.7,<2.0a0 - - libgl >=1.7.0,<2.0a0 - - xorg-libxi >=1.8.2,<2.0a0 - - wayland 
>=1.24.0,<2.0a0 - license: Zlib - size: 2138749 - timestamp: 1771668185803 -- conda: https://prefix.dev/conda-forge/osx-64/sdl3-3.4.2-hf9078ff_0.conda - sha256: b3939796f728f52be95a0f95c89bfd890af3a613fe7f6ab17c9fb6ea477812e8 - md5: bb826b1b04460daaef656f99432a5770 - depends: - - __osx >=11.0 - - libcxx >=19 - - dbus >=1.16.2,<2.0a0 - - libusb >=1.0.29,<2.0a0 - - libvulkan-loader >=1.4.341.0,<2.0a0 - license: Zlib - size: 1696561 - timestamp: 1771668206362 -- conda: https://prefix.dev/conda-forge/osx-arm64/sdl3-3.4.2-h6fa9c73_0.conda - sha256: e0589f700a9e9c188ba54c7ba5482885dc2e025f01de30fab098896cd6fda0a3 - md5: 5e999442b4391dcd702f6026ac1a23f2 - depends: - - libcxx >=19 - - __osx >=11.0 - - libusb >=1.0.29,<2.0a0 - - libvulkan-loader >=1.4.341.0,<2.0a0 - - dbus >=1.16.2,<2.0a0 - license: Zlib - size: 1556104 - timestamp: 1771668215375 -- conda: https://prefix.dev/conda-forge/win-64/sdl3-3.4.2-h5112557_0.conda - sha256: a4677774a9d542c6f4bac8779a2d7105748d38d8b7d56c8d02f36d14fba471b9 - md5: a0256884d35489e520360267e67ce3fc - depends: - - vc >=14.3,<15 - - vc14_runtime >=14.44.35208 - - ucrt >=10.0.20348.0 - - libvulkan-loader >=1.4.341.0,<2.0a0 - - libusb >=1.0.29,<2.0a0 - license: Zlib - size: 1669623 - timestamp: 1771668231217 -- source: sdl_example[6711f831] @ . - variants: - cxx_compiler: vs2022 - target_platform: win-64 - depends: - - vc >=14.3,<15 - - vc14_runtime >=14.44.35208 - - ucrt >=10.0.20348.0 - - sdl2 >=2.32.56,<3.0a0 -- source: sdl_example[bd431642] @ . - variants: - target_platform: linux-64 - depends: - - libstdcxx >=15 - - libgcc >=15 - - sdl2 >=2.32.56,<3.0a0 -- source: sdl_example[2815fc71] @ . - variants: - target_platform: osx-64 - depends: - - libcxx >=22 - - sdl2 >=2.32.56,<3.0a0 -- source: sdl_example[f88715ba] @ . 
- variants: - target_platform: osx-arm64 - depends: - - libcxx >=22 - - sdl2 >=2.32.56,<3.0a0 -- conda: https://prefix.dev/conda-forge/win-64/ucrt-10.0.26100.0-h57928b3_0.conda - sha256: 3005729dce6f3d3f5ec91dfc49fc75a0095f9cd23bab49efb899657297ac91a5 - md5: 71b24316859acd00bdb8b38f5e2ce328 - constrains: - - vc14_runtime >=14.29.30037 - - vs2015_runtime >=14.29.30037 - license: LicenseRef-MicrosoftWindowsSDK10 - size: 694692 - timestamp: 1756385147981 -- conda: https://prefix.dev/conda-forge/win-64/vc-14.3-h41ae7f8_34.conda - sha256: 9dc40c2610a6e6727d635c62cced5ef30b7b30123f5ef67d6139e23d21744b3a - md5: 1e610f2416b6acdd231c5f573d754a0f - depends: - - vc14_runtime >=14.44.35208 - track_features: - - vc14 - license: BSD-3-Clause - license_family: BSD - size: 19356 - timestamp: 1767320221521 -- conda: https://prefix.dev/conda-forge/win-64/vc14_runtime-14.44.35208-h818238b_34.conda - sha256: 02732f953292cce179de9b633e74928037fa3741eb5ef91c3f8bae4f761d32a5 - md5: 37eb311485d2d8b2c419449582046a42 - depends: - - ucrt >=10.0.20348.0 - - vcomp14 14.44.35208 h818238b_34 - constrains: - - vs2015_runtime 14.44.35208.* *_34 - license: LicenseRef-MicrosoftVisualCpp2015-2022Runtime - license_family: Proprietary - size: 683233 - timestamp: 1767320219644 -- conda: https://prefix.dev/conda-forge/win-64/vcomp14-14.44.35208-h818238b_34.conda - sha256: 878d5d10318b119bd98ed3ed874bd467acbe21996e1d81597a1dbf8030ea0ce6 - md5: 242d9f25d2ae60c76b38a5e42858e51d - depends: - - ucrt >=10.0.20348.0 - constrains: - - vs2015_runtime 14.44.35208.* *_34 - license: LicenseRef-MicrosoftVisualCpp2015-2022Runtime - license_family: Proprietary - size: 115235 - timestamp: 1767320173250 -- conda: https://prefix.dev/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda - sha256: 3aa04ae8e9521d9b56b562376d944c3e52b69f9d2a0667f77b8953464822e125 - md5: 035da2e4f5770f036ff704fa17aace24 - depends: - - __glibc >=2.17,<3.0.a0 - - libexpat >=2.7.1,<3.0a0 - - libffi >=3.5.2,<3.6.0a0 - - libgcc >=14 - - libstdcxx 
>=14 - license: MIT - license_family: MIT - size: 329779 - timestamp: 1761174273487 -- conda: https://prefix.dev/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda - sha256: 19c2bb14bec84b0e995b56b752369775c75f1589314b43733948bb5f471a6915 - md5: b56e0c8432b56decafae7e78c5f29ba5 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - xorg-libx11 >=1.8.13,<2.0a0 - license: MIT - license_family: MIT - size: 399291 - timestamp: 1772021302485 -- conda: https://prefix.dev/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda - sha256: 516d4060139dbb4de49a4dcdc6317a9353fb39ebd47789c14e6fe52de0deee42 - md5: 861fb6ccbc677bb9a9fb2468430b9c6a - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - libxcb >=1.17.0,<2.0a0 - license: MIT - license_family: MIT - size: 839652 - timestamp: 1770819209719 -- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda - sha256: 6bc6ab7a90a5d8ac94c7e300cc10beb0500eeba4b99822768ca2f2ef356f731b - md5: b2895afaf55bf96a8c8282a2e47a5de0 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - license: MIT - license_family: MIT - size: 15321 - timestamp: 1762976464266 -- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda - sha256: 832f538ade441b1eee863c8c91af9e69b356cd3e9e1350fff4fe36cc573fc91a - md5: 2ccd714aa2242315acaf0a67faea780b - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxfixes >=6.0.1,<7.0a0 - - xorg-libxrender >=0.9.11,<0.10.0a0 - license: MIT - license_family: MIT - size: 32533 - timestamp: 1730908305254 -- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb03c661_1.conda - sha256: 25d255fb2eef929d21ff660a0c687d38a6d2ccfbcbf0cc6aa738b12af6e9d142 - md5: 1dafce8548e38671bea82e3f5c6ce22f - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - license: MIT - license_family: MIT - size: 20591 - timestamp: 1762976546182 -- conda: 
https://prefix.dev/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda - sha256: 79c60fc6acfd3d713d6340d3b4e296836a0f8c51602327b32794625826bd052f - md5: 34e54f03dfea3e7a2dcf1453a85f1085 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - xorg-libx11 >=1.8.12,<2.0a0 - license: MIT - license_family: MIT - size: 50326 - timestamp: 1769445253162 -- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda - sha256: 83c4c99d60b8784a611351220452a0a85b080668188dce5dfa394b723d7b64f4 - md5: ba231da7fccf9ea1e768caf5c7099b84 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - xorg-libx11 >=1.8.12,<2.0a0 - license: MIT - license_family: MIT - size: 20071 - timestamp: 1759282564045 -- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda - sha256: 1a724b47d98d7880f26da40e45f01728e7638e6ec69f35a3e11f92acd05f9e7a - md5: 17dcc85db3c7886650b8908b183d6876 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxext >=1.3.6,<2.0a0 - - xorg-libxfixes >=6.0.1,<7.0a0 - license: MIT - license_family: MIT - size: 47179 - timestamp: 1727799254088 -- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda - sha256: 80ed047a5cb30632c3dc5804c7716131d767089f65877813d4ae855ee5c9d343 - md5: e192019153591938acf7322b6459d36e - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=14 - - xorg-libx11 >=1.8.12,<2.0a0 - - xorg-libxext >=1.3.6,<2.0a0 - - xorg-libxrender >=0.9.12,<0.10.0a0 - license: MIT - license_family: MIT - size: 30456 - timestamp: 1769445263457 -- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda - sha256: 044c7b3153c224c6cedd4484dd91b389d2d7fd9c776ad0f4a34f099b3389f4a1 - md5: 96d57aba173e878a2089d5638016dc5e - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - license: MIT - license_family: MIT - size: 33005 - timestamp: 1734229037766 -- conda: 
https://prefix.dev/conda-forge/linux-64/xorg-libxscrnsaver-1.2.4-hb9d3cd8_0.conda - sha256: 58e8fc1687534124832d22e102f098b5401173212ac69eb9fd96b16a3e2c8cb2 - md5: 303f7a0e9e0cd7d250bb6b952cecda90 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxext >=1.3.6,<2.0a0 - license: MIT - license_family: MIT - size: 14412 - timestamp: 1727899730073 -- conda: https://prefix.dev/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda - sha256: 752fdaac5d58ed863bbf685bb6f98092fe1a488ea8ebb7ed7b606ccfce08637a - md5: 7bbe9a0cc0df0ac5f5a8ad6d6a11af2f - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc >=13 - - xorg-libx11 >=1.8.10,<2.0a0 - - xorg-libxext >=1.3.6,<2.0a0 - - xorg-libxi >=1.7.10,<2.0a0 - license: MIT - license_family: MIT - size: 32808 - timestamp: 1727964811275 + - conda: https://prefix.dev/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda + build_number: 20 + sha256: 1dd3fffd892081df9726d7eb7e0dea6198962ba775bd88842135a4ddb4deb3c9 + md5: a9f577daf3de00bca7c3c76c0ecbd1de + depends: + - __glibc >=2.17,<3.0.a0 + - libgomp >=7.5.0 + constrains: + - openmp_impl <0.0a0 + license: BSD-3-Clause + license_family: BSD + size: 28948 + timestamp: 1770939786096 + - conda: https://prefix.dev/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda + sha256: a9c114cbfeda42a226e2db1809a538929d2f118ef855372293bd188f71711c48 + md5: 791365c5f65975051e4e017b5da3abf5 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: GPL-2.0-or-later + license_family: GPL + size: 68072 + timestamp: 1756738968573 + - conda: https://prefix.dev/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda + sha256: 0b75d45f0bba3e95dc693336fa51f40ea28c980131fec438afb7ce6118ed05f6 + md5: d2ffd7602c02f2b316fd921d39876885 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: bzip2-1.0.6 + license_family: BSD + size: 260182 + timestamp: 1771350215188 + - conda: https://prefix.dev/conda-forge/osx-64/bzip2-1.0.8-h500dc9f_9.conda + sha256: 
9f242f13537ef1ce195f93f0cc162965d6cc79da578568d6d8e50f70dd025c42 + md5: 4173ac3b19ec0a4f400b4f782910368b + depends: + - __osx >=10.13 + license: bzip2-1.0.6 + license_family: BSD + size: 133427 + timestamp: 1771350680709 + - conda: https://prefix.dev/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda + sha256: 540fe54be35fac0c17feefbdc3e29725cce05d7367ffedfaaa1bdda234b019df + md5: 620b85a3f45526a8bc4d23fd78fc22f0 + depends: + - __osx >=11.0 + license: bzip2-1.0.6 + license_family: BSD + size: 124834 + timestamp: 1771350416561 + - conda: https://prefix.dev/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda + sha256: 8bb557af1b2b7983cf56292336a1a1853f26555d9c6cecf1e5b2b96838c9da87 + md5: ce96f2f470d39bd96ce03945af92e280 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - libzlib >=1.3.1,<2.0a0 + - libglib >=2.86.2,<3.0a0 + - libexpat >=2.7.3,<3.0a0 + license: AFL-2.1 OR GPL-2.0-or-later + size: 447649 + timestamp: 1764536047944 + - conda: https://prefix.dev/conda-forge/osx-64/dbus-1.16.2-h6e7f9a9_1.conda + sha256: 80ea0a20236ecb7006f7a89235802a34851eaac2f7f4323ca7acc094bcf7f372 + md5: cdbed7d22d4bdd74e60ce78bc7c6dd58 + depends: + - __osx >=10.13 + - libcxx >=19 + - libexpat >=2.7.3,<3.0a0 + - libglib >=2.86.2,<3.0a0 + - libzlib >=1.3.1,<2.0a0 + license: AFL-2.1 OR GPL-2.0-or-later + size: 407670 + timestamp: 1764536068038 + - conda: https://prefix.dev/conda-forge/osx-arm64/dbus-1.16.2-h3ff7a7c_1.conda + sha256: a8207751ed261764061866880da38e4d3063e167178bfe85b6db9501432462ba + md5: 5a3506971d2d53023c1c4450e908a8da + depends: + - libcxx >=19 + - __osx >=11.0 + - libglib >=2.86.2,<3.0a0 + - libzlib >=1.3.1,<2.0a0 + - libexpat >=2.7.3,<3.0a0 + license: AFL-2.1 OR GPL-2.0-or-later + size: 393811 + timestamp: 1764536084131 + - conda: https://prefix.dev/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2 + sha256: aad2a703b9d7b038c0f745b853c6bb5f122988fe1a7a096e0e606d9cbec4eaab + md5: a8832b479f93521a9e7b5b743803be51 + depends: + - libgcc-ng >=12 
+ license: LGPL-2.0-only + license_family: LGPL + size: 508258 + timestamp: 1664996250081 + - conda: https://prefix.dev/conda-forge/linux-64/libcap-2.77-h3ff7636_0.conda + sha256: 9517cce5193144af0fcbf19b7bd67db0a329c2cc2618f28ffecaa921a1cbe9d3 + md5: 09c264d40c67b82b49a3f3b89037bd2e + depends: + - __glibc >=2.17,<3.0.a0 + - attr >=2.5.2,<2.6.0a0 + - libgcc >=14 + license: BSD-3-Clause + license_family: BSD + size: 121429 + timestamp: 1762349484074 + - conda: https://prefix.dev/conda-forge/osx-64/libcxx-22.1.0-h19cb2f5_1.conda + sha256: fa002b43752fe5860e588435525195324fe250287105ebd472ac138e97de45e6 + md5: 836389b6b9ae58f3fbcf7cafebd5c7f2 + depends: + - __osx >=11.0 + license: Apache-2.0 WITH LLVM-exception + license_family: Apache + size: 570141 + timestamp: 1772001147762 + - conda: https://prefix.dev/conda-forge/osx-arm64/libcxx-22.1.0-h55c6f16_1.conda + sha256: ce1049fa6fda9cf08ff1c50fb39573b5b0ea6958375d8ea7ccd8456ab81a0bcb + md5: e9c56daea841013e7774b5cd46f41564 + depends: + - __osx >=11.0 + license: Apache-2.0 WITH LLVM-exception + license_family: Apache + size: 568910 + timestamp: 1772001095642 + - conda: https://prefix.dev/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda + sha256: c076a213bd3676cc1ef22eeff91588826273513ccc6040d9bea68bccdc849501 + md5: 9314bc5a1fe7d1044dc9dfd3ef400535 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libpciaccess >=0.18,<0.19.0a0 + license: MIT + license_family: MIT + size: 310785 + timestamp: 1757212153962 + - conda: https://prefix.dev/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda + sha256: 7fd5408d359d05a969133e47af580183fbf38e2235b562193d427bb9dad79723 + md5: c151d5eb730e9b7480e6d48c0fc44048 + depends: + - __glibc >=2.17,<3.0.a0 + - libglvnd 1.7.0 ha4b6fd6_2 + license: LicenseRef-libglvnd + size: 44840 + timestamp: 1731330973553 + - conda: https://prefix.dev/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda + sha256: d78f1d3bea8c031d2f032b760f36676d87929b18146351c4464c66b0869df3f5 + md5: 
e7f7ce06ec24cfcfb9e36d28cf82ba57 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + constrains: + - expat 2.7.4.* + license: MIT + license_family: MIT + size: 76798 + timestamp: 1771259418166 + - conda: https://prefix.dev/conda-forge/osx-64/libexpat-2.7.4-h991f03e_0.conda + sha256: 8d9d79b2de7d6f335692391f5281607221bf5d040e6724dad4c4d77cd603ce43 + md5: a684eb8a19b2aa68fde0267df172a1e3 + depends: + - __osx >=10.13 + constrains: + - expat 2.7.4.* + license: MIT + license_family: MIT + size: 74578 + timestamp: 1771260142624 + - conda: https://prefix.dev/conda-forge/osx-arm64/libexpat-2.7.4-hf6b4638_0.conda + sha256: 03887d8080d6a8fe02d75b80929271b39697ecca7628f0657d7afaea87761edf + md5: a92e310ae8dfc206ff449f362fc4217f + depends: + - __osx >=11.0 + constrains: + - expat 2.7.4.* + license: MIT + license_family: MIT + size: 68199 + timestamp: 1771260020767 + - conda: https://prefix.dev/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda + sha256: 31f19b6a88ce40ebc0d5a992c131f57d919f73c0b92cd1617a5bec83f6e961e6 + md5: a360c33a5abe61c07959e449fa1453eb + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: MIT + license_family: MIT + size: 58592 + timestamp: 1769456073053 + - conda: https://prefix.dev/conda-forge/osx-64/libffi-3.5.2-hd1f9c09_0.conda + sha256: 951958d1792238006fdc6fce7f71f1b559534743b26cc1333497d46e5903a2d6 + md5: 66a0dc7464927d0853b590b6f53ba3ea + depends: + - __osx >=10.13 + license: MIT + license_family: MIT + size: 53583 + timestamp: 1769456300951 + - conda: https://prefix.dev/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda + sha256: 6686a26466a527585e6a75cc2a242bf4a3d97d6d6c86424a441677917f28bec7 + md5: 43c04d9cb46ef176bb2a4c77e324d599 + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + size: 40979 + timestamp: 1769456747661 + - conda: https://prefix.dev/conda-forge/linux-64/libflac-1.5.0-he200343_1.conda + sha256: e755e234236bdda3d265ae82e5b0581d259a9279e3e5b31d745dc43251ad64fb + md5: 47595b9d53054907a00d95e4d47af1d6 + 
depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libiconv >=1.18,<2.0a0 + - libogg >=1.3.5,<1.4.0a0 + - libstdcxx >=14 + license: BSD-3-Clause + license_family: BSD + size: 424563 + timestamp: 1764526740626 + - conda: https://prefix.dev/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda + sha256: faf7d2017b4d718951e3a59d081eb09759152f93038479b768e3d612688f83f5 + md5: 0aa00f03f9e39fb9876085dee11a85d4 + depends: + - __glibc >=2.17,<3.0.a0 + - _openmp_mutex >=4.5 + constrains: + - libgcc-ng ==15.2.0=*_18 + - libgomp 15.2.0 he0feb66_18 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 1041788 + timestamp: 1771378212382 + - conda: https://prefix.dev/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda + sha256: e318a711400f536c81123e753d4c797a821021fb38970cebfb3f454126016893 + md5: d5e96b1ed75ca01906b3d2469b4ce493 + depends: + - libgcc 15.2.0 he0feb66_18 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 27526 + timestamp: 1771378224552 + - conda: https://prefix.dev/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda + sha256: dc2752241fa3d9e40ce552c1942d0a4b5eeb93740c9723873f6fcf8d39ef8d2d + md5: 928b8be80851f5d8ffb016f9c81dae7a + depends: + - __glibc >=2.17,<3.0.a0 + - libglvnd 1.7.0 ha4b6fd6_2 + - libglx 1.7.0 ha4b6fd6_2 + license: LicenseRef-libglvnd + size: 134712 + timestamp: 1731330998354 + - conda: https://prefix.dev/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda + sha256: a27e44168a1240b15659888ce0d9b938ed4bdb49e9ea68a7c1ff27bcea8b55ce + md5: bb26456332b07f68bf3b7622ed71c0da + depends: + - __glibc >=2.17,<3.0.a0 + - libffi >=3.5.2,<3.6.0a0 + - libgcc >=14 + - libiconv >=1.18,<2.0a0 + - libzlib >=1.3.1,<2.0a0 + - pcre2 >=10.47,<10.48.0a0 + constrains: + - glib 2.86.4 *_1 + license: LGPL-2.1-or-later + size: 4398701 + timestamp: 1771863239578 + - conda: https://prefix.dev/conda-forge/osx-64/libglib-2.86.4-hec30fc1_1.conda + sha256: d45fd67e18e793aeb2485a7efe3e882df594601ed6136ed1863c56109e4ad9e3 
+ md5: b8437d8dc24f46da3565d7f0c5a96d45 + depends: + - __osx >=11.0 + - libffi >=3.5.2,<3.6.0a0 + - libiconv >=1.18,<2.0a0 + - libintl >=0.25.1,<1.0a0 + - libzlib >=1.3.1,<2.0a0 + - pcre2 >=10.47,<10.48.0a0 + constrains: + - glib 2.86.4 *_1 + license: LGPL-2.1-or-later + size: 4186085 + timestamp: 1771863964173 + - conda: https://prefix.dev/conda-forge/osx-arm64/libglib-2.86.4-he378b5c_1.conda + sha256: a4254a241a96198e019ced2e0d2967e4c0ef64fac32077a45c065b32dc2b15d2 + md5: 673069f6725ed7b1073f9b96094294d1 + depends: + - __osx >=11.0 + - libffi >=3.5.2,<3.6.0a0 + - libiconv >=1.18,<2.0a0 + - libintl >=0.25.1,<1.0a0 + - libzlib >=1.3.1,<2.0a0 + - pcre2 >=10.47,<10.48.0a0 + constrains: + - glib 2.86.4 *_1 + license: LGPL-2.1-or-later + size: 4108927 + timestamp: 1771864169970 + - conda: https://prefix.dev/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda + sha256: 1175f8a7a0c68b7f81962699751bb6574e6f07db4c9f72825f978e3016f46850 + md5: 434ca7e50e40f4918ab701e3facd59a0 + depends: + - __glibc >=2.17,<3.0.a0 + license: LicenseRef-libglvnd + size: 132463 + timestamp: 1731330968309 + - conda: https://prefix.dev/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda + sha256: 2d35a679624a93ce5b3e9dd301fff92343db609b79f0363e6d0ceb3a6478bfa7 + md5: c8013e438185f33b13814c5c488acd5c + depends: + - __glibc >=2.17,<3.0.a0 + - libglvnd 1.7.0 ha4b6fd6_2 + - xorg-libx11 >=1.8.10,<2.0a0 + license: LicenseRef-libglvnd + size: 75504 + timestamp: 1731330988898 + - conda: https://prefix.dev/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda + sha256: 21337ab58e5e0649d869ab168d4e609b033509de22521de1bfed0c031bfc5110 + md5: 239c5e9546c38a1e884d69effcf4c882 + depends: + - __glibc >=2.17,<3.0.a0 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 603262 + timestamp: 1771378117851 + - conda: https://prefix.dev/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda + sha256: c467851a7312765447155e071752d7bf9bf44d610a5687e32706f480aad2833f + md5: 
915f5995e94f60e9a4826e0b0920ee88 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: LGPL-2.1-only + size: 790176 + timestamp: 1754908768807 + - conda: https://prefix.dev/conda-forge/osx-64/libiconv-1.18-h57a12c2_2.conda + sha256: a1c8cecdf9966921e13f0ae921309a1f415dfbd2b791f2117cf7e8f5e61a48b6 + md5: 210a85a1119f97ea7887188d176db135 + depends: + - __osx >=10.13 + license: LGPL-2.1-only + size: 737846 + timestamp: 1754908900138 + - conda: https://prefix.dev/conda-forge/osx-arm64/libiconv-1.18-h23cfdf5_2.conda + sha256: de0336e800b2af9a40bdd694b03870ac4a848161b35c8a2325704f123f185f03 + md5: 4d5a7445f0b25b6a3ddbb56e790f5251 + depends: + - __osx >=11.0 + license: LGPL-2.1-only + size: 750379 + timestamp: 1754909073836 + - conda: https://prefix.dev/conda-forge/osx-64/libintl-0.25.1-h3184127_1.conda + sha256: 8c352744517bc62d24539d1ecc813b9fdc8a785c780197c5f0b84ec5b0dfe122 + md5: a8e54eefc65645193c46e8b180f62d22 + depends: + - __osx >=10.13 + - libiconv >=1.18,<2.0a0 + license: LGPL-2.1-or-later + size: 96909 + timestamp: 1753343977382 + - conda: https://prefix.dev/conda-forge/osx-arm64/libintl-0.25.1-h493aca8_0.conda + sha256: 99d2cebcd8f84961b86784451b010f5f0a795ed1c08f1e7c76fbb3c22abf021a + md5: 5103f6a6b210a3912faf8d7db516918c + depends: + - __osx >=11.0 + - libiconv >=1.18,<2.0a0 + license: LGPL-2.1-or-later + size: 90957 + timestamp: 1751558394144 + - conda: https://prefix.dev/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda + sha256: 755c55ebab181d678c12e49cced893598f2bab22d582fbbf4d8b83c18be207eb + md5: c7c83eecbb72d88b940c249af56c8b17 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + constrains: + - xz 5.8.2.* + license: 0BSD + size: 113207 + timestamp: 1768752626120 + - conda: https://prefix.dev/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda + sha256: ffb066ddf2e76953f92e06677021c73c85536098f1c21fcd15360dbc859e22e4 + md5: 68e52064ed3897463c0e958ab5c8f91b + depends: + - libgcc >=13 + - __glibc >=2.17,<3.0.a0 + license: BSD-3-Clause + 
license_family: BSD + size: 218500 + timestamp: 1745825989535 + - conda: https://prefix.dev/conda-forge/linux-64/libopus-1.6.1-h280c20c_0.conda + sha256: f1061a26213b9653bbb8372bfa3f291787ca091a9a3060a10df4d5297aad74fd + md5: 2446ac1fe030c2aa6141386c1f5a6aed + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: BSD-3-Clause + license_family: BSD + size: 324993 + timestamp: 1768497114401 + - conda: https://prefix.dev/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda + sha256: 0bd91de9b447a2991e666f284ae8c722ffb1d84acb594dbd0c031bd656fa32b2 + md5: 70e3400cbbfa03e96dcde7fc13e38c7b + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: MIT + license_family: MIT + size: 28424 + timestamp: 1749901812541 + - conda: https://prefix.dev/conda-forge/linux-64/libsndfile-1.2.2-hc7d488a_2.conda + sha256: 57cb5f92110324c04498b96563211a1bca6a74b2918b1e8df578bfed03cc32e4 + md5: 067590f061c9f6ea7e61e3b2112ed6b3 + depends: + - __glibc >=2.17,<3.0.a0 + - lame >=3.100,<3.101.0a0 + - libflac >=1.5.0,<1.6.0a0 + - libgcc >=14 + - libogg >=1.3.5,<1.4.0a0 + - libopus >=1.5.2,<2.0a0 + - libstdcxx >=14 + - libvorbis >=1.3.7,<1.4.0a0 + - mpg123 >=1.32.9,<1.33.0a0 + license: LGPL-2.1-or-later + license_family: LGPL + size: 355619 + timestamp: 1765181778282 + - conda: https://prefix.dev/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda + sha256: 78668020064fdaa27e9ab65cd2997e2c837b564ab26ce3bf0e58a2ce1a525c6e + md5: 1b08cd684f34175e4514474793d44bcb + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc 15.2.0 he0feb66_18 + constrains: + - libstdcxx-ng ==15.2.0=*_18 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 5852330 + timestamp: 1771378262446 + - conda: https://prefix.dev/conda-forge/linux-64/libsystemd0-257.10-hd0affe5_4.conda + sha256: f0356bb344a684e7616fc84675cfca6401140320594e8686be30e8ac7547aed2 + md5: 1d4c18d75c51ed9d00092a891a547a7d + depends: + - __glibc >=2.17,<3.0.a0 + - libcap >=2.77,<2.78.0a0 + - libgcc >=14 + license: 
LGPL-2.1-or-later + size: 491953 + timestamp: 1770738638119 + - conda: https://prefix.dev/conda-forge/linux-64/libudev1-257.10-hd0affe5_4.conda + sha256: ed4d2c01fbeb1330f112f7e399408634db277d3dfb2dec1d0395f56feaa24351 + md5: 6c74fba677b61a0842cbf0f63eee683b + depends: + - __glibc >=2.17,<3.0.a0 + - libcap >=2.77,<2.78.0a0 + - libgcc >=14 + license: LGPL-2.1-or-later + size: 144654 + timestamp: 1770738650966 + - conda: https://prefix.dev/conda-forge/linux-64/libunwind-1.8.3-h65a8314_0.conda + sha256: 71c8b9d5c72473752a0bb6e91b01dd209a03916cb71f36cc6a564e3a2a132d7a + md5: e179a69edd30d75c0144d7a380b88f28 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + license: MIT + license_family: MIT + size: 75995 + timestamp: 1757032240102 + - conda: https://prefix.dev/conda-forge/linux-64/liburing-2.14-hb700be7_0.conda + sha256: 3d17b7aa90610afc65356e9e6149aeac0b2df19deda73a51f0a09cf04fd89286 + md5: 56f65185b520e016d29d01657ac02c0d + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + license: MIT + license_family: MIT + size: 154203 + timestamp: 1770566529700 + - conda: https://prefix.dev/conda-forge/linux-64/libusb-1.0.29-h73b1eb8_0.conda + sha256: 89c84f5b26028a9d0f5c4014330703e7dff73ba0c98f90103e9cef6b43a5323c + md5: d17e3fb595a9f24fa9e149239a33475d + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libudev1 >=257.4 + license: LGPL-2.1-or-later + size: 89551 + timestamp: 1748856210075 + - conda: https://prefix.dev/conda-forge/osx-64/libusb-1.0.29-h2287256_0.conda + sha256: b46c1c71d8be2d19615a10eaa997b3547848d1aee25a7e9486ad1ca8d61626a7 + md5: e5d5fd6235a259665d7652093dc7d6f1 + depends: + - __osx >=10.13 + license: LGPL-2.1-or-later + size: 85523 + timestamp: 1748856209535 + - conda: https://prefix.dev/conda-forge/osx-arm64/libusb-1.0.29-hbc156a2_0.conda + sha256: 5eee9a2bf359e474d4548874bcfc8d29ebad0d9ba015314439c256904e40aaad + md5: f6654e9e96e9d973981b3b2f898a5bfa + depends: + - __osx >=11.0 + license: 
LGPL-2.1-or-later + size: 83849 + timestamp: 1748856224950 + - conda: https://prefix.dev/conda-forge/win-64/libusb-1.0.29-h1839187_0.conda + sha256: 9837f8e8de20b6c9c033561cd33b4554cd551b217e3b8d2862b353ed2c23d8b8 + md5: a656b2c367405cd24988cf67ff2675aa + depends: + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + - ucrt >=10.0.20348.0 + license: LGPL-2.1-or-later + size: 118204 + timestamp: 1748856290542 + - conda: https://prefix.dev/conda-forge/linux-64/libvorbis-1.3.7-h54a6638_2.conda + sha256: ca494c99c7e5ecc1b4cd2f72b5584cef3d4ce631d23511184411abcbb90a21a5 + md5: b4ecbefe517ed0157c37f8182768271c + depends: + - libogg + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - libstdcxx >=14 + - libgcc >=14 + - libogg >=1.3.5,<1.4.0a0 + license: BSD-3-Clause + license_family: BSD + size: 285894 + timestamp: 1753879378005 + - conda: https://prefix.dev/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda + sha256: a68280d57dfd29e3d53400409a39d67c4b9515097eba733aa6fe00c880620e2b + md5: 31ad065eda3c2d88f8215b1289df9c89 + depends: + - __glibc >=2.17,<3.0.a0 + - libstdcxx >=14 + - libgcc >=14 + - xorg-libx11 >=1.8.12,<2.0a0 + - xorg-libxrandr >=1.5.5,<2.0a0 + constrains: + - libvulkan-headers 1.4.341.0.* + license: Apache-2.0 + license_family: APACHE + size: 199795 + timestamp: 1770077125520 + - conda: https://prefix.dev/conda-forge/osx-64/libvulkan-loader-1.4.341.0-ha6bc089_0.conda + sha256: ce9bc992ffffdefbde5f7977b0a3ad9036650f8323611e4024908755891674e0 + md5: dcce6338514e65c2b7fdf172f1264561 + depends: + - __osx >=10.13 + - libcxx >=19 + constrains: + - libvulkan-headers 1.4.341.0.* + license: Apache-2.0 + license_family: APACHE + size: 182703 + timestamp: 1770077140315 + - conda: https://prefix.dev/conda-forge/osx-arm64/libvulkan-loader-1.4.341.0-h3feff0a_0.conda + sha256: d2790dafc9149b1acd45b9033d02cfa3f3e9ee5af97bd61e0a5718c414a0a135 + md5: 6b4c9a5b130759136a0dde0c373cb0ea + depends: + - 
__osx >=11.0 + - libcxx >=19 + constrains: + - libvulkan-headers 1.4.341.0.* + license: Apache-2.0 + license_family: APACHE + size: 180304 + timestamp: 1770077143460 + - conda: https://prefix.dev/conda-forge/win-64/libvulkan-loader-1.4.341.0-h477610d_0.conda + sha256: 0f0965edca8b255187604fc7712c53fe9064b31a1845a7dfb2b63bf660de84a7 + md5: 804880b2674119b84277d6c16b01677d + depends: + - vc >=14.3,<15 + - vc14_runtime >=14.44.35208 + - ucrt >=10.0.20348.0 + constrains: + - libvulkan-headers 1.4.341.0.* + license: Apache-2.0 + license_family: APACHE + size: 282251 + timestamp: 1770077165680 + - conda: https://prefix.dev/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda + sha256: 666c0c431b23c6cec6e492840b176dde533d48b7e6fb8883f5071223433776aa + md5: 92ed62436b625154323d40d5f2f11dd7 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - pthread-stubs + - xorg-libxau >=1.0.11,<2.0a0 + - xorg-libxdmcp + license: MIT + license_family: MIT + size: 395888 + timestamp: 1727278577118 + - conda: https://prefix.dev/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda + sha256: d2195b5fbcb0af1ff7b345efdf89290c279b8d1d74f325ae0ac98148c375863c + md5: 2bca1fbb221d9c3c8e3a155784bbc2e9 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - libxcb >=1.17.0,<2.0a0 + - libxml2 + - libxml2-16 >=2.14.6 + - xkeyboard-config + - xorg-libxau >=1.0.12,<2.0a0 + license: MIT/X11 Derivative + license_family: MIT + size: 837922 + timestamp: 1764794163823 + - conda: https://prefix.dev/conda-forge/linux-64/libxml2-2.15.2-h031cc0b_0.conda + sha256: a9612f88139197b2777a00325c72d872507e70d4f4111021f65e55797f97de67 + md5: 672c49f67192f0a7c2fa55986219d197 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libiconv >=1.18,<2.0a0 + - liblzma >=5.8.2,<6.0a0 + - libxml2-16 2.15.2 hf2a90c1_0 + - libzlib >=1.3.1,<2.0a0 + constrains: + - icu <0.0a0 + license: MIT + license_family: MIT + size: 46783 + timestamp: 1772704627225 + - conda: 
https://prefix.dev/conda-forge/linux-64/libxml2-16-2.15.2-hf2a90c1_0.conda + sha256: 9448a9080c8c2d32964f4005a75bf9f5879e4e3163de23f8efe361f1d6234e2b + md5: e44f52764dd288c96c4676a967b7e112 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libiconv >=1.18,<2.0a0 + - liblzma >=5.8.2,<6.0a0 + - libzlib >=1.3.1,<2.0a0 + constrains: + - libxml2 2.15.2 + - icu <0.0a0 + license: MIT + license_family: MIT + size: 556790 + timestamp: 1772704614138 + - conda: https://prefix.dev/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda + sha256: d4bfe88d7cb447768e31650f06257995601f89076080e76df55e3112d4e47dc4 + md5: edb0dca6bc32e4f4789199455a1dbeb8 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + constrains: + - zlib 1.3.1 *_2 + license: Zlib + license_family: Other + size: 60963 + timestamp: 1727963148474 + - conda: https://prefix.dev/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda + sha256: 8412f96504fc5993a63edf1e211d042a1fd5b1d51dedec755d2058948fcced09 + md5: 003a54a4e32b02f7355b50a837e699da + depends: + - __osx >=10.13 + constrains: + - zlib 1.3.1 *_2 + license: Zlib + license_family: Other + size: 57133 + timestamp: 1727963183990 + - conda: https://prefix.dev/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda + sha256: ce34669eadaba351cd54910743e6a2261b67009624dbc7daeeafdef93616711b + md5: 369964e85dc26bfe78f41399b366c435 + depends: + - __osx >=11.0 + constrains: + - zlib 1.3.1 *_2 + license: Zlib + license_family: Other + size: 46438 + timestamp: 1727963202283 + - conda: https://prefix.dev/conda-forge/linux-64/mpg123-1.32.9-hc50e24c_0.conda + sha256: 39c4700fb3fbe403a77d8cc27352fa72ba744db487559d5d44bf8411bb4ea200 + md5: c7f302fd11eeb0987a6a5e1f3aed6a21 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 + license: LGPL-2.1-only + license_family: LGPL + size: 491140 + timestamp: 1730581373280 + - conda: https://prefix.dev/conda-forge/linux-64/pcre2-10.47-haa7fec5_0.conda + sha256: 
5e6f7d161356fefd981948bea5139c5aa0436767751a6930cb1ca801ebb113ff + md5: 7a3bff861a6583f1889021facefc08b1 + depends: + - __glibc >=2.17,<3.0.a0 + - bzip2 >=1.0.8,<2.0a0 + - libgcc >=14 + - libzlib >=1.3.1,<2.0a0 + license: BSD-3-Clause + license_family: BSD + size: 1222481 + timestamp: 1763655398280 + - conda: https://prefix.dev/conda-forge/osx-64/pcre2-10.47-h13923f0_0.conda + sha256: 8d64a9d36073346542e5ea042ef8207a45a0069a2e65ce3323ee3146db78134c + md5: 08f970fb2b75f5be27678e077ebedd46 + depends: + - __osx >=10.13 + - bzip2 >=1.0.8,<2.0a0 + - libzlib >=1.3.1,<2.0a0 + license: BSD-3-Clause + license_family: BSD + size: 1106584 + timestamp: 1763655837207 + - conda: https://prefix.dev/conda-forge/osx-arm64/pcre2-10.47-h30297fc_0.conda + sha256: 5e2e443f796f2fd92adf7978286a525fb768c34e12b1ee9ded4000a41b2894ba + md5: 9b4190c4055435ca3502070186eba53a + depends: + - __osx >=11.0 + - bzip2 >=1.0.8,<2.0a0 + - libzlib >=1.3.1,<2.0a0 + license: BSD-3-Clause + license_family: BSD + size: 850231 + timestamp: 1763655726735 + - conda: https://prefix.dev/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda + sha256: 9c88f8c64590e9567c6c80823f0328e58d3b1efb0e1c539c0315ceca764e0973 + md5: b3c17d95b5a10c6e64a21fa17573e70e + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: MIT + license_family: MIT + size: 8252 + timestamp: 1726802366959 + - conda: https://prefix.dev/conda-forge/linux-64/pulseaudio-client-17.0-h9a6aba3_3.conda + sha256: 0a0858c59805d627d02bdceee965dd84fde0aceab03a2f984325eec08d822096 + md5: b8ea447fdf62e3597cb8d2fae4eb1a90 + depends: + - __glibc >=2.17,<3.0.a0 + - dbus >=1.16.2,<2.0a0 + - libgcc >=14 + - libglib >=2.86.1,<3.0a0 + - libiconv >=1.18,<2.0a0 + - libsndfile >=1.2.2,<1.3.0a0 + - libsystemd0 >=257.10 + - libxcb >=1.17.0,<2.0a0 + constrains: + - pulseaudio 17.0 *_3 + license: LGPL-2.1-or-later + license_family: LGPL + size: 750785 + timestamp: 1763148198088 + - conda: https://prefix.dev/conda-forge/linux-64/sdl2-2.32.56-h54a6638_0.conda 
+ sha256: 987ad072939fdd51c92ea8d3544b286bb240aefda329f9b03a51d9b7e777f9de + md5: cdd138897d94dc07d99afe7113a07bec + depends: + - libstdcxx >=14 + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - libgl >=1.7.0,<2.0a0 + - sdl3 >=3.2.22,<4.0a0 + - libegl >=1.7.0,<2.0a0 + license: Zlib + size: 589145 + timestamp: 1757842881 + - conda: https://prefix.dev/conda-forge/osx-64/sdl2-2.32.56-h53ec75d_0.conda + sha256: 3f64f2cabdfe2f4ed8df6adf26a86bd9db07380cb8fa28d18a80040cc8b8b7d9 + md5: 0a8a18995e507da927d1f8c4b7f15ca8 + depends: + - __osx >=10.13 + - libcxx >=19 + - sdl3 >=3.2.22,<4.0a0 + license: Zlib + size: 740066 + timestamp: 1757842955775 + - conda: https://prefix.dev/conda-forge/osx-arm64/sdl2-2.32.56-h248ca61_0.conda + sha256: 704c5cae4bc839a18c70cbf3387d7789f1902828c79c6ddabcd34daf594f4103 + md5: 092c5b693dc6adf5f409d12f33295a2a + depends: + - libcxx >=19 + - __osx >=11.0 + - sdl3 >=3.2.22,<4.0a0 + license: Zlib + size: 542508 + timestamp: 1757842919681 + - conda: https://prefix.dev/conda-forge/win-64/sdl2-2.32.56-h5112557_0.conda + sha256: d17da21386bdbf32bce5daba5142916feb95eed63ef92b285808c765705bbfd2 + md5: 4cffbfebb6614a1bff3fc666527c25c7 + depends: + - vc >=14.3,<15 + - vc14_runtime >=14.44.35208 + - ucrt >=10.0.20348.0 + - vc >=14.3,<15 + - vc14_runtime >=14.44.35208 + - ucrt >=10.0.20348.0 + - sdl3 >=3.2.22,<4.0a0 + license: Zlib + size: 572101 + timestamp: 1757842925694 + - conda: https://prefix.dev/conda-forge/linux-64/sdl3-3.4.2-hdeec2a5_0.conda + sha256: 64b982664550e01c25f8f09333c0ee54d4764a80fe8636b8aaf881fe6e8a0dbe + md5: 88a69db027a8ff59dab972a09d69a1ab + depends: + - __glibc >=2.17,<3.0.a0 + - libstdcxx >=14 + - libgcc >=14 + - xorg-libxscrnsaver >=1.2.4,<2.0a0 + - libdrm >=2.4.125,<2.5.0a0 + - xorg-libxfixes >=6.0.2,<7.0a0 + - libudev1 >=257.10 + - pulseaudio-client >=17.0,<17.1.0a0 + - xorg-libxtst >=1.2.5,<2.0a0 + - libegl >=1.7.0,<2.0a0 + - libvulkan-loader >=1.4.341.0,<2.0a0 + - xorg-libxcursor >=1.2.3,<2.0a0 + - xorg-libx11 >=1.8.13,<2.0a0 + - 
liburing >=2.14,<2.15.0a0 + - libxkbcommon >=1.13.1,<2.0a0 + - libunwind >=1.8.3,<1.9.0a0 + - libusb >=1.0.29,<2.0a0 + - dbus >=1.16.2,<2.0a0 + - xorg-libxext >=1.3.7,<2.0a0 + - libgl >=1.7.0,<2.0a0 + - xorg-libxi >=1.8.2,<2.0a0 + - wayland >=1.24.0,<2.0a0 + license: Zlib + size: 2138749 + timestamp: 1771668185803 + - conda: https://prefix.dev/conda-forge/osx-64/sdl3-3.4.2-hf9078ff_0.conda + sha256: b3939796f728f52be95a0f95c89bfd890af3a613fe7f6ab17c9fb6ea477812e8 + md5: bb826b1b04460daaef656f99432a5770 + depends: + - __osx >=11.0 + - libcxx >=19 + - dbus >=1.16.2,<2.0a0 + - libusb >=1.0.29,<2.0a0 + - libvulkan-loader >=1.4.341.0,<2.0a0 + license: Zlib + size: 1696561 + timestamp: 1771668206362 + - conda: https://prefix.dev/conda-forge/osx-arm64/sdl3-3.4.2-h6fa9c73_0.conda + sha256: e0589f700a9e9c188ba54c7ba5482885dc2e025f01de30fab098896cd6fda0a3 + md5: 5e999442b4391dcd702f6026ac1a23f2 + depends: + - libcxx >=19 + - __osx >=11.0 + - libusb >=1.0.29,<2.0a0 + - libvulkan-loader >=1.4.341.0,<2.0a0 + - dbus >=1.16.2,<2.0a0 + license: Zlib + size: 1556104 + timestamp: 1771668215375 + - conda: https://prefix.dev/conda-forge/win-64/sdl3-3.4.2-h5112557_0.conda + sha256: a4677774a9d542c6f4bac8779a2d7105748d38d8b7d56c8d02f36d14fba471b9 + md5: a0256884d35489e520360267e67ce3fc + depends: + - vc >=14.3,<15 + - vc14_runtime >=14.44.35208 + - ucrt >=10.0.20348.0 + - libvulkan-loader >=1.4.341.0,<2.0a0 + - libusb >=1.0.29,<2.0a0 + license: Zlib + size: 1669623 + timestamp: 1771668231217 + - conda_source: sdl_example[6711f831] @ . + variants: + cxx_compiler: vs2022 + target_platform: win-64 + depends: + - vc >=14.3,<15 + - vc14_runtime >=14.44.35208 + - ucrt >=10.0.20348.0 + - sdl2 >=2.32.56,<3.0a0 + - conda_source: sdl_example[bd431642] @ . + variants: + target_platform: linux-64 + depends: + - libstdcxx >=15 + - libgcc >=15 + - sdl2 >=2.32.56,<3.0a0 + - conda_source: sdl_example[2815fc71] @ . 
+ variants: + target_platform: osx-64 + depends: + - libcxx >=22 + - sdl2 >=2.32.56,<3.0a0 + - conda_source: sdl_example[f88715ba] @ . + variants: + target_platform: osx-arm64 + depends: + - libcxx >=22 + - sdl2 >=2.32.56,<3.0a0 + - conda: https://prefix.dev/conda-forge/win-64/ucrt-10.0.26100.0-h57928b3_0.conda + sha256: 3005729dce6f3d3f5ec91dfc49fc75a0095f9cd23bab49efb899657297ac91a5 + md5: 71b24316859acd00bdb8b38f5e2ce328 + constrains: + - vc14_runtime >=14.29.30037 + - vs2015_runtime >=14.29.30037 + license: LicenseRef-MicrosoftWindowsSDK10 + size: 694692 + timestamp: 1756385147981 + - conda: https://prefix.dev/conda-forge/win-64/vc-14.3-h41ae7f8_34.conda + sha256: 9dc40c2610a6e6727d635c62cced5ef30b7b30123f5ef67d6139e23d21744b3a + md5: 1e610f2416b6acdd231c5f573d754a0f + depends: + - vc14_runtime >=14.44.35208 + track_features: + - vc14 + license: BSD-3-Clause + license_family: BSD + size: 19356 + timestamp: 1767320221521 + - conda: https://prefix.dev/conda-forge/win-64/vc14_runtime-14.44.35208-h818238b_34.conda + sha256: 02732f953292cce179de9b633e74928037fa3741eb5ef91c3f8bae4f761d32a5 + md5: 37eb311485d2d8b2c419449582046a42 + depends: + - ucrt >=10.0.20348.0 + - vcomp14 14.44.35208 h818238b_34 + constrains: + - vs2015_runtime 14.44.35208.* *_34 + license: LicenseRef-MicrosoftVisualCpp2015-2022Runtime + license_family: Proprietary + size: 683233 + timestamp: 1767320219644 + - conda: https://prefix.dev/conda-forge/win-64/vcomp14-14.44.35208-h818238b_34.conda + sha256: 878d5d10318b119bd98ed3ed874bd467acbe21996e1d81597a1dbf8030ea0ce6 + md5: 242d9f25d2ae60c76b38a5e42858e51d + depends: + - ucrt >=10.0.20348.0 + constrains: + - vs2015_runtime 14.44.35208.* *_34 + license: LicenseRef-MicrosoftVisualCpp2015-2022Runtime + license_family: Proprietary + size: 115235 + timestamp: 1767320173250 + - conda: https://prefix.dev/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda + sha256: 3aa04ae8e9521d9b56b562376d944c3e52b69f9d2a0667f77b8953464822e125 + md5: 
035da2e4f5770f036ff704fa17aace24 + depends: + - __glibc >=2.17,<3.0.a0 + - libexpat >=2.7.1,<3.0a0 + - libffi >=3.5.2,<3.6.0a0 + - libgcc >=14 + - libstdcxx >=14 + license: MIT + license_family: MIT + size: 329779 + timestamp: 1761174273487 + - conda: https://prefix.dev/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda + sha256: 19c2bb14bec84b0e995b56b752369775c75f1589314b43733948bb5f471a6915 + md5: b56e0c8432b56decafae7e78c5f29ba5 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - xorg-libx11 >=1.8.13,<2.0a0 + license: MIT + license_family: MIT + size: 399291 + timestamp: 1772021302485 + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda + sha256: 516d4060139dbb4de49a4dcdc6317a9353fb39ebd47789c14e6fe52de0deee42 + md5: 861fb6ccbc677bb9a9fb2468430b9c6a + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libxcb >=1.17.0,<2.0a0 + license: MIT + license_family: MIT + size: 839652 + timestamp: 1770819209719 + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda + sha256: 6bc6ab7a90a5d8ac94c7e300cc10beb0500eeba4b99822768ca2f2ef356f731b + md5: b2895afaf55bf96a8c8282a2e47a5de0 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: MIT + license_family: MIT + size: 15321 + timestamp: 1762976464266 + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda + sha256: 832f538ade441b1eee863c8c91af9e69b356cd3e9e1350fff4fe36cc573fc91a + md5: 2ccd714aa2242315acaf0a67faea780b + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxfixes >=6.0.1,<7.0a0 + - xorg-libxrender >=0.9.11,<0.10.0a0 + license: MIT + license_family: MIT + size: 32533 + timestamp: 1730908305254 + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb03c661_1.conda + sha256: 25d255fb2eef929d21ff660a0c687d38a6d2ccfbcbf0cc6aa738b12af6e9d142 + md5: 1dafce8548e38671bea82e3f5c6ce22f + depends: + - __glibc >=2.17,<3.0.a0 + - 
libgcc >=14 + license: MIT + license_family: MIT + size: 20591 + timestamp: 1762976546182 + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda + sha256: 79c60fc6acfd3d713d6340d3b4e296836a0f8c51602327b32794625826bd052f + md5: 34e54f03dfea3e7a2dcf1453a85f1085 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - xorg-libx11 >=1.8.12,<2.0a0 + license: MIT + license_family: MIT + size: 50326 + timestamp: 1769445253162 + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda + sha256: 83c4c99d60b8784a611351220452a0a85b080668188dce5dfa394b723d7b64f4 + md5: ba231da7fccf9ea1e768caf5c7099b84 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - xorg-libx11 >=1.8.12,<2.0a0 + license: MIT + license_family: MIT + size: 20071 + timestamp: 1759282564045 + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda + sha256: 1a724b47d98d7880f26da40e45f01728e7638e6ec69f35a3e11f92acd05f9e7a + md5: 17dcc85db3c7886650b8908b183d6876 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxfixes >=6.0.1,<7.0a0 + license: MIT + license_family: MIT + size: 47179 + timestamp: 1727799254088 + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda + sha256: 80ed047a5cb30632c3dc5804c7716131d767089f65877813d4ae855ee5c9d343 + md5: e192019153591938acf7322b6459d36e + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - xorg-libx11 >=1.8.12,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxrender >=0.9.12,<0.10.0a0 + license: MIT + license_family: MIT + size: 30456 + timestamp: 1769445263457 + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda + sha256: 044c7b3153c224c6cedd4484dd91b389d2d7fd9c776ad0f4a34f099b3389f4a1 + md5: 96d57aba173e878a2089d5638016dc5e + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + license: MIT 
+ license_family: MIT + size: 33005 + timestamp: 1734229037766 + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxscrnsaver-1.2.4-hb9d3cd8_0.conda + sha256: 58e8fc1687534124832d22e102f098b5401173212ac69eb9fd96b16a3e2c8cb2 + md5: 303f7a0e9e0cd7d250bb6b952cecda90 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + license: MIT + license_family: MIT + size: 14412 + timestamp: 1727899730073 + - conda: https://prefix.dev/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda + sha256: 752fdaac5d58ed863bbf685bb6f98092fe1a488ea8ebb7ed7b606ccfce08637a + md5: 7bbe9a0cc0df0ac5f5a8ad6d6a11af2f + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxi >=1.7.10,<2.0a0 + license: MIT + license_family: MIT + size: 32808 + timestamp: 1727964811275 From 97933f1ffe5721285f5fc68f705ec517fa07aedb Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Thu, 12 Mar 2026 16:41:34 +0100 Subject: [PATCH 12/15] fix: remove unused dependencies --- Cargo.lock | 2 -- crates/pixi_cli/Cargo.toml | 2 -- 2 files changed, 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3abec9631a..879536c7f7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6004,7 +6004,6 @@ dependencies = [ "miette-arborium", "pathdiff", "pep508_rs", - "percent-encoding", "pixi_api", "pixi_auth", "pixi_build_frontend", @@ -6058,7 +6057,6 @@ dependencies = [ "uv-configuration", "uv-pep508", "uv-pypi-types", - "uv-redacted", "uv-requirements-txt", "which", "zip 2.4.2", diff --git a/crates/pixi_cli/Cargo.toml b/crates/pixi_cli/Cargo.toml index a7801e1180..3c79eab7d3 100644 --- a/crates/pixi_cli/Cargo.toml +++ b/crates/pixi_cli/Cargo.toml @@ -41,7 +41,6 @@ miette = { workspace = true, features = ["fancy-no-backtrace"] } miette-arborium = { version = "2.5.0", features = ["lang-toml"] } pathdiff = { workspace = true } pep508_rs = { 
workspace = true } -percent-encoding = { workspace = true } pixi_api = { workspace = true } pixi_auth = { workspace = true } pixi_build_frontend = { workspace = true } @@ -97,7 +96,6 @@ uv-client = { workspace = true } uv-configuration = { workspace = true } uv-pep508 = { workspace = true } uv-pypi-types = { workspace = true } -uv-redacted = { workspace = true } uv-requirements-txt = { workspace = true } which = { workspace = true } zip = { workspace = true, features = ["deflate", "time"] } From a54630d2280d1e7fe6e77110d307aed5792c0dd8 Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Fri, 13 Mar 2026 11:58:05 +0100 Subject: [PATCH 13/15] fix: dont resolve in updatecontext --- Cargo.lock | 22 ++ .../tests/integration_rust/build_tests.rs | 267 ++++++++++++++++++ crates/pixi_core/src/lock_file/mod.rs | 2 +- .../src/lock_file/records_by_name.rs | 30 +- crates/pixi_core/src/lock_file/update.rs | 105 +++---- .../test_specified_build_source/test_git.py | 11 +- 6 files changed, 371 insertions(+), 66 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 879536c7f7..4eeb19d9ba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1695,6 +1695,7 @@ dependencies = [ [[package]] name = "coalesced_map" version = "0.1.2" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "dashmap", "tokio", @@ -2741,6 +2742,7 @@ checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" [[package]] name = "file_url" version = "0.2.7" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "itertools 0.14.0", "percent-encoding", @@ -5406,6 +5408,7 @@ checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42" [[package]] name = "path_resolver" version = "0.2.7" +source = 
"git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "fs-err", @@ -7417,6 +7420,7 @@ dependencies = [ [[package]] name = "rattler" version = "0.40.1" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "anyhow", "clap", @@ -7730,6 +7734,7 @@ dependencies = [ [[package]] name = "rattler_cache" version = "0.6.16" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "anyhow", @@ -7761,6 +7766,7 @@ dependencies = [ [[package]] name = "rattler_conda_types" version = "0.44.1" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "chrono", @@ -7802,6 +7808,7 @@ dependencies = [ [[package]] name = "rattler_config" version = "0.3.4" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "console 0.16.3", "fs-err", @@ -7818,6 +7825,7 @@ dependencies = [ [[package]] name = "rattler_digest" version = "1.2.3" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "blake2", "digest", @@ -7894,6 +7902,7 @@ dependencies = [ [[package]] name = "rattler_lock" version = "0.27.1" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "chrono", @@ -7919,6 +7928,7 @@ dependencies = [ [[package]] name = "rattler_macros" version = "1.0.12" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "quote", "syn", @@ -7927,6 +7937,7 @@ dependencies = [ [[package]] name = "rattler_menuinst" version = "0.2.51" +source = 
"git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "chrono", "configparser", @@ -7956,6 +7967,7 @@ dependencies = [ [[package]] name = "rattler_networking" version = "0.26.4" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "anyhow", "async-once-cell", @@ -7988,6 +8000,7 @@ dependencies = [ [[package]] name = "rattler_package_streaming" version = "0.24.4" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "astral-tokio-tar 0.6.0", "astral_async_zip", @@ -8039,6 +8052,7 @@ dependencies = [ [[package]] name = "rattler_pty" version = "0.2.9" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "libc", "nix 0.30.1", @@ -8049,6 +8063,7 @@ dependencies = [ [[package]] name = "rattler_redaction" version = "0.1.13" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "reqwest 0.12.28", "reqwest-middleware", @@ -8058,6 +8073,7 @@ dependencies = [ [[package]] name = "rattler_repodata_gateway" version = "0.27.1" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "ahash", "anyhow", @@ -8119,6 +8135,7 @@ dependencies = [ [[package]] name = "rattler_s3" version = "0.1.27" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "aws-config", "aws-credential-types", @@ -8134,6 +8151,7 @@ dependencies = [ [[package]] name = "rattler_shell" version = "0.26.4" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "anyhow", 
"enum_dispatch", @@ -8153,6 +8171,7 @@ dependencies = [ [[package]] name = "rattler_solve" version = "5.0.1" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "chrono", "futures", @@ -8170,6 +8189,7 @@ dependencies = [ [[package]] name = "rattler_upload" version = "0.5.1" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "base64 0.22.1", "clap", @@ -8205,6 +8225,7 @@ dependencies = [ [[package]] name = "rattler_virtual_packages" version = "2.3.13" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "archspec", "libloading", @@ -9872,6 +9893,7 @@ dependencies = [ [[package]] name = "simple_spawn_blocking" version = "1.1.0" +source = "git+https://github.com/conda/rattler?branch=feature%2Flockfile-v7#a6be5f714ff13cb6c7f5bc4981d601ab2b1565b1" dependencies = [ "tokio", ] diff --git a/crates/pixi/tests/integration_rust/build_tests.rs b/crates/pixi/tests/integration_rust/build_tests.rs index 67cef5ad71..3efc837aa0 100644 --- a/crates/pixi/tests/integration_rust/build_tests.rs +++ b/crates/pixi/tests/integration_rust/build_tests.rs @@ -1037,3 +1037,270 @@ noarch = false "conda_outputs should NOT be called again after cache is updated" ); } + +/// Test that demonstrates a bug with unresolvable partial source records. +/// +/// When a lock-file contains partial source records (from mutable path sources) +/// and the source package changes in a way that makes the partial record +/// unresolvable (e.g., the package is renamed), the update flow should gracefully +/// re-solve instead of erroring out. +/// +/// The bug: `UpdateContext::finish()` tries to resolve ALL partial records from +/// the lock-file (including from environments already marked as out-of-date). 
+/// If resolution fails, it produces a hard error instead of proceeding with +/// the re-solve. +#[tokio::test] +async fn test_update_lock_file_with_unresolvable_partial_source_record() { + setup_tracing(); + + // Use an in-memory backend override so we don't need a real build backend. + let backend_override = BackendOverride::from_memory(PassthroughBackend::instantiator()); + let pixi = PixiControl::new() + .unwrap() + .with_backend_override(backend_override); + + // Create a source package directory with an initial name + let source_dir = pixi.workspace_path().join("my-package"); + fs::create_dir_all(&source_dir).unwrap(); + + let source_pixi_toml = r#" +[package] +name = "my-package" +version = "1.0.0" + +[package.build] +backend = { name = "in-memory", version = "0.1.0" } +"#; + fs::write(source_dir.join("pixi.toml"), source_pixi_toml).unwrap(); + + // Create the workspace manifest + let manifest_content = format!( + r#" +[workspace] +channels = [] +platforms = ["{}"] +preview = ["pixi-build"] + +[dependencies] +my-package = {{ path = "./my-package" }} +"#, + Platform::current() + ); + fs::write(pixi.manifest_path(), manifest_content).unwrap(); + + // First invocation: Generate the lock-file. + // This creates a lock-file where path source records are stored as partial + // (mutable sources are downgraded to partial on write). + let workspace = pixi.workspace().unwrap(); + let (_lock_file_data, was_updated) = workspace + .update_lock_file(pixi_core::UpdateLockFileOptions::default()) + .await + .expect("First lock file generation should succeed"); + assert!(was_updated, "First invocation should create the lock-file"); + + // Now rename the package in the child manifest. The lock-file on disk still + // has a partial record for "my-package", but the source now produces + // metadata for "renamed-package". This makes the old partial record + // unresolvable (name mismatch). 
+ let renamed_pixi_toml = r#" +[package] +name = "renamed-package" +version = "1.0.0" + +[package.build] +backend = { name = "in-memory", version = "0.1.0" } +"#; + fs::write(source_dir.join("pixi.toml"), renamed_pixi_toml).unwrap(); + + // Also update the workspace manifest to reference the new name + let updated_manifest = format!( + r#" +[workspace] +channels = [] +platforms = ["{}"] +preview = ["pixi-build"] + +[dependencies] +renamed-package = {{ path = "./my-package" }} +"#, + Platform::current() + ); + fs::write(pixi.manifest_path(), updated_manifest).unwrap(); + + // Second invocation: Update the lock-file. + // + // The satisfiability check correctly identifies the lock-file as out-of-date + // (the old "my-package" partial record can't be resolved because the source + // now produces "renamed-package"). However, `UpdateContext::finish()` also + // tries to resolve ALL partial records from the old lock-file (including + // the unresolvable one) and fails with a hard error. + // + // This SHOULD succeed — the system should re-solve and produce a new + // lock-file with "renamed-package". + let workspace = pixi.workspace().unwrap(); + let result = workspace + .update_lock_file(pixi_core::UpdateLockFileOptions::default()) + .await; + + match result { + Ok(_) => { + // This is the expected behavior — the system should gracefully + // re-solve and produce a new lock-file with "renamed-package". + } + Err(e) => { + panic!( + "Updating the lock-file after renaming a source package should succeed, \ + but it failed with: {e}" + ); + } + } +} + +/// Test that source records (including their metadata) survive a lock-file +/// roundtrip through `UnresolvedPixiRecord`. +/// +/// On the first lock, the solver produces a full source record. On write, path- +/// based sources are downgraded to partial. On the second lock, the partial +/// record is read back as `UnresolvedPixiRecord`, the satisfiability check +/// re-evaluates it, and the lock-file is written again. 
The source package +/// should be present and equivalent in both lock-files. +#[tokio::test] +async fn test_source_record_roundtrips_through_lock_file() { + setup_tracing(); + + let backend_override = BackendOverride::from_memory(PassthroughBackend::instantiator()); + let pixi = PixiControl::new() + .unwrap() + .with_backend_override(backend_override); + + // Create a source package directory + let source_dir = pixi.workspace_path().join("my-package"); + fs::create_dir_all(&source_dir).unwrap(); + + let source_pixi_toml = r#" +[package] +name = "my-package" +version = "1.0.0" + +[package.build] +backend = { name = "in-memory", version = "0.1.0" } +"#; + fs::write(source_dir.join("pixi.toml"), source_pixi_toml).unwrap(); + + // Create the workspace manifest + let manifest_content = format!( + r#" +[workspace] +channels = [] +platforms = ["{}"] +preview = ["pixi-build"] + +[dependencies] +my-package = {{ path = "./my-package" }} +"#, + Platform::current() + ); + fs::write(pixi.manifest_path(), manifest_content).unwrap(); + + // First lock + let workspace = pixi.workspace().unwrap(); + let (lock_file_data, _) = workspace + .update_lock_file(pixi_core::UpdateLockFileOptions::default()) + .await + .expect("First lock should succeed"); + + let lock_file = lock_file_data.into_lock_file(); + + // Find the source package in the lock-file. 
+ let env = lock_file + .environment(consts::DEFAULT_ENVIRONMENT_NAME) + .expect("default environment should exist"); + let platform = lock_file + .platform(&Platform::current().to_string()) + .expect("current platform should exist"); + + let source_packages: Vec<_> = env + .packages(platform) + .into_iter() + .flatten() + .filter_map(|p| p.as_source_conda()) + .collect(); + + assert!( + !source_packages.is_empty(), + "Expected at least one source package in the lock-file" + ); + + // Verify the source package location and metadata are present + let my_pkg = source_packages + .iter() + .find(|p| { + p.metadata + .as_full() + .is_some_and(|f| f.package_record.name.as_normalized() == "my-package") + || p.metadata + .as_partial() + .is_some_and(|part| part.name.as_normalized() == "my-package") + }) + .expect("my-package should be in source packages"); + + // The location should point to the source directory + let location_str = my_pkg.location.to_string(); + assert!( + location_str.contains('.'), + "Source package location should be a relative path, got: {location_str}" + ); + + // Second lock: records roundtrip through UnresolvedPixiRecord + let workspace = pixi.workspace().unwrap(); + let (lock_file_data_2, was_updated) = workspace + .update_lock_file(pixi_core::UpdateLockFileOptions::default()) + .await + .expect("Second lock should succeed"); + + assert!( + !was_updated, + "Second lock invocation should not update the lock-file" + ); + + let lock_file_2 = lock_file_data_2.into_lock_file(); + let env_2 = lock_file_2 + .environment(consts::DEFAULT_ENVIRONMENT_NAME) + .unwrap(); + let platform_2 = lock_file_2 + .platform(&Platform::current().to_string()) + .unwrap(); + + let source_packages_2: Vec<_> = env_2 + .packages(platform_2) + .into_iter() + .flatten() + .filter_map(|p| p.as_source_conda()) + .collect(); + + let my_pkg_2 = source_packages_2 + .iter() + .find(|p| { + p.metadata + .as_full() + .is_some_and(|f| f.package_record.name.as_normalized() == 
"my-package") + || p.metadata + .as_partial() + .is_some_and(|part| part.name.as_normalized() == "my-package") + }) + .expect("my-package should still be in source packages after roundtrip"); + + // Location should be preserved + assert_eq!( + my_pkg.location.to_string(), + my_pkg_2.location.to_string(), + "Source package location should be identical after roundtrip" + ); + + // package_build_source should be preserved (None == None for path deps + // without [package.build.source], or Some == Some for git/url sources) + assert_eq!( + my_pkg.package_build_source, my_pkg_2.package_build_source, + "package_build_source should be identical after roundtrip" + ); +} diff --git a/crates/pixi_core/src/lock_file/mod.rs b/crates/pixi_core/src/lock_file/mod.rs index a86092820c..5226852680 100644 --- a/crates/pixi_core/src/lock_file/mod.rs +++ b/crates/pixi_core/src/lock_file/mod.rs @@ -16,7 +16,7 @@ use pixi_record::PixiRecord; pub use pixi_uv_context::UvResolutionContext; use rattler_lock::PypiPackageData; pub use rattler_lock::Verbatim; -pub use records_by_name::{PixiRecordsByName, PypiRecordsByName}; +pub use records_by_name::{PixiRecordsByName, PypiRecordsByName, UnresolvedPixiRecordsByName}; pub use resolve::pypi::resolve_pypi; pub use satisfiability::{ Dependency, EnvironmentUnsat, PlatformUnsat, resolve_dev_dependencies, diff --git a/crates/pixi_core/src/lock_file/records_by_name.rs b/crates/pixi_core/src/lock_file/records_by_name.rs index d0f44dedfd..cdd5a037af 100644 --- a/crates/pixi_core/src/lock_file/records_by_name.rs +++ b/crates/pixi_core/src/lock_file/records_by_name.rs @@ -1,6 +1,6 @@ use super::package_identifier::ConversionError; use crate::lock_file::{PypiPackageData, PypiPackageIdentifier}; -use pixi_record::PixiRecord; +use pixi_record::{PixiRecord, UnresolvedPixiRecord}; use pixi_uv_conversions::to_uv_normalize; use pypi_modifiers::pypi_tags::is_python_record; use rattler_conda_types::{PackageName, RepoDataRecord, VersionWithSource}; @@ -10,6 +10,7 @@ 
use std::hash::Hash; pub type PypiRecordsByName = DependencyRecordsByName; pub type PixiRecordsByName = DependencyRecordsByName; +pub type UnresolvedPixiRecordsByName = DependencyRecordsByName; /// A trait required from the dependencies stored in DependencyRecordsByName pub trait HasNameVersion { @@ -62,6 +63,19 @@ impl HasNameVersion for PixiRecord { } } +impl HasNameVersion for UnresolvedPixiRecord { + type N = PackageName; + type V = VersionWithSource; + + fn name(&self) -> &Self::N { + UnresolvedPixiRecord::name(self) + } + + fn version(&self) -> Option<&Self::V> { + self.package_record().map(|pr| &pr.version) + } +} + /// A struct that holds both a ``Vec` of `DependencyRecord` and a mapping from /// name to index. #[derive(Clone, Debug)] @@ -222,6 +236,20 @@ impl PixiRecordsByName { } } +impl UnresolvedPixiRecordsByName { + /// Converts to a [`PixiRecordsByName`] on a best-effort basis. + /// + /// Binary records and full source records are converted; partial source + /// records (whose metadata is incomplete) are silently dropped. + pub(crate) fn into_resolved_best_effort(self) -> PixiRecordsByName { + PixiRecordsByName::from_iter( + self.records + .into_iter() + .filter_map(|r| r.try_into_resolved().ok()), + ) + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/pixi_core/src/lock_file/update.rs b/crates/pixi_core/src/lock_file/update.rs index d8a6ccb1e2..a4bb963df9 100644 --- a/crates/pixi_core/src/lock_file/update.rs +++ b/crates/pixi_core/src/lock_file/update.rs @@ -57,7 +57,7 @@ use uv_normalize::ExtraName; use super::{ CondaPrefixUpdater, InstallSubset, PixiRecordsByName, PypiRecordsByName, - outdated::OutdatedEnvironments, utils::IoConcurrencyLimit, + UnresolvedPixiRecordsByName, outdated::OutdatedEnvironments, utils::IoConcurrencyLimit, }; use crate::{ Workspace, @@ -1231,10 +1231,13 @@ pub struct UpdateContext<'p> { /// Repodata records from the lock-file. This contains the records that /// actually exist in the lock-file. 
If the lock-file is missing or /// partially missing then the data also won't exist in this field. - locked_repodata_records: PerEnvironmentAndPlatform<'p, Arc>, + /// + /// Records may be unresolved (partial source records from mutable path + /// sources). They are resolved lazily only when needed. + locked_repodata_records: PerEnvironmentAndPlatform<'p, Arc>, /// Repodata records from the lock-file grouped by solve-group. - locked_grouped_repodata_records: PerGroupAndPlatform<'p, Arc>, + locked_grouped_repodata_records: PerGroupAndPlatform<'p, Arc>, /// Pypi records from the lock-file grouped by solve-group. locked_grouped_pypi_records: PerGroupAndPlatform<'p, Arc>, @@ -1322,14 +1325,16 @@ impl<'p> UpdateContext<'p> { return Some((async move { pending_records.wait().await.clone() }).left_future()); } - // Otherwise read the records directly from the lock-file. + // Otherwise read the records directly from the lock-file, converting + // unresolved records to resolved on a best-effort basis (partial source + // records are dropped — they have no version/PackageRecord anyway). let locked_records = self .locked_grouped_repodata_records .get(group) - .and_then(|records| records.get(&platform))? 
- .clone(); + .and_then(|records| records.get(&platform))?; + let resolved = Arc::new(locked_records.as_ref().clone().into_resolved_best_effort()); - Some(ready(locked_records).right_future()) + Some(ready(resolved).right_future()) } /// Returns a future that will resolve to the solved pypi records for the @@ -1362,22 +1367,31 @@ impl<'p> UpdateContext<'p> { &mut self, environment: &Environment<'p>, platform: Platform, - ) -> Option { + ) -> Option { self.solved_repodata_records .get_mut(environment) .and_then(|records| records.remove(&platform)) .map(|cell| { - Arc::into_inner(cell) + let solved = Arc::into_inner(cell) .expect("records must not be shared") .into_inner() - .expect("records must be available") + .expect("records must be available"); + // Convert solved PixiRecords into UnresolvedPixiRecords so the + // return type is uniform with locked (potentially partial) records. + let solved = Arc::try_unwrap(solved).unwrap_or_else(|arc| (*arc).clone()); + UnresolvedPixiRecordsByName::from_iter( + solved + .into_inner() + .into_iter() + .map(UnresolvedPixiRecord::from), + ) }) .or_else(|| { self.locked_repodata_records .get_mut(environment) .and_then(|records| records.remove(&platform)) + .map(|records| Arc::try_unwrap(records).unwrap_or_else(|arc| (*arc).clone())) }) - .map(|records| Arc::try_unwrap(records).unwrap_or_else(|arc| (*arc).clone())) } /// Takes the latest pypi records for the given environment and platform. @@ -1572,11 +1586,9 @@ impl<'p> UpdateContextBuilder<'p> { }; // Extract the current conda records from the lock-file. - // First collect as UnresolvedPixiRecord (sync), then resolve partials (async). let workspace_root = project.root(); - let command_dispatcher = &self.command_dispatcher; - // Step 1: Collect unresolved records per environment and platform (sync). + // Collect unresolved records per environment and platform. 
#[allow(clippy::type_complexity)] let unresolved_by_env: Vec<( crate::workspace::Environment<'_>, @@ -1604,56 +1616,21 @@ impl<'p> UpdateContextBuilder<'p> { .collect::, ParseLockFileError>>() .into_diagnostic()?; - // Step 2: Resolve partials async. + // Step 2: Store the unresolved records directly. Partial source records + // are kept as-is and resolved lazily only when needed. This avoids a + // hard error when a partial record cannot be resolved (e.g. after a + // package rename) — the outdated environment will be re-solved anyway. let mut locked_repodata_records: HashMap< crate::workspace::Environment<'_>, - HashMap>, + HashMap>, > = HashMap::new(); for (env, platform_records) in unresolved_by_env { let mut env_map = HashMap::new(); for (platform, unresolved) in platform_records { - // Check if any records are partial. - let has_partials = unresolved.iter().any(|r| r.is_partial()); - let resolved = if has_partials { - let channel_config = env.channel_config(); - let channels: Vec = env - .channels() - .into_iter() - .cloned() - .map(|c| c.into_base_url(&channel_config)) - .collect::, _>>() - .into_diagnostic()?; - let VariantConfig { - variant_configuration, - variant_files, - } = project.variants(platform).into_diagnostic()?; - let virtual_packages: Vec = env - .virtual_packages(platform) - .into_iter() - .map(GenericVirtualPackage::from) - .collect(); - resolve_unresolved_records( - unresolved, - command_dispatcher, - channel_config, - channels, - variant_configuration, - variant_files, - virtual_packages, - platform, - ) - .await? - } else { - // All records are full — resolve without async. 
- unresolved - .into_iter() - .map(|r| { - r.try_into_resolved() - .expect("all records verified as non-partial") - }) - .collect() - }; - env_map.insert(platform, Arc::new(PixiRecordsByName::from_iter(resolved))); + env_map.insert( + platform, + Arc::new(UnresolvedPixiRecordsByName::from_iter(unresolved)), + ); } locked_repodata_records.insert(env, env_map); } @@ -1729,7 +1706,10 @@ impl<'p> UpdateContextBuilder<'p> { by_platform .into_iter() .map(|(platform, records)| { - (platform, Arc::new(PixiRecordsByName::from_iter(records))) + ( + platform, + Arc::new(UnresolvedPixiRecordsByName::from_iter(records)), + ) }) .collect() } @@ -1909,11 +1889,16 @@ impl<'p> UpdateContext<'p> { continue; } // No, we need to spawn a task to update for the entire solve group. + // Convert to resolved records on a best-effort basis: partial + // source records are dropped (the solver only uses binary records + // from `installed` anyway). let locked_group_records = self .locked_grouped_repodata_records .get(&source) .and_then(|records| records.get(&platform)) - .cloned() + .map(|unresolved| { + Arc::new(unresolved.as_ref().clone().into_resolved_best_effort()) + }) .unwrap_or_default(); // Spawn a task to solve the group. 
diff --git a/tests/integration_python/pixi_build/test_specified_build_source/test_git.py b/tests/integration_python/pixi_build/test_specified_build_source/test_git.py index 97764c84d6..9645e42e89 100644 --- a/tests/integration_python/pixi_build/test_specified_build_source/test_git.py +++ b/tests/integration_python/pixi_build/test_specified_build_source/test_git.py @@ -30,8 +30,8 @@ def iter_entries() -> Any: for entry in iter_entries(): if isinstance(entry, dict): entry = cast(dict[str, Any], entry) - if (v := entry.get("source")) and v.endswith("@ ."): - package_build_source = entry.get("package_build_source") + if (v := entry.get("conda_source")) and v.endswith("@ ."): + package_build_source = entry.get("source") if package_build_source is not None: serialized_sources.append(package_build_source) @@ -413,8 +413,11 @@ def test_git_path_lock_detects_manual_rev_change( def mutate(node: Any) -> None: if isinstance(node, dict): node = cast(dict[str, Any], node) - if (v := node.get("source")) and v.endswith("@ ."): - node["package_build_source"]["rev"] = local_cpp_git_repo.other_feature_rev + # In v7 lock files, environment entries are short references + # (only `conda_source` key) while the full package data (with + # `source`) lives in the top-level `packages` list. 
+ if (v := node.get("conda_source")) and v.endswith("@ .") and "source" in node: + node["source"]["rev"] = local_cpp_git_repo.other_feature_rev for value in node.values(): mutate(value) elif isinstance(node, list): From da5041efd456ad85de7f070ba3df4acdd854e0df Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Fri, 13 Mar 2026 16:09:16 +0100 Subject: [PATCH 14/15] fix: disable-jlap is now unused key --- crates/pixi_config/src/lib.rs | 113 ++++++++++-------- ..._config__tests__config_merge_multiple.snap | 3 - .../pixi_install_pypi/src/plan/validation.rs | 9 -- 3 files changed, 66 insertions(+), 59 deletions(-) diff --git a/crates/pixi_config/src/lib.rs b/crates/pixi_config/src/lib.rs index d5c2b573b9..93175d9c07 100644 --- a/crates/pixi_config/src/lib.rs +++ b/crates/pixi_config/src/lib.rs @@ -221,8 +221,7 @@ impl ConfigCliPrompt { } } -#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq)] -#[serde(rename_all = "kebab-case")] +#[derive(Clone, Default, Debug, Serialize, PartialEq, Eq)] pub struct RepodataConfig { #[serde(flatten)] pub default: RepodataChannelConfig, @@ -231,6 +230,62 @@ pub struct RepodataConfig { pub per_channel: HashMap, } +impl<'de> Deserialize<'de> for RepodataConfig { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + struct RepodataConfigVisitor; + + impl<'de> serde::de::Visitor<'de> for RepodataConfigVisitor { + type Value = RepodataConfig; + + fn expecting(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.write_str("a repodata config map") + } + + fn visit_map(self, mut access: M) -> Result + where + M: serde::de::MapAccess<'de>, + { + let mut default = RepodataChannelConfig::default(); + let mut per_channel = HashMap::new(); + + while let Some(key) = access.next_key::()? 
{ + match key.as_str() { + "disable-bzip2" | "disable_bzip2" => { + default.disable_bzip2 = Some(access.next_value()?); + } + "disable-zstd" | "disable_zstd" => { + default.disable_zstd = Some(access.next_value()?); + } + "disable-sharded" | "disable_sharded" => { + default.disable_sharded = Some(access.next_value()?); + } + other => { + if let Ok(url) = Url::parse(other) { + per_channel.insert(url, access.next_value()?); + } else { + // Unknown/deprecated keys (e.g. `disable-jlap`) are + // silently ignored. `serde_ignored` will report them + // as unused so the "Ignoring '…'" warning fires. + let _: serde::de::IgnoredAny = access.next_value()?; + } + } + } + } + + Ok(RepodataConfig { + default, + per_channel, + }) + } + } + + deserializer.deserialize_map(RepodataConfigVisitor) + } +} + impl RepodataConfig { pub fn is_empty(&self) -> bool { self.default.is_empty() && self.per_channel.is_empty() @@ -283,14 +338,8 @@ impl ConfigCliActivation { } #[derive(Clone, Default, Debug, Deserialize, Serialize, PartialEq, Eq)] -#[serde(deny_unknown_fields, rename_all = "kebab-case")] +#[serde(rename_all = "kebab-case")] pub struct RepodataChannelConfig { - /// Deprecated: JLAP support has been removed. This field is kept only so - /// existing config files continue to parse. A deprecation warning is - /// emitted when set. - #[serde(alias = "disable_jlap")] // BREAK: remove to stop supporting snake_case alias - #[serde(skip_serializing_if = "Option::is_none")] - pub disable_jlap: Option, /// Disable bzip2 compression for repodata. 
#[serde(alias = "disable_bzip2")] // BREAK: remove to stop supporting snake_case alias #[serde(skip_serializing_if = "Option::is_none")] @@ -306,15 +355,13 @@ pub struct RepodataChannelConfig { impl RepodataChannelConfig { pub fn is_empty(&self) -> bool { - self.disable_jlap.is_none() - && self.disable_bzip2.is_none() + self.disable_bzip2.is_none() && self.disable_zstd.is_none() && self.disable_sharded.is_none() } pub fn merge(&self, other: Self) -> Self { Self { - disable_jlap: self.disable_jlap.or(other.disable_jlap), disable_zstd: self.disable_zstd.or(other.disable_zstd), disable_bzip2: self.disable_bzip2.or(other.disable_bzip2), disable_sharded: self.disable_sharded.or(other.disable_sharded), @@ -481,15 +528,6 @@ impl ExperimentalConfig { // default settings in the future. /// The default maximum number of concurrent solves that can be run at once. /// Defaulting to the number of CPUs available. -/// Emit a one-time deprecation warning for the `disable-jlap` config field. -fn warn_jlap_deprecated() { - static ONCE: std::sync::Once = std::sync::Once::new(); - ONCE.call_once(|| { - tracing::warn!( - "'disable-jlap' is deprecated and has no effect; JLAP support has been removed" - ); - }); -} fn default_max_concurrent_solves() -> usize { std::thread::available_parallelism().map_or(1, |n| n.get()) @@ -1240,7 +1278,7 @@ impl Config { if !unused_keys.is_empty() { tracing::warn!( - "Ignoring '{}' in at {}", + "Ignoring '{}' in {}", console::style( unused_keys .iter() @@ -1260,11 +1298,6 @@ impl Config { .validate() .map_err(|e| ConfigError::ValidationError(e, path.to_path_buf()))?; - // Warn about deprecated fields (once only) - if config.repodata_config.default.disable_jlap.is_some() { - warn_jlap_deprecated(); - } - // check proxy config if config.proxy_config.https.is_none() && config.proxy_config.http.is_none() { if !config.proxy_config.non_proxy_hosts.is_empty() { @@ -1407,7 +1440,6 @@ impl Config { "pypi-config.keyring-provider", "repodata-config", 
"repodata-config.disable-bzip2", - "repodata-config.disable-jlap", "repodata-config.disable-sharded", "repodata-config.disable-zstd", "run-post-link-scripts", @@ -1683,11 +1715,6 @@ impl Config { let subkey = key.strip_prefix("repodata-config.").unwrap(); match subkey { - "disable-jlap" => { - self.repodata_config.default.disable_jlap = - value.map(|v| v.parse()).transpose().into_diagnostic()?; - warn_jlap_deprecated(); - } "disable-bzip2" => { self.repodata_config.default.disable_bzip2 = value.map(|v| v.parse()).transpose().into_diagnostic()?; @@ -2323,7 +2350,6 @@ UNUSED = "unused" )]), repodata_config: RepodataConfig { default: RepodataChannelConfig { - disable_jlap: Some(true), disable_bzip2: Some(true), disable_sharded: Some(true), disable_zstd: Some(true), @@ -2462,7 +2488,6 @@ UNUSED = "unused" "https://prefix.dev/conda-forge" ] [repodata_config] - disable_jlap = true disable_bzip2 = true disable_zstd = true "#; @@ -2484,7 +2509,6 @@ UNUSED = "unused" Some(&vec![Url::parse("https://prefix.dev/conda-forge").unwrap()]) ); let repodata_config = config.repodata_config; - assert_eq!(repodata_config.default.disable_jlap, Some(true)); assert_eq!(repodata_config.default.disable_bzip2, Some(true)); assert_eq!(repodata_config.default.disable_zstd, Some(true)); assert_eq!(repodata_config.default.disable_sharded, None); @@ -2669,11 +2693,12 @@ UNUSED = "unused" ); // Test more repodata-config options - config - .set("repodata-config.disable-jlap", Some("true".to_string())) - .unwrap(); - let repodata_config = config.repodata_config(); - assert_eq!(repodata_config.default.disable_jlap, Some(true)); + // disable-jlap has been removed — setting it should error + assert!( + config + .set("repodata-config.disable-jlap", Some("true".to_string())) + .is_err() + ); config .set("repodata-config.disable-bzip2", Some("true".to_string())) @@ -2888,25 +2913,21 @@ UNUSED = "unused" fn test_repodata_config() { let toml = r#" [repodata-config] - disable-jlap = true disable-bzip2 = true 
disable-zstd = true disable-sharded = true [repodata-config."https://prefix.dev/conda-forge"] - disable-jlap = false disable-bzip2 = false disable-zstd = false disable-sharded = false [repodata-config."https://conda.anaconda.org/conda-forge"] - disable-jlap = false disable-bzip2 = false disable-zstd = false "#; let (config, _) = Config::from_toml(toml, None).unwrap(); let repodata_config = config.repodata_config(); - assert_eq!(repodata_config.default.disable_jlap, Some(true)); assert_eq!(repodata_config.default.disable_bzip2, Some(true)); assert_eq!(repodata_config.default.disable_zstd, Some(true)); assert_eq!(repodata_config.default.disable_sharded, Some(true)); @@ -2917,7 +2938,6 @@ UNUSED = "unused" let prefix_config = per_channel .get(&Url::from_str("https://prefix.dev/conda-forge").unwrap()) .unwrap(); - assert_eq!(prefix_config.disable_jlap, Some(false)); assert_eq!(prefix_config.disable_bzip2, Some(false)); assert_eq!(prefix_config.disable_zstd, Some(false)); assert_eq!(prefix_config.disable_sharded, Some(false)); @@ -2925,7 +2945,6 @@ UNUSED = "unused" let anaconda_config = per_channel .get(&Url::from_str("https://conda.anaconda.org/conda-forge").unwrap()) .unwrap(); - assert_eq!(anaconda_config.disable_jlap, Some(false)); assert_eq!(anaconda_config.disable_bzip2, Some(false)); assert_eq!(anaconda_config.disable_zstd, Some(false)); assert_eq!(anaconda_config.disable_sharded, None); diff --git a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap index 063bcde6de..c16519292a 100644 --- a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap +++ b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap @@ -77,9 +77,6 @@ Config { }, repodata_config: RepodataConfig { default: RepodataChannelConfig { - disable_jlap: Some( - true, - ), disable_bzip2: None, disable_zstd: Some( true, diff --git 
a/crates/pixi_install_pypi/src/plan/validation.rs b/crates/pixi_install_pypi/src/plan/validation.rs index 7a18de45e8..60e4256364 100644 --- a/crates/pixi_install_pypi/src/plan/validation.rs +++ b/crates/pixi_install_pypi/src/plan/validation.rs @@ -145,15 +145,6 @@ pub(crate) fn need_reinstall( )); } } - eprintln!( - "Dirinfo: Editable: {}", - dir_info.editable.unwrap_or_default() - ); - eprintln!("installed: {}", installed_dist.is_editable()); - eprintln!( - "required_dist.is_editable(): {}", - required_dist.is_editable() - ); if dir_info.editable.unwrap_or_default() != required_dist.is_editable() { return Ok(ValidateCurrentInstall::Reinstall( NeedReinstall::EditableStatusChanged { From 45552356517ad10b1a480c87ed22f96605db5995 Mon Sep 17 00:00:00 2001 From: Bas Zalmstra <4995967+baszalmstra@users.noreply.github.com> Date: Fri, 13 Mar 2026 17:10:46 +0100 Subject: [PATCH 15/15] fix: index was removed --- crates/pixi_config/src/lib.rs | 3 - .../src/lock_file/satisfiability/mod.rs | 185 ++++++++++++++++-- .../pixi_config_tomls/main_config.toml | 3 +- 3 files changed, 173 insertions(+), 18 deletions(-) diff --git a/crates/pixi_config/src/lib.rs b/crates/pixi_config/src/lib.rs index 93175d9c07..de1f0ff0fe 100644 --- a/crates/pixi_config/src/lib.rs +++ b/crates/pixi_config/src/lib.rs @@ -524,11 +524,8 @@ impl ExperimentalConfig { } } -// Making the default values part of pixi_config to allow for printing the -// default settings in the future. /// The default maximum number of concurrent solves that can be run at once. /// Defaulting to the number of CPUs available. 
- fn default_max_concurrent_solves() -> usize { std::thread::available_parallelism().map_or(1, |n| n.get()) } diff --git a/crates/pixi_core/src/lock_file/satisfiability/mod.rs b/crates/pixi_core/src/lock_file/satisfiability/mod.rs index e5a1b28b58..6ae1668382 100644 --- a/crates/pixi_core/src/lock_file/satisfiability/mod.rs +++ b/crates/pixi_core/src/lock_file/satisfiability/mod.rs @@ -1179,22 +1179,33 @@ pub(crate) fn pypi_satisfies_requirement( .into()); } - // If the requirement specifies an explicit index, verify the lock-file matches - if let Some(required_index) = index { - let required_url: Url = required_index.url.url().clone().into(); - match &locked_data.index_url { - Some(locked_url) if locked_url == &required_url => {} - other => { - return Err(PlatformUnsat::LockedPyPIIndexMismatch { - name: spec.name.to_string(), - expected_index: required_url.to_string(), - locked_index: other - .as_ref() - .map_or("".to_string(), |u| u.to_string()), + // Verify the index in the requirement matches the lock-file. + match (index, &locked_data.index_url) { + (Some(required_index), locked_index) => { + let required_url: Url = required_index.url.url().clone().into(); + match locked_index { + Some(locked_url) if locked_url == &required_url => {} + other => { + return Err(PlatformUnsat::LockedPyPIIndexMismatch { + name: spec.name.to_string(), + expected_index: required_url.to_string(), + locked_index: other + .as_ref() + .map_or("".to_string(), |u| u.to_string()), + } + .into()); } - .into()); } } + (None, Some(locked_url)) => { + return Err(PlatformUnsat::LockedPyPIIndexMismatch { + name: spec.name.to_string(), + expected_index: "".to_string(), + locked_index: locked_url.to_string(), + } + .into()); + } + (None, None) => {} } Ok(()) @@ -3273,4 +3284,152 @@ mod tests { // A git-based source dependency without a version should still satisfy. 
pypi_satisfies_requirement(&spec, &locked_data, Path::new("")).unwrap(); } + + /// Regression test: removing a PyPI `index` from the manifest should + /// invalidate the lock-file when the locked package was resolved from that + /// index. + /// + /// Verify that removing an explicit index from a PyPI requirement + /// invalidates the lock-file entry that was resolved from that index. + #[test] + fn test_pypi_index_removed_should_invalidate() { + // Locked data: package was resolved from a custom index. + let locked_data = PypiPackageData { + name: "my-dep".parse().unwrap(), + version: Some(Version::from_str("1.0.0").unwrap()), + location: "https://custom.example.com/simple/packages/my_dep-1.0.0-py3-none-any.whl" + .parse() + .expect("failed to parse url"), + hash: None, + index_url: Some(Url::parse("https://custom.example.com/simple").unwrap()), + requires_dist: vec![], + requires_python: None, + }; + + // Requirement: no index specified (user removed the `index` field). + let spec = pep508_requirement_to_uv_requirement( + pep508_rs::Requirement::from_str("my-dep>=1.0").unwrap(), + ) + .unwrap(); + + let project_root = PathBuf::from_str("/").unwrap(); + + let result = pypi_satisfies_requirement(&spec, &locked_data, &project_root); + assert!( + result.is_err(), + "expected index removal to invalidate satisfiability, \ + but pypi_satisfies_requirement returned Ok(())" + ); + } + + /// Helper to build a `uv_distribution_types::Requirement` with an explicit index. 
+ fn registry_requirement_with_index( + name: &str, + specifier: &str, + index_url: &str, + ) -> uv_distribution_types::Requirement { + use uv_normalize::PackageName as UvPackageName; + use uv_pep440::VersionSpecifiers; + + let index = + uv_distribution_types::IndexMetadata::from(uv_distribution_types::IndexUrl::from( + uv_pep508::VerbatimUrl::from_url(Url::parse(index_url).unwrap().into()), + )); + uv_distribution_types::Requirement { + name: UvPackageName::from_str(name).unwrap(), + extras: vec![].into(), + groups: vec![].into(), + marker: uv_pep508::MarkerTree::TRUE, + source: RequirementSource::Registry { + specifier: VersionSpecifiers::from_str(specifier).unwrap(), + index: Some(index), + conflict: None, + }, + origin: None, + } + } + + /// Verify that changing a PyPI index to a different non-default index + /// invalidates the lock-file. + #[test] + fn test_pypi_index_changed_should_invalidate() { + let locked_data = PypiPackageData { + name: "my-dep".parse().unwrap(), + version: Some(Version::from_str("1.0.0").unwrap()), + location: "https://old-index.example.com/packages/my_dep-1.0.0-py3-none-any.whl" + .parse() + .expect("failed to parse url"), + hash: None, + index_url: Some(Url::parse("https://old-index.example.com/simple").unwrap()), + requires_dist: vec![], + requires_python: None, + }; + + let spec = registry_requirement_with_index( + "my-dep", + ">=1.0", + "https://new-index.example.com/simple", + ); + + let project_root = PathBuf::from_str("/").unwrap(); + let result = pypi_satisfies_requirement(&spec, &locked_data, &project_root); + assert!( + result.is_err(), + "expected index change to invalidate satisfiability" + ); + } + + /// Verify that a matching non-default index is considered satisfiable. 
+ #[test] + fn test_pypi_index_matching_should_satisfy() { + let index_url = "https://custom.example.com/simple"; + let locked_data = PypiPackageData { + name: "my-dep".parse().unwrap(), + version: Some(Version::from_str("1.0.0").unwrap()), + location: "https://custom.example.com/packages/my_dep-1.0.0-py3-none-any.whl" + .parse() + .expect("failed to parse url"), + hash: None, + index_url: Some(Url::parse(index_url).unwrap()), + requires_dist: vec![], + requires_python: None, + }; + + let spec = registry_requirement_with_index("my-dep", ">=1.0", index_url); + + let project_root = PathBuf::from_str("/").unwrap(); + let result = pypi_satisfies_requirement(&spec, &locked_data, &project_root); + assert!( + result.is_ok(), + "expected matching index to satisfy, got: {:?}", + result.unwrap_err() + ); + } + + /// Verify that adding an index to a requirement that was locked without one + /// invalidates the lock-file. + #[test] + fn test_pypi_index_added_should_invalidate() { + let locked_data = PypiPackageData { + name: "my-dep".parse().unwrap(), + version: Some(Version::from_str("1.0.0").unwrap()), + location: "https://pypi.org/packages/my_dep-1.0.0-py3-none-any.whl" + .parse() + .expect("failed to parse url"), + hash: None, + index_url: None, + requires_dist: vec![], + requires_python: None, + }; + + let spec = + registry_requirement_with_index("my-dep", ">=1.0", "https://custom.example.com/simple"); + + let project_root = PathBuf::from_str("/").unwrap(); + let result = pypi_satisfies_requirement(&spec, &locked_data, &project_root); + assert!( + result.is_err(), + "expected adding an index to invalidate satisfiability" + ); + } } diff --git a/docs/source_files/pixi_config_tomls/main_config.toml b/docs/source_files/pixi_config_tomls/main_config.toml index 4c2f5e17eb..9c495a9ffe 100644 --- a/docs/source_files/pixi_config_tomls/main_config.toml +++ b/docs/source_files/pixi_config_tomls/main_config.toml @@ -40,11 +40,10 @@ tool-platform = "win-64" # force tools like build 
backends to be installed for a
 # --8<-- [start:repodata-config]
 [repodata-config]
-# disable fetching of jlap, bz2 or zstd repodata files.
+# disable fetching of bz2, zstd or sharded repodata files.
 # This should only be used for specific old versions of artifactory and other non-compliant
 # servers.
 disable-bzip2 = true # don't try to download repodata.json.bz2
-disable-jlap = true # deprecated, has no effect (JLAP support removed)
 disable-sharded = true # don't try to download sharded repodata
 disable-zstd = true # don't try to download repodata.json.zst
 # --8<-- [end:repodata-config]