diff --git a/conda_lock/conda_lock.py b/conda_lock/conda_lock.py
index 4360cb58..19b50737 100644
--- a/conda_lock/conda_lock.py
+++ b/conda_lock/conda_lock.py
@@ -76,6 +76,7 @@
 from conda_lock.pypi_solver import solve_pypi
 from conda_lock.src_parser import make_lock_spec
 from conda_lock.virtual_package import (
+    FakeRepoData,
     default_virtual_package_repodata,
     virtual_package_repo_from_specification,
 )
@@ -338,7 +339,6 @@ def make_lock_files(  # noqa: C901
         src_files=src_files,
         channel_overrides=channel_overrides,
         platform_overrides=platform_overrides,
-        virtual_package_repo=virtual_package_repo,
         required_categories=required_categories if filter_categories else None,
     )
     original_lock_content: Optional[Lockfile] = None
@@ -368,7 +368,9 @@ def make_lock_files(  # noqa: C901
                 update
                 or platform not in platforms_already_locked
                 or not check_input_hash
-                or lock_spec.content_hash_for_platform(platform)
+                or lock_spec.content_hash_for_platform(
+                    platform, virtual_package_repo
+                )
                 != original_lock_content.metadata.content_hash[platform]
             ):
                 platforms_to_lock.append(platform)
@@ -399,6 +401,7 @@ def make_lock_files(  # noqa: C901
            metadata_choices=metadata_choices,
            metadata_yamls=metadata_yamls,
            strip_auth=strip_auth,
+           virtual_package_repo=virtual_package_repo,
         )
 
     if not original_lock_content:
@@ -720,11 +723,13 @@ def sanitize_lockfile_line(line: str) -> str:
 
 
 def _solve_for_arch(
+    *,
     conda: PathLike,
     spec: LockSpecification,
     platform: str,
     channels: List[Channel],
     pip_repositories: List[PipRepository],
+    virtual_package_repo: FakeRepoData,
     update_spec: Optional[UpdateSpecification] = None,
     strip_auth: bool = False,
 ) -> List[LockedDependency]:
@@ -767,10 +772,10 @@ def _solve_for_arch(
             python_version=conda_deps["python"].version,
             platform=platform,
             platform_virtual_packages=(
-                spec.virtual_package_repo.all_repodata.get(
-                    platform, {"packages": None}
-                )["packages"]
-                if spec.virtual_package_repo
+                virtual_package_repo.all_repodata.get(platform, {"packages": None})[
+                    "packages"
+                ]
+                if virtual_package_repo
                 else None
             ),
             pip_repositories=pip_repositories,
@@ -821,14 +826,13 @@ def create_lockfile_from_spec(
     metadata_choices: AbstractSet[MetadataOption] = frozenset(),
     metadata_yamls: Sequence[pathlib.Path] = (),
     strip_auth: bool = False,
+    virtual_package_repo: FakeRepoData,
 ) -> Lockfile:
     """
     Solve or update specification
     """
     if platforms is None:
         platforms = []
-    assert spec.virtual_package_repo is not None
-    virtual_package_channel = spec.virtual_package_repo.channel
 
     locked: Dict[Tuple[str, str, str], LockedDependency] = {}
 
@@ -837,8 +841,9 @@ def create_lockfile_from_spec(
             conda=conda,
             spec=spec,
             platform=platform,
-            channels=[*spec.channels, virtual_package_channel],
+            channels=[*spec.channels, virtual_package_repo.channel],
             pip_repositories=spec.pip_repositories,
+            virtual_package_repo=virtual_package_repo,
             update_spec=update_spec,
             strip_auth=strip_auth,
         )
@@ -888,11 +893,12 @@ def create_lockfile_from_spec(
         inputs_metadata = None
 
     custom_metadata = get_custom_metadata(metadata_yamls=metadata_yamls)
+    content_hash = spec.content_hash(virtual_package_repo)
 
     return Lockfile(
         package=[locked[k] for k in locked],
         metadata=LockMeta(
-            content_hash=spec.content_hash(),
+            content_hash=content_hash,
             channels=[c for c in spec.channels],
             platforms=spec.platforms,
             sources=list(meta_sources.keys()),
diff --git a/conda_lock/models/lock_spec.py b/conda_lock/models/lock_spec.py
index 8cb13875..db69960d 100644
--- a/conda_lock/models/lock_spec.py
+++ b/conda_lock/models/lock_spec.py
@@ -64,20 +64,23 @@ class LockSpecification(BaseModel):
     channels: List[Channel]
     sources: List[pathlib.Path]
     pip_repositories: List[PipRepository] = Field(default_factory=list)
-    virtual_package_repo: Optional[FakeRepoData] = None
     allow_pypi_requests: bool = True
 
     @property
     def platforms(self) -> List[str]:
         return list(self.dependencies.keys())
 
-    def content_hash(self) -> Dict[str, str]:
+    def content_hash(
+        self, virtual_package_repo: Optional[FakeRepoData]
+    ) -> Dict[str, str]:
         return {
-            platform: self.content_hash_for_platform(platform)
+            platform: self.content_hash_for_platform(platform, virtual_package_repo)
             for platform in self.platforms
         }
 
-    def content_hash_for_platform(self, platform: str) -> str:
+    def content_hash_for_platform(
+        self, platform: str, virtual_package_repo: Optional[FakeRepoData]
+    ) -> str:
         data = {
             "channels": [c.json() for c in self.channels],
             "specs": [
@@ -89,8 +92,8 @@ def content_hash_for_platform(self, platform: str) -> str:
         }
         if self.pip_repositories:
             data["pip_repositories"] = [repo.json() for repo in self.pip_repositories]
-        if self.virtual_package_repo is not None:
-            vpr_data = self.virtual_package_repo.all_repodata
+        if virtual_package_repo is not None:
+            vpr_data = virtual_package_repo.all_repodata
             data["virtual_package_hash"] = {
                 "noarch": vpr_data.get("noarch", {}),
                 **{platform: vpr_data.get(platform, {})},
diff --git a/conda_lock/src_parser/__init__.py b/conda_lock/src_parser/__init__.py
index 02889974..7fa4f3b9 100644
--- a/conda_lock/src_parser/__init__.py
+++ b/conda_lock/src_parser/__init__.py
@@ -75,7 +75,6 @@ def _parse_source_files(
 def make_lock_spec(
     *,
     src_files: List[pathlib.Path],
-    virtual_package_repo: FakeRepoData,
     channel_overrides: Optional[Sequence[str]] = None,
     pip_repository_overrides: Optional[Sequence[str]] = None,
     platform_overrides: Optional[Sequence[str]] = None,
@@ -131,6 +130,5 @@ def dep_has_category(d: Dependency, categories: AbstractSet[str]) -> bool:
         channels=channels,
         pip_repositories=pip_repositories,
         sources=aggregated_lock_spec.sources,
-        virtual_package_repo=virtual_package_repo,
         allow_pypi_requests=aggregated_lock_spec.allow_pypi_requests,
     )
diff --git a/tests/test_conda_lock.py b/tests/test_conda_lock.py
index 0cb8a10b..c3a0041b 100644
--- a/tests/test_conda_lock.py
+++ b/tests/test_conda_lock.py
@@ -779,25 +779,21 @@ def test_poetry_no_pypi_multiple_pyprojects(
     poetry_pyproject_toml_no_pypi: Path,
     poetry_pyproject_toml_no_pypi_other_projects: List[Path],
 ):
-    virtual_package_repo = default_virtual_package_repodata()
-    with virtual_package_repo:
-        spec = make_lock_spec(
-            src_files=poetry_pyproject_toml_no_pypi_other_projects,
-            virtual_package_repo=virtual_package_repo,
-        )
-        assert (
-            spec.allow_pypi_requests is True
-        ), "PyPI requests should be allowed when all pyprojects.toml allow PyPI requests"
-        spec = make_lock_spec(
-            src_files=[
-                *poetry_pyproject_toml_no_pypi_other_projects,
-                poetry_pyproject_toml_no_pypi,
-            ],
-            virtual_package_repo=virtual_package_repo,
-        )
-        assert (
-            spec.allow_pypi_requests is False
-        ), "PyPI requests should be forbidden when at least one pyproject.toml forbids PyPI requests"
+    spec = make_lock_spec(
+        src_files=poetry_pyproject_toml_no_pypi_other_projects,
+    )
+    assert (
+        spec.allow_pypi_requests is True
+    ), "PyPI requests should be allowed when all pyprojects.toml allow PyPI requests"
+    spec = make_lock_spec(
+        src_files=[
+            *poetry_pyproject_toml_no_pypi_other_projects,
+            poetry_pyproject_toml_no_pypi,
+        ],
+    )
+    assert (
+        spec.allow_pypi_requests is False
+    ), "PyPI requests should be forbidden when at least one pyproject.toml forbids PyPI requests"
 
 
 def test_prepare_repositories_pool():
@@ -813,38 +809,32 @@ def contains_pypi(pool):
 
 
 def test_spec_poetry(poetry_pyproject_toml: Path):
-    virtual_package_repo = default_virtual_package_repodata()
-    with virtual_package_repo:
-        spec = make_lock_spec(
-            src_files=[poetry_pyproject_toml], virtual_package_repo=virtual_package_repo
-        )
-        for plat in spec.platforms:
-            deps = {d.name for d in spec.dependencies[plat]}
-            assert "tomlkit" in deps
-            assert "pytest" in deps
-            assert "requests" in deps
-
-        spec = make_lock_spec(
-            src_files=[poetry_pyproject_toml],
-            virtual_package_repo=virtual_package_repo,
-            required_categories={"main", "dev"},
-        )
-        for plat in spec.platforms:
-            deps = {d.name for d in spec.dependencies[plat]}
-            assert "tomlkit" not in deps
-            assert "pytest" in deps
-            assert "requests" in deps
-
-        spec = make_lock_spec(
-            src_files=[poetry_pyproject_toml],
-            virtual_package_repo=virtual_package_repo,
-            required_categories={"main"},
-        )
-        for plat in spec.platforms:
-            deps = {d.name for d in spec.dependencies[plat]}
-            assert "tomlkit" not in deps
-            assert "pytest" not in deps
-            assert "requests" in deps
+    spec = make_lock_spec(src_files=[poetry_pyproject_toml])
+    for plat in spec.platforms:
+        deps = {d.name for d in spec.dependencies[plat]}
+        assert "tomlkit" in deps
+        assert "pytest" in deps
+        assert "requests" in deps
+
+    spec = make_lock_spec(
+        src_files=[poetry_pyproject_toml],
+        required_categories={"main", "dev"},
+    )
+    for plat in spec.platforms:
+        deps = {d.name for d in spec.dependencies[plat]}
+        assert "tomlkit" not in deps
+        assert "pytest" in deps
+        assert "requests" in deps
+
+    spec = make_lock_spec(
+        src_files=[poetry_pyproject_toml],
+        required_categories={"main"},
+    )
+    for plat in spec.platforms:
+        deps = {d.name for d in spec.dependencies[plat]}
+        assert "tomlkit" not in deps
+        assert "pytest" not in deps
+        assert "requests" in deps
 
 
 def test_parse_flit(flit_pyproject_toml: Path):
@@ -1578,13 +1568,8 @@ def test_run_lock_with_local_package(
     monkeypatch.chdir(pip_local_package_environment.parent)
     if is_micromamba(conda_exe):
         monkeypatch.setenv("CONDA_FLAGS", "-v")
-    virtual_package_repo = default_virtual_package_repodata()
-    with virtual_package_repo:
-        lock_spec = make_lock_spec(
-            src_files=[pip_local_package_environment],
-            virtual_package_repo=virtual_package_repo,
-        )
 
+    lock_spec = make_lock_spec(src_files=[pip_local_package_environment])
     assert not any(
         p.manager == "pip"
         for platform in lock_spec.platforms
@@ -1664,13 +1649,13 @@ def test_poetry_version_parsing_constraints(
             # NB: this file must exist for relative path resolution to work
            # in create_lockfile_from_spec
             sources=[Path(tf.name)],
-            virtual_package_repo=vpr,
         )
         lockfile_contents = create_lockfile_from_spec(
             conda=_conda_exe,
             spec=spec,
             lockfile_path=Path(DEFAULT_LOCKFILE_NAME),
             metadata_yamls=(),
+            virtual_package_repo=vpr,
         )
 
     python = next(p for p in lockfile_contents.package if p.name == "python")
@@ -1735,7 +1720,7 @@ def test_aggregate_lock_specs():
         sources=[],
     )
     assert actual.dict(exclude={"sources"}) == expected.dict(exclude={"sources"})
-    assert actual.content_hash() == expected.content_hash()
+    assert actual.content_hash(None) == expected.content_hash(None)
 
 
 def test_aggregate_lock_specs_override_version():
@@ -2279,25 +2264,23 @@ def test_virtual_packages(
 def test_virtual_package_input_hash_stability():
     from conda_lock.virtual_package import virtual_package_repo_from_specification
 
-    test_dir = TESTS_DIR.joinpath("test-cuda")
-    vspec = test_dir / "virtual-packages-old-glibc.yaml"
-
-    vpr = virtual_package_repo_from_specification(vspec)
     spec = LockSpecification(
         dependencies={"linux-64": []},
         channels=[],
         sources=[],
-        virtual_package_repo=vpr,
     )
+
+    test_dir = TESTS_DIR.joinpath("test-cuda")
+    vspec = test_dir / "virtual-packages-old-glibc.yaml"
+    vpr = virtual_package_repo_from_specification(vspec)
+
     expected = "8ee5fc79fca4cb7732d2e88443209e0a3a354da9899cb8899d94f9b1dcccf975"
-    assert spec.content_hash() == {"linux-64": expected}
+    assert spec.content_hash(vpr) == {"linux-64": expected}
 
 
 def test_default_virtual_package_input_hash_stability():
     from conda_lock.virtual_package import default_virtual_package_repodata
 
-    vpr = default_virtual_package_repodata()
-
     expected = {
         "linux-64": "a949aac83da089258ce729fcd54dc0a3a1724ea325d67680d7a6d7cc9c0f1d1b",
         "linux-aarch64": "f68603a3a28dbb03d20a25e1dacda3c42b6acc8a93bd31e13c4956115820cfa6",
@@ -2311,9 +2294,9 @@ def test_default_virtual_package_input_hash_stability():
         dependencies={platform: [] for platform in expected.keys()},
         channels=[],
         sources=[],
-        virtual_package_repo=vpr,
     )
-    assert spec.content_hash() == expected
+    vpr = default_virtual_package_repodata()
+    assert spec.content_hash(vpr) == expected
 
 
 @pytest.fixture