Commit 36ca3271 by Riccardo Albertazzi Committed by GitHub

refactor: extract cache utilities (#7621)

Co-authored-by: Randy Döring <30527984+radoering@users.noreply.github.com>
parent b8e912dc
......@@ -210,7 +210,11 @@ class Config:
@property
def repository_cache_directory(self) -> Path:
return Path(self.get("cache-dir")) / "cache" / "repositories"
return Path(self.get("cache-dir")).expanduser() / "cache" / "repositories"
@property
def artifacts_cache_directory(self) -> Path:
    """Directory under the configured ``cache-dir`` where built/downloaded
    artifacts are stored (``<cache-dir>/artifacts``). ``~`` is expanded."""
    return Path(self.get("cache-dir")).expanduser() / "artifacts"
@property
def virtualenvs_path(self) -> Path:
......
from __future__ import annotations
import hashlib
import json
import tarfile
import tempfile
import zipfile
......@@ -19,18 +17,14 @@ from build.env import IsolatedEnv as BaseIsolatedEnv
from poetry.core.utils.helpers import temporary_directory
from pyproject_hooks import quiet_subprocess_runner # type: ignore[import]
from poetry.installation.chooser import InvalidWheelName
from poetry.installation.chooser import Wheel
from poetry.utils.env import ephemeral_environment
if TYPE_CHECKING:
from contextlib import AbstractContextManager
from poetry.core.packages.utils.link import Link
from poetry.config.config import Config
from poetry.repositories import RepositoryPool
from poetry.utils.cache import ArtifactCache
from poetry.utils.env import Env
......@@ -86,12 +80,12 @@ class IsolatedEnv(BaseIsolatedEnv):
class Chef:
def __init__(self, config: Config, env: Env, pool: RepositoryPool) -> None:
def __init__(
self, artifact_cache: ArtifactCache, env: Env, pool: RepositoryPool
) -> None:
self._env = env
self._pool = pool
self._cache_dir = (
Path(config.get("cache-dir")).expanduser().joinpath("artifacts")
)
self._artifact_cache = artifact_cache
def prepare(
self, archive: Path, output_dir: Path | None = None, *, editable: bool = False
......@@ -181,7 +175,9 @@ class Chef:
sdist_dir = archive_dir
if destination is None:
destination = self.get_cache_directory_for_link(Link(archive.as_uri()))
destination = self._artifact_cache.get_cache_directory_for_link(
Link(archive.as_uri())
)
destination.mkdir(parents=True, exist_ok=True)
......@@ -196,72 +192,3 @@ class Chef:
@classmethod
def _is_wheel(cls, archive: Path) -> bool:
return archive.suffix == ".whl"
def get_cached_archive_for_link(self, link: Link, *, strict: bool) -> Path | None:
    """Return the best cached archive for *link*, or ``None`` if none is usable.

    In strict mode only an archive whose filename exactly matches the link's
    filename is returned.  Otherwise wheels supported by the current
    environment are preferred, ranked by their tag priority; sdists rank last.
    """
    archives = self.get_cached_archives_for_link(link)
    if not archives:
        return None

    # (priority, path) pairs; lower priority value wins.
    candidates: list[tuple[float | None, Path]] = []
    for archive in archives:
        if strict:
            # in strict mode return the original cached archive instead of the
            # prioritized archive type.
            if link.filename == archive.name:
                return archive
            continue

        if archive.suffix != ".whl":
            # Non-wheel archives sort after every wheel (inf > any tag index).
            candidates.append((float("inf"), archive))
            continue

        try:
            wheel = Wheel(archive.name)
        except InvalidWheelName:
            # Ignore files that merely look like wheels but don't parse.
            continue

        if not wheel.is_supported_by_environment(self._env):
            continue

        candidates.append(
            (wheel.get_minimum_supported_index(self._env.supported_tags), archive),
        )

    if not candidates:
        return None

    # Smallest tag index == highest-priority supported wheel.
    return min(candidates)[1]
def get_cached_archives_for_link(self, link: Link) -> list[Path]:
    """List every cached archive stored for *link*, grouped by extension."""
    cache_dir = self.get_cache_directory_for_link(link)
    # Recognized artifact extensions; scanned one extension at a time.
    archive_types = ["whl", "tar.gz", "tar.bz2", "bz2", "zip"]
    paths = []
    for archive_type in archive_types:
        for archive in cache_dir.glob(f"*.{archive_type}"):
            paths.append(Path(archive))

    return paths
def get_cache_directory_for_link(self, link: Link) -> Path:
    """Return the deterministic cache directory for *link*.

    The key hashes the link URL, its hash fragment, its subdirectory fragment
    and the current interpreter name/major.minor version, so the same link
    cached under different interpreters lands in different directories.
    """
    key_parts = {"url": link.url_without_fragment}

    if link.hash_name is not None and link.hash is not None:
        key_parts[link.hash_name] = link.hash

    if link.subdirectory_fragment:
        key_parts["subdirectory"] = link.subdirectory_fragment

    key_parts["interpreter_name"] = self._env.marker_env["interpreter_name"]
    # Only major.minor, e.g. "3.8.3" -> "38".
    key_parts["interpreter_version"] = "".join(
        self._env.marker_env["interpreter_version"].split(".")[:2]
    )

    # Canonical JSON so equal key_parts always hash identically.
    key = hashlib.sha256(
        json.dumps(
            key_parts, sort_keys=True, separators=(",", ":"), ensure_ascii=True
        ).encode("ascii")
    ).hexdigest()

    # Shard the digest into nested directories: ab/cd/ef/rest.
    split_key = [key[:2], key[2:4], key[4:6], key[6:]]

    return self._cache_dir.joinpath(*split_key)
......@@ -6,11 +6,9 @@ import re
from typing import TYPE_CHECKING
from typing import Any
from packaging.tags import Tag
from poetry.config.config import Config
from poetry.config.config import PackageFilterPolicy
from poetry.utils.patterns import wheel_file_re
from poetry.utils.wheel import Wheel
if TYPE_CHECKING:
......@@ -25,37 +23,6 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
class InvalidWheelName(Exception):
    """Raised when a filename cannot be parsed as a wheel filename."""

    pass
class Wheel:
    """Metadata parsed from a wheel filename.

    Splits the filename into distribution name, version, build tag and the
    python/abi/platform tag components, expanding the latter into the full
    set of ``Tag`` combinations the wheel claims to support.
    """

    def __init__(self, filename: str) -> None:
        wheel_info = wheel_file_re.match(filename)
        if not wheel_info:
            # Include the offending filename in the message (the previous
            # f-string had no placeholder, so it always printed "(unknown)").
            raise InvalidWheelName(f"{filename} is not a valid wheel filename.")

        self.filename = filename
        # Wheel filenames escape "-" as "_"; normalize back.
        self.name = wheel_info.group("name").replace("_", "-")
        self.version = wheel_info.group("ver").replace("_", "-")
        self.build_tag = wheel_info.group("build")
        # Each tag component may hold several "."-separated values.
        self.pyversions = wheel_info.group("pyver").split(".")
        self.abis = wheel_info.group("abi").split(".")
        self.plats = wheel_info.group("plat").split(".")
        # Cartesian product of the compressed tag components.
        self.tags = {
            Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
        }

    def get_minimum_supported_index(self, tags: list[Tag]) -> int | None:
        """Return the lowest index of this wheel's tags within *tags*, or None."""
        indexes = [tags.index(t) for t in self.tags if t in tags]
        return min(indexes) if indexes else None

    def is_supported_by_environment(self, env: Env) -> bool:
        """Whether *env* supports at least one of this wheel's tags."""
        return bool(set(env.supported_tags).intersection(self.tags))
class Chooser:
"""
A Chooser chooses an appropriate release archive for packages.
......
......@@ -27,6 +27,7 @@ from poetry.installation.wheel_installer import WheelInstaller
from poetry.puzzle.exceptions import SolverProblemError
from poetry.utils._compat import decode
from poetry.utils.authenticator import Authenticator
from poetry.utils.cache import ArtifactCache
from poetry.utils.env import EnvCommandError
from poetry.utils.helpers import atomic_open
from poetry.utils.helpers import get_file_hash
......@@ -77,10 +78,11 @@ class Executor:
else:
self._max_workers = 1
self._artifact_cache = ArtifactCache(cache_dir=config.artifacts_cache_directory)
self._authenticator = Authenticator(
config, self._io, disable_cache=disable_cache, pool_size=self._max_workers
)
self._chef = Chef(config, self._env, pool)
self._chef = Chef(self._artifact_cache, self._env, pool)
self._chooser = Chooser(pool, self._env, config)
self._executor = ThreadPoolExecutor(max_workers=self._max_workers)
......@@ -709,15 +711,19 @@ class Executor:
def _download_link(self, operation: Install | Update, link: Link) -> Path:
package = operation.package
output_dir = self._chef.get_cache_directory_for_link(link)
output_dir = self._artifact_cache.get_cache_directory_for_link(link)
# Try to get cached original package for the link provided
original_archive = self._chef.get_cached_archive_for_link(link, strict=True)
original_archive = self._artifact_cache.get_cached_archive_for_link(
link, strict=True
)
if original_archive is None:
# No cached original distributions was found, so we download and prepare it
try:
original_archive = self._download_archive(operation, link)
except BaseException:
cache_directory = self._chef.get_cache_directory_for_link(link)
cache_directory = self._artifact_cache.get_cache_directory_for_link(
link
)
cached_file = cache_directory.joinpath(link.filename)
# We can't use unlink(missing_ok=True) because it's not available
# prior to Python 3.8
......@@ -728,7 +734,11 @@ class Executor:
# Get potential higher prioritized cached archive, otherwise it will fall back
# to the original archive.
archive = self._chef.get_cached_archive_for_link(link, strict=False)
archive = self._artifact_cache.get_cached_archive_for_link(
link,
strict=False,
env=self._env,
)
# 'archive' can at this point never be None. Since we previously downloaded
# an archive, we now should have something cached that we can use here
assert archive is not None
......@@ -792,7 +802,9 @@ class Executor:
progress.start()
done = 0
archive = self._chef.get_cache_directory_for_link(link) / link.filename
archive = (
self._artifact_cache.get_cache_directory_for_link(link) / link.filename
)
archive.parent.mkdir(parents=True, exist_ok=True)
with atomic_open(archive) as f:
for chunk in response.iter_content(chunk_size=4096):
......
......@@ -8,11 +8,21 @@ import shutil
import time
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Any
from typing import Callable
from typing import Generic
from typing import TypeVar
from poetry.utils.wheel import InvalidWheelName
from poetry.utils.wheel import Wheel
if TYPE_CHECKING:
from poetry.core.packages.utils.link import Link
from poetry.utils.env import Env
# Used by Cachy for items that do not expire.
MAX_DATE = 9999999999
......@@ -196,3 +206,83 @@ class FileCache(Generic[T]):
data = json.loads(data_str[10:])
expires = int(data_str[:10])
return CacheItem(data, expires)
class ArtifactCache:
    """Filesystem cache that maps a download link to a directory of archives."""

    def __init__(self, *, cache_dir: Path) -> None:
        # Root under which all sharded cache directories live.
        self._cache_dir = cache_dir

    def get_cache_directory_for_link(self, link: Link) -> Path:
        """Return the deterministic cache directory for *link*.

        The directory name is a SHA-256 digest of the link URL plus its hash
        and subdirectory fragments, sharded as ``ab/cd/ef/<rest>``.
        """
        key_parts = {"url": link.url_without_fragment}
        if link.hash_name is not None and link.hash is not None:
            key_parts[link.hash_name] = link.hash
        if link.subdirectory_fragment:
            key_parts["subdirectory"] = link.subdirectory_fragment

        # Canonical JSON so equal key_parts always produce the same digest.
        serialized = json.dumps(
            key_parts, sort_keys=True, separators=(",", ":"), ensure_ascii=True
        )
        digest = hashlib.sha256(serialized.encode("ascii")).hexdigest()

        return self._cache_dir.joinpath(
            digest[:2], digest[2:4], digest[4:6], digest[6:]
        )

    def get_cached_archive_for_link(
        self,
        link: Link,
        *,
        strict: bool,
        env: Env | None = None,
    ) -> Path | None:
        """Return the best cached archive for *link*, or ``None``.

        Strict mode returns only an archive whose filename matches the link
        exactly; otherwise wheels supported by *env* are preferred, ranked by
        tag priority, with sdists ranking last.  *env* is required when
        ``strict`` is False.
        """
        assert strict or env is not None

        cached = self._get_cached_archives_for_link(link)
        if not cached:
            return None

        if strict:
            # in strict mode return the original cached archive instead of the
            # prioritized archive type.
            for archive in cached:
                if link.filename == archive.name:
                    return archive
            return None

        assert env is not None

        # (priority, path) pairs; the smallest priority wins.
        ranked: list[tuple[float | None, Path]] = []
        for archive in cached:
            if archive.suffix != ".whl":
                # Sdists sort after every wheel (inf > any tag index).
                ranked.append((float("inf"), archive))
                continue
            try:
                wheel = Wheel(archive.name)
            except InvalidWheelName:
                # Ignore files that merely look like wheels but don't parse.
                continue
            if wheel.is_supported_by_environment(env):
                ranked.append(
                    (wheel.get_minimum_supported_index(env.supported_tags), archive),
                )

        return min(ranked)[1] if ranked else None

    def _get_cached_archives_for_link(self, link: Link) -> list[Path]:
        """List every cached archive stored for *link*, grouped by extension."""
        cache_dir = self.get_cache_directory_for_link(link)
        return [
            Path(found)
            for extension in ("whl", "tar.gz", "tar.bz2", "bz2", "zip")
            for found in cache_dir.glob(f"*.{extension}")
        ]
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from packaging.tags import Tag
from poetry.utils.patterns import wheel_file_re
if TYPE_CHECKING:
from poetry.utils.env import Env
logger = logging.getLogger(__name__)
class InvalidWheelName(Exception):
    """Raised when a filename cannot be parsed as a wheel filename."""

    pass
class Wheel:
    """Metadata parsed from a wheel filename.

    Splits the filename into distribution name, version, build tag and the
    python/abi/platform tag components, expanding the latter into the full
    set of ``Tag`` combinations the wheel claims to support.
    """

    def __init__(self, filename: str) -> None:
        wheel_info = wheel_file_re.match(filename)
        if not wheel_info:
            # Include the offending filename in the message (the previous
            # f-string had no placeholder, so it always printed "(unknown)").
            raise InvalidWheelName(f"{filename} is not a valid wheel filename.")

        self.filename = filename
        # Wheel filenames escape "-" as "_"; normalize back.
        self.name = wheel_info.group("name").replace("_", "-")
        self.version = wheel_info.group("ver").replace("_", "-")
        self.build_tag = wheel_info.group("build")
        # Each tag component may hold several "."-separated values.
        self.pyversions = wheel_info.group("pyver").split(".")
        self.abis = wheel_info.group("abi").split(".")
        self.plats = wheel_info.group("plat").split(".")
        # Cartesian product of the compressed tag components.
        self.tags = {
            Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
        }

    def get_minimum_supported_index(self, tags: list[Tag]) -> int | None:
        """Return the lowest index of this wheel's tags within *tags*, or None."""
        indexes = [tags.index(t) for t in self.tags if t in tags]
        return min(indexes) if indexes else None

    def is_supported_by_environment(self, env: Env) -> bool:
        """Whether *env* supports at least one of this wheel's tags."""
        return bool(set(env.supported_tags).intersection(self.tags))
......@@ -9,14 +9,13 @@ from zipfile import ZipFile
import pytest
from packaging.tags import Tag
from poetry.core.packages.utils.link import Link
from poetry.factory import Factory
from poetry.installation.chef import Chef
from poetry.repositories import RepositoryPool
from poetry.utils.cache import ArtifactCache
from poetry.utils.env import EnvManager
from poetry.utils.env import MockEnv
from tests.repositories.test_pypi_repository import MockRepository
......@@ -24,6 +23,7 @@ if TYPE_CHECKING:
from pytest_mock import MockerFixture
from tests.conftest import Config
from tests.types import FixtureDirGetter
@pytest.fixture()
......@@ -40,166 +40,22 @@ def setup(mocker: MockerFixture, pool: RepositoryPool) -> None:
mocker.patch.object(Factory, "create_pool", return_value=pool)
@pytest.mark.parametrize(
    ("link", "strict", "available_packages"),
    [
        (
            "https://files.python-poetry.org/demo-0.1.0.tar.gz",
            True,
            [
                Path("/cache/demo-0.1.0-py2.py3-none-any"),
                Path("/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl"),
                Path("/cache/demo-0.1.0-cp37-cp37-macosx_10_15_x86_64.whl"),
            ],
        ),
        (
            "https://example.com/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
            False,
            [],
        ),
    ],
)
def test_get_not_found_cached_archive_for_link(
    config: Config,
    mocker: MockerFixture,
    link: str,
    strict: bool,
    available_packages: list[Path],
):
    """No usable archive: strict with no exact filename match, or an empty cache."""
    chef = Chef(
        config,
        MockEnv(
            version_info=(3, 8, 3),
            marker_env={"interpreter_name": "cpython", "interpreter_version": "3.8.3"},
            supported_tags=[
                Tag("cp38", "cp38", "macosx_10_15_x86_64"),
                Tag("py3", "none", "any"),
            ],
        ),
        Factory.create_pool(config),
    )
    mocker.patch.object(
        chef, "get_cached_archives_for_link", return_value=available_packages
    )

    archive = chef.get_cached_archive_for_link(Link(link), strict=strict)

    assert archive is None
@pytest.mark.parametrize(
("link", "cached", "strict"),
[
(
"https://files.python-poetry.org/demo-0.1.0.tar.gz",
"/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
False,
),
(
"https://example.com/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
"/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
False,
),
(
"https://files.python-poetry.org/demo-0.1.0.tar.gz",
"/cache/demo-0.1.0.tar.gz",
True,
),
(
"https://example.com/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
"/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
True,
),
],
)
def test_get_found_cached_archive_for_link(
config: Config, mocker: MockerFixture, link: str, cached: str, strict: bool
):
chef = Chef(
config,
MockEnv(
version_info=(3, 8, 3),
marker_env={"interpreter_name": "cpython", "interpreter_version": "3.8.3"},
supported_tags=[
Tag("cp38", "cp38", "macosx_10_15_x86_64"),
Tag("py3", "none", "any"),
],
),
Factory.create_pool(config),
)
mocker.patch.object(
chef,
"get_cached_archives_for_link",
return_value=[
Path("/cache/demo-0.1.0-py2.py3-none-any"),
Path("/cache/demo-0.1.0.tar.gz"),
Path("/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl"),
Path("/cache/demo-0.1.0-cp37-cp37-macosx_10_15_x86_64.whl"),
],
)
@pytest.fixture
def artifact_cache(config: Config) -> ArtifactCache:
return ArtifactCache(cache_dir=config.artifacts_cache_directory)
archive = chef.get_cached_archive_for_link(Link(link), strict=strict)
assert Path(cached) == archive
def test_get_cached_archives_for_link(config: Config, mocker: MockerFixture):
    """All demo-0.1.* fixture archives in the cache directory are discovered."""
    chef = Chef(
        config,
        MockEnv(
            marker_env={"interpreter_name": "cpython", "interpreter_version": "3.8.3"}
        ),
        Factory.create_pool(config),
    )
    distributions = Path(__file__).parent.parent.joinpath("fixtures/distributions")
    # Point the cache directory at the fixtures so glob finds real files.
    mocker.patch.object(
        chef,
        "get_cache_directory_for_link",
        return_value=distributions,
    )

    archives = chef.get_cached_archives_for_link(
        Link("https://files.python-poetry.org/demo-0.1.0.tar.gz")
    )

    assert archives
    assert set(archives) == set(distributions.glob("demo-0.1.*"))
def test_get_cache_directory_for_link(config: Config, config_cache_dir: Path):
def test_prepare_sdist(
config: Config,
config_cache_dir: Path,
artifact_cache: ArtifactCache,
fixture_dir: FixtureDirGetter,
) -> None:
chef = Chef(
config,
MockEnv(
marker_env={"interpreter_name": "cpython", "interpreter_version": "3.8.3"}
),
Factory.create_pool(config),
)
directory = chef.get_cache_directory_for_link(
Link("https://files.python-poetry.org/poetry-1.1.0.tar.gz")
)
expected = Path(
f"{config_cache_dir.as_posix()}/artifacts/ba/63/13/"
"283a3b3b7f95f05e9e6f84182d276f7bb0951d5b0cc24422b33f7a4648"
)
assert directory == expected
def test_prepare_sdist(config: Config, config_cache_dir: Path) -> None:
chef = Chef(config, EnvManager.get_system_env(), Factory.create_pool(config))
archive = (
Path(__file__)
.parent.parent.joinpath("fixtures/distributions/demo-0.1.0.tar.gz")
.resolve()
artifact_cache, EnvManager.get_system_env(), Factory.create_pool(config)
)
destination = chef.get_cache_directory_for_link(Link(archive.as_uri()))
archive = (fixture_dir("distributions") / "demo-0.1.0.tar.gz").resolve()
destination = artifact_cache.get_cache_directory_for_link(Link(archive.as_uri()))
wheel = chef.prepare(archive)
......@@ -207,10 +63,16 @@ def test_prepare_sdist(config: Config, config_cache_dir: Path) -> None:
assert wheel.name == "demo-0.1.0-py3-none-any.whl"
def test_prepare_directory(config: Config, config_cache_dir: Path):
chef = Chef(config, EnvManager.get_system_env(), Factory.create_pool(config))
archive = Path(__file__).parent.parent.joinpath("fixtures/simple_project").resolve()
def test_prepare_directory(
config: Config,
config_cache_dir: Path,
artifact_cache: ArtifactCache,
fixture_dir: FixtureDirGetter,
):
chef = Chef(
artifact_cache, EnvManager.get_system_env(), Factory.create_pool(config)
)
archive = fixture_dir("simple_project").resolve()
wheel = chef.prepare(archive)
......@@ -222,16 +84,14 @@ def test_prepare_directory(config: Config, config_cache_dir: Path):
def test_prepare_directory_with_extensions(
config: Config, config_cache_dir: Path
config: Config,
config_cache_dir: Path,
artifact_cache: ArtifactCache,
fixture_dir: FixtureDirGetter,
) -> None:
env = EnvManager.get_system_env()
chef = Chef(config, env, Factory.create_pool(config))
archive = (
Path(__file__)
.parent.parent.joinpath("fixtures/extended_with_no_setup")
.resolve()
)
chef = Chef(artifact_cache, env, Factory.create_pool(config))
archive = fixture_dir("extended_with_no_setup").resolve()
wheel = chef.prepare(archive)
......@@ -242,10 +102,16 @@ def test_prepare_directory_with_extensions(
os.unlink(wheel)
def test_prepare_directory_editable(config: Config, config_cache_dir: Path):
chef = Chef(config, EnvManager.get_system_env(), Factory.create_pool(config))
archive = Path(__file__).parent.parent.joinpath("fixtures/simple_project").resolve()
def test_prepare_directory_editable(
config: Config,
config_cache_dir: Path,
artifact_cache: ArtifactCache,
fixture_dir: FixtureDirGetter,
):
chef = Chef(
artifact_cache, EnvManager.get_system_env(), Factory.create_pool(config)
)
archive = fixture_dir("simple_project").resolve()
wheel = chef.prepare(archive, editable=True)
......
......@@ -32,6 +32,7 @@ from poetry.installation.operations import Uninstall
from poetry.installation.operations import Update
from poetry.installation.wheel_installer import WheelInstaller
from poetry.repositories.repository_pool import RepositoryPool
from poetry.utils.cache import ArtifactCache
from poetry.utils.env import MockEnv
from tests.repositories.test_pypi_repository import MockRepository
......@@ -93,7 +94,7 @@ def env(tmp_dir: str) -> MockEnv:
return MockEnv(path=path, is_venv=True)
@pytest.fixture()
@pytest.fixture
def io() -> BufferedIO:
io = BufferedIO()
io.output.formatter.set_style("c1_dark", Style("cyan", options=["dark"]))
......@@ -104,7 +105,7 @@ def io() -> BufferedIO:
return io
@pytest.fixture()
@pytest.fixture
def io_decorated() -> BufferedIO:
io = BufferedIO(decorated=True)
io.output.formatter.set_style("c1", Style("cyan"))
......@@ -113,14 +114,14 @@ def io_decorated() -> BufferedIO:
return io
@pytest.fixture()
@pytest.fixture
def io_not_decorated() -> BufferedIO:
io = BufferedIO(decorated=False)
return io
@pytest.fixture()
@pytest.fixture
def pool() -> RepositoryPool:
pool = RepositoryPool()
pool.add_repository(MockRepository())
......@@ -128,8 +129,15 @@ def pool() -> RepositoryPool:
return pool
@pytest.fixture()
def mock_file_downloads(http: type[httpretty.httpretty]) -> None:
@pytest.fixture
def artifact_cache(config: Config) -> ArtifactCache:
return ArtifactCache(cache_dir=config.artifacts_cache_directory)
@pytest.fixture
def mock_file_downloads(
http: type[httpretty.httpretty], fixture_dir: FixtureDirGetter
) -> None:
def callback(
request: HTTPrettyRequest, uri: str, headers: dict[str, Any]
) -> list[int | dict[str, Any] | str]:
......@@ -141,12 +149,10 @@ def mock_file_downloads(http: type[httpretty.httpretty]) -> None:
if not fixture.exists():
if name == "demo-0.1.0.tar.gz":
fixture = Path(__file__).parent.parent.joinpath(
"fixtures/distributions/demo-0.1.0.tar.gz"
)
fixture = fixture_dir("distributions") / "demo-0.1.0.tar.gz"
else:
fixture = Path(__file__).parent.parent.joinpath(
"fixtures/distributions/demo-0.1.0-py2.py3-none-any.whl"
fixture = (
fixture_dir("distributions") / "demo-0.1.0-py2.py3-none-any.whl"
)
return [200, headers, fixture.read_bytes()]
......@@ -158,32 +164,25 @@ def mock_file_downloads(http: type[httpretty.httpretty]) -> None:
)
@pytest.fixture()
def copy_wheel(tmp_dir: Path) -> Callable[[], Path]:
@pytest.fixture
def copy_wheel(tmp_dir: Path, fixture_dir: FixtureDirGetter) -> Callable[[], Path]:
def _copy_wheel() -> Path:
tmp_name = tempfile.mktemp()
Path(tmp_dir).joinpath(tmp_name).mkdir()
shutil.copyfile(
Path(__file__)
.parent.parent.joinpath(
"fixtures/distributions/demo-0.1.2-py2.py3-none-any.whl"
)
.as_posix(),
Path(tmp_dir)
.joinpath(tmp_name)
.joinpath("demo-0.1.2-py2.py3-none-any.whl")
.as_posix(),
(
fixture_dir("distributions") / "demo-0.1.2-py2.py3-none-any.whl"
).as_posix(),
(Path(tmp_dir) / tmp_name / "demo-0.1.2-py2.py3-none-any.whl").as_posix(),
)
return (
Path(tmp_dir).joinpath(tmp_name).joinpath("demo-0.1.2-py2.py3-none-any.whl")
)
return Path(tmp_dir) / tmp_name / "demo-0.1.2-py2.py3-none-any.whl"
return _copy_wheel
@pytest.fixture()
@pytest.fixture
def wheel(copy_wheel: Callable[[], Path]) -> Path:
archive = copy_wheel()
......@@ -202,13 +201,15 @@ def test_execute_executes_a_batch_of_operations(
mock_file_downloads: None,
env: MockEnv,
copy_wheel: Callable[[], Path],
fixture_dir: FixtureDirGetter,
):
wheel_install = mocker.patch.object(WheelInstaller, "install")
config.merge({"cache-dir": tmp_dir})
artifact_cache = ArtifactCache(cache_dir=config.artifacts_cache_directory)
prepare_spy = mocker.spy(Chef, "_prepare")
chef = Chef(config, env, Factory.create_pool(config))
chef = Chef(artifact_cache, env, Factory.create_pool(config))
chef.set_directory_wheel([copy_wheel(), copy_wheel()])
chef.set_sdist_wheel(copy_wheel())
......@@ -221,10 +222,7 @@ def test_execute_executes_a_batch_of_operations(
"demo",
"0.1.0",
source_type="file",
source_url=Path(__file__)
.parent.parent.joinpath(
"fixtures/distributions/demo-0.1.0-py2.py3-none-any.whl"
)
source_url=(fixture_dir("distributions") / "demo-0.1.0-py2.py3-none-any.whl")
.resolve()
.as_posix(),
)
......@@ -233,10 +231,7 @@ def test_execute_executes_a_batch_of_operations(
"simple-project",
"1.2.3",
source_type="directory",
source_url=Path(__file__)
.parent.parent.joinpath("fixtures/simple_project")
.resolve()
.as_posix(),
source_url=fixture_dir("simple_project").resolve().as_posix(),
)
git_package = Package(
......@@ -527,10 +522,9 @@ def test_executor_should_delete_incomplete_downloads(
pool: RepositoryPool,
mock_file_downloads: None,
env: MockEnv,
fixture_dir: FixtureDirGetter,
):
fixture = Path(__file__).parent.parent.joinpath(
"fixtures/distributions/demo-0.1.0-py2.py3-none-any.whl"
)
fixture = fixture_dir("distributions") / "demo-0.1.0-py2.py3-none-any.whl"
destination_fixture = Path(tmp_dir) / "tomlkit-0.5.3-py2.py3-none-any.whl"
shutil.copyfile(str(fixture), str(destination_fixture))
mocker.patch(
......@@ -538,11 +532,11 @@ def test_executor_should_delete_incomplete_downloads(
side_effect=Exception("Download error"),
)
mocker.patch(
"poetry.installation.chef.Chef.get_cached_archive_for_link",
side_effect=lambda link, strict: None,
"poetry.installation.executor.ArtifactCache.get_cached_archive_for_link",
return_value=None,
)
mocker.patch(
"poetry.installation.chef.Chef.get_cache_directory_for_link",
"poetry.installation.executor.ArtifactCache.get_cache_directory_for_link",
return_value=Path(tmp_dir),
)
......@@ -624,15 +618,13 @@ def test_executor_should_not_write_pep610_url_references_for_cached_package(
def test_executor_should_write_pep610_url_references_for_wheel_files(
tmp_venv: VirtualEnv, pool: RepositoryPool, config: Config, io: BufferedIO
tmp_venv: VirtualEnv,
pool: RepositoryPool,
config: Config,
io: BufferedIO,
fixture_dir: FixtureDirGetter,
):
url = (
Path(__file__)
.parent.parent.joinpath(
"fixtures/distributions/demo-0.1.0-py2.py3-none-any.whl"
)
.resolve()
)
url = (fixture_dir("distributions") / "demo-0.1.0-py2.py3-none-any.whl").resolve()
package = Package("demo", "0.1.0", source_type="file", source_url=url.as_posix())
# Set package.files so the executor will attempt to hash the package
package.files = [
......@@ -658,13 +650,13 @@ def test_executor_should_write_pep610_url_references_for_wheel_files(
def test_executor_should_write_pep610_url_references_for_non_wheel_files(
tmp_venv: VirtualEnv, pool: RepositoryPool, config: Config, io: BufferedIO
tmp_venv: VirtualEnv,
pool: RepositoryPool,
config: Config,
io: BufferedIO,
fixture_dir: FixtureDirGetter,
):
url = (
Path(__file__)
.parent.parent.joinpath("fixtures/distributions/demo-0.1.0.tar.gz")
.resolve()
)
url = (fixture_dir("distributions") / "demo-0.1.0.tar.gz").resolve()
package = Package("demo", "0.1.0", source_type="file", source_url=url.as_posix())
# Set package.files so the executor will attempt to hash the package
package.files = [
......@@ -693,19 +685,17 @@ def test_executor_should_write_pep610_url_references_for_directories(
tmp_venv: VirtualEnv,
pool: RepositoryPool,
config: Config,
artifact_cache: ArtifactCache,
io: BufferedIO,
wheel: Path,
fixture_dir: FixtureDirGetter,
):
url = (
Path(__file__)
.parent.parent.joinpath("fixtures/git/github.com/demo/demo")
.resolve()
)
url = (fixture_dir("git") / "github.com" / "demo" / "demo").resolve()
package = Package(
"demo", "0.1.2", source_type="directory", source_url=url.as_posix()
)
chef = Chef(config, tmp_venv, Factory.create_pool(config))
chef = Chef(artifact_cache, tmp_venv, Factory.create_pool(config))
chef.set_directory_wheel(wheel)
executor = Executor(tmp_venv, pool, config, io)
......@@ -720,14 +710,12 @@ def test_executor_should_write_pep610_url_references_for_editable_directories(
tmp_venv: VirtualEnv,
pool: RepositoryPool,
config: Config,
artifact_cache: ArtifactCache,
io: BufferedIO,
wheel: Path,
fixture_dir: FixtureDirGetter,
):
url = (
Path(__file__)
.parent.parent.joinpath("fixtures/git/github.com/demo/demo")
.resolve()
)
url = (fixture_dir("git") / "github.com" / "demo" / "demo").resolve()
package = Package(
"demo",
"0.1.2",
......@@ -736,7 +724,7 @@ def test_executor_should_write_pep610_url_references_for_editable_directories(
develop=True,
)
chef = Chef(config, tmp_venv, Factory.create_pool(config))
chef = Chef(artifact_cache, tmp_venv, Factory.create_pool(config))
chef.set_directory_wheel(wheel)
executor = Executor(tmp_venv, pool, config, io)
......@@ -761,7 +749,7 @@ def test_executor_should_write_pep610_url_references_for_wheel_urls(
if is_artifact_cached:
link_cached = fixture_dir("distributions") / "demo-0.1.0-py2.py3-none-any.whl"
mocker.patch(
"poetry.installation.chef.Chef.get_cached_archive_for_link",
"poetry.installation.executor.ArtifactCache.get_cached_archive_for_link",
return_value=link_cached,
)
download_spy = mocker.spy(Executor, "_download_archive")
......@@ -840,7 +828,7 @@ def test_executor_should_write_pep610_url_references_for_non_wheel_urls(
cached_sdist = fixture_dir("distributions") / "demo-0.1.0.tar.gz"
cached_wheel = fixture_dir("distributions") / "demo-0.1.0-py2.py3-none-any.whl"
def mock_get_cached_archive_for_link_func(_: Link, strict: bool):
def mock_get_cached_archive_for_link_func(_: Link, *, strict: bool, **__: Any):
if is_wheel_cached and not strict:
return cached_wheel
if is_sdist_cached:
......@@ -848,7 +836,7 @@ def test_executor_should_write_pep610_url_references_for_non_wheel_urls(
return None
mocker.patch(
"poetry.installation.chef.Chef.get_cached_archive_for_link",
"poetry.installation.executor.ArtifactCache.get_cached_archive_for_link",
side_effect=mock_get_cached_archive_for_link_func,
)
......@@ -898,6 +886,7 @@ def test_executor_should_write_pep610_url_references_for_git(
tmp_venv: VirtualEnv,
pool: RepositoryPool,
config: Config,
artifact_cache: ArtifactCache,
io: BufferedIO,
mock_file_downloads: None,
wheel: Path,
......@@ -911,7 +900,7 @@ def test_executor_should_write_pep610_url_references_for_git(
source_url="https://github.com/demo/demo.git",
)
chef = Chef(config, tmp_venv, Factory.create_pool(config))
chef = Chef(artifact_cache, tmp_venv, Factory.create_pool(config))
chef.set_directory_wheel(wheel)
executor = Executor(tmp_venv, pool, config, io)
......@@ -936,6 +925,7 @@ def test_executor_should_append_subdirectory_for_git(
tmp_venv: VirtualEnv,
pool: RepositoryPool,
config: Config,
artifact_cache: ArtifactCache,
io: BufferedIO,
mock_file_downloads: None,
wheel: Path,
......@@ -950,7 +940,7 @@ def test_executor_should_append_subdirectory_for_git(
source_subdirectory="two",
)
chef = Chef(config, tmp_venv, Factory.create_pool(config))
chef = Chef(artifact_cache, tmp_venv, Factory.create_pool(config))
chef.set_directory_wheel(wheel)
spy = mocker.spy(chef, "prepare")
......@@ -966,6 +956,7 @@ def test_executor_should_write_pep610_url_references_for_git_with_subdirectories
tmp_venv: VirtualEnv,
pool: RepositoryPool,
config: Config,
artifact_cache: ArtifactCache,
io: BufferedIO,
mock_file_downloads: None,
wheel: Path,
......@@ -980,7 +971,7 @@ def test_executor_should_write_pep610_url_references_for_git_with_subdirectories
source_subdirectory="two",
)
chef = Chef(config, tmp_venv, Factory.create_pool(config))
chef = Chef(artifact_cache, tmp_venv, Factory.create_pool(config))
chef.set_directory_wheel(wheel)
executor = Executor(tmp_venv, pool, config, io)
......@@ -1040,6 +1031,7 @@ def test_executor_fallback_on_poetry_create_error_without_wheel_installer(
tmp_dir: str,
mock_file_downloads: None,
env: MockEnv,
fixture_dir: FixtureDirGetter,
):
mock_pip_install = mocker.patch("poetry.installation.executor.pip_install")
mock_sdist_builder = mocker.patch("poetry.core.masonry.builders.sdist.SdistBuilder")
......@@ -1063,10 +1055,7 @@ def test_executor_fallback_on_poetry_create_error_without_wheel_installer(
"simple-project",
"1.2.3",
source_type="directory",
source_url=Path(__file__)
.parent.parent.joinpath("fixtures/simple_project")
.resolve()
.as_posix(),
source_url=fixture_dir("simple_project").resolve().as_posix(),
)
return_code = executor.execute(
......@@ -1105,6 +1094,7 @@ def test_build_backend_errors_are_reported_correctly_if_caused_by_subprocess(
tmp_dir: str,
mock_file_downloads: None,
env: MockEnv,
fixture_dir: FixtureDirGetter,
) -> None:
error = BuildBackendException(
CalledProcessError(1, ["pip"], output=b"Error on stdout")
......@@ -1120,10 +1110,7 @@ def test_build_backend_errors_are_reported_correctly_if_caused_by_subprocess(
package_name,
package_version,
source_type="directory",
source_url=Path(__file__)
.parent.parent.joinpath("fixtures/simple_project")
.resolve()
.as_posix(),
source_url=fixture_dir("simple_project").resolve().as_posix(),
develop=editable,
)
# must not be included in the error message
......
from __future__ import annotations
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Any
from typing import TypeVar
......@@ -9,18 +10,21 @@ from unittest.mock import Mock
import pytest
from cachy import CacheManager
from packaging.tags import Tag
from poetry.core.packages.utils.link import Link
from poetry.utils.cache import ArtifactCache
from poetry.utils.cache import FileCache
from poetry.utils.env import MockEnv
if TYPE_CHECKING:
from pathlib import Path
from _pytest.monkeypatch import MonkeyPatch
from pytest import FixtureRequest
from pytest_mock import MockerFixture
from tests.conftest import Config
from tests.types import FixtureDirGetter
FILE_CACHE = Union[FileCache, CacheManager]
......@@ -192,3 +196,139 @@ def test_cachy_compatibility(
assert cachy_file_cache.get("key3") == test_str
assert cachy_file_cache.get("key4") == test_obj
def test_get_cache_directory_for_link(tmp_path: Path) -> None:
    """The cache directory is derived deterministically from the link URL."""
    cache = ArtifactCache(cache_dir=tmp_path)

    directory = cache.get_cache_directory_for_link(
        Link("https://files.python-poetry.org/poetry-1.1.0.tar.gz")
    )

    # Sharded sha256 digest of the canonical key for this URL.
    expected = Path(
        f"{tmp_path.as_posix()}/11/4f/a8/"
        "1c89d75547e4967082d30a28360401c82c83b964ddacee292201bf85f2"
    )
    assert directory == expected
def test_get_cached_archives_for_link(
    fixture_dir: FixtureDirGetter, mocker: MockerFixture
) -> None:
    """All demo-0.1.* fixture archives in the cache directory are discovered."""
    distributions = fixture_dir("distributions")
    cache = ArtifactCache(cache_dir=Path())

    # Point the cache directory at the fixtures so glob finds real files.
    mocker.patch.object(
        cache,
        "get_cache_directory_for_link",
        return_value=distributions,
    )

    archives = cache._get_cached_archives_for_link(
        Link("https://files.python-poetry.org/demo-0.1.0.tar.gz")
    )

    assert archives
    assert set(archives) == set(distributions.glob("demo-0.1.*"))
@pytest.mark.parametrize(
    ("link", "strict", "available_packages"),
    [
        (
            "https://files.python-poetry.org/demo-0.1.0.tar.gz",
            True,
            [
                Path("/cache/demo-0.1.0-py2.py3-none-any"),
                Path("/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl"),
                Path("/cache/demo-0.1.0-cp37-cp37-macosx_10_15_x86_64.whl"),
            ],
        ),
        (
            "https://example.com/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
            False,
            [],
        ),
    ],
)
def test_get_not_found_cached_archive_for_link(
    mocker: MockerFixture,
    link: str,
    strict: bool,
    available_packages: list[Path],
) -> None:
    """No usable archive: strict with no exact filename match, or an empty cache."""
    env = MockEnv(
        version_info=(3, 8, 3),
        marker_env={"interpreter_name": "cpython", "interpreter_version": "3.8.3"},
        supported_tags=[
            Tag("cp38", "cp38", "macosx_10_15_x86_64"),
            Tag("py3", "none", "any"),
        ],
    )
    cache = ArtifactCache(cache_dir=Path())
    mocker.patch.object(
        cache,
        "_get_cached_archives_for_link",
        return_value=available_packages,
    )

    archive = cache.get_cached_archive_for_link(Link(link), strict=strict, env=env)

    assert archive is None
@pytest.mark.parametrize(
    ("link", "cached", "strict"),
    [
        (
            "https://files.python-poetry.org/demo-0.1.0.tar.gz",
            "/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
            False,
        ),
        (
            "https://example.com/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
            "/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
            False,
        ),
        (
            "https://files.python-poetry.org/demo-0.1.0.tar.gz",
            "/cache/demo-0.1.0.tar.gz",
            True,
        ),
        (
            "https://example.com/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
            "/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
            True,
        ),
    ],
)
def test_get_found_cached_archive_for_link(
    mocker: MockerFixture,
    link: str,
    cached: str,
    strict: bool,
) -> None:
    """Strict mode returns the exact filename match; non-strict prefers the
    best environment-supported wheel over the sdist."""
    env = MockEnv(
        version_info=(3, 8, 3),
        marker_env={"interpreter_name": "cpython", "interpreter_version": "3.8.3"},
        supported_tags=[
            Tag("cp38", "cp38", "macosx_10_15_x86_64"),
            Tag("py3", "none", "any"),
        ],
    )
    cache = ArtifactCache(cache_dir=Path())
    mocker.patch.object(
        cache,
        "_get_cached_archives_for_link",
        return_value=[
            Path("/cache/demo-0.1.0-py2.py3-none-any"),
            Path("/cache/demo-0.1.0.tar.gz"),
            Path("/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl"),
            Path("/cache/demo-0.1.0-cp37-cp37-macosx_10_15_x86_64.whl"),
        ],
    )

    archive = cache.get_cached_archive_for_link(Link(link), strict=strict, env=env)

    assert Path(cached) == archive
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment