Commit 7bb0a876 by Randy Döring Committed by GitHub

move export code into export plugin (#6128)

parent fdbe6e3a
......@@ -455,14 +455,15 @@ importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""}
[[package]]
name = "poetry-plugin-export"
version = "1.0.5"
version = "1.0.6"
description = "Poetry plugin to export the dependencies to various formats"
category = "main"
optional = false
python-versions = ">=3.7,<4.0"
[package.dependencies]
poetry = ">=1.2.0b1dev0,<2.0.0"
poetry = ">=1.2.0b3,<2.0.0"
poetry-core = ">=1.1.0b3,<2.0.0"
[[package]]
name = "pre-commit"
......@@ -887,7 +888,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
[metadata]
lock-version = "1.1"
python-versions = "^3.7"
content-hash = "33fbd283e96bc751b1f618e7b1c5b1738b3f570bc1e2081a0eb8dd873c219b11"
content-hash = "5f82ec1a5c13c6bec3e428cb9468204f986d5ebcbe0b2d5e19e929c4f16e816b"
[metadata.files]
atomicwrites = [
......@@ -1242,8 +1243,8 @@ poetry-core = [
{file = "poetry_core-1.1.0b3-py3-none-any.whl", hash = "sha256:2b65b697e22fc6fc9b60afcadfd4421db796d30d558a003654b5e67e3b262eaa"},
]
poetry-plugin-export = [
{file = "poetry-plugin-export-1.0.5.tar.gz", hash = "sha256:e7796292eaafac1316149f3a8070923c2a2214c98d046de1246b4d8eb0b0c84b"},
{file = "poetry_plugin_export-1.0.5-py3-none-any.whl", hash = "sha256:925088b27c5747c3e0d8cc2bbb692a533eb4569874a676ac0d0f5ab237b15aaa"},
{file = "poetry-plugin-export-1.0.6.tar.gz", hash = "sha256:af870afceb38e583afa57bcfadfa5cd35ebd74e35aacadcb802bb3a073c13adb"},
{file = "poetry_plugin_export-1.0.6-py3-none-any.whl", hash = "sha256:55ae87d4560a6a3f96e04eba63c78cadbd0410e9652dee0ee1cde93281e9cb48"},
]
pre-commit = [
{file = "pre_commit-2.19.0-py2.py3-none-any.whl", hash = "sha256:10c62741aa5704faea2ad69cb550ca78082efe5697d6f04e5710c3c229afdd10"},
......
......@@ -45,7 +45,7 @@ generate-setup-file = false
python = "^3.7"
poetry-core = "^1.1.0b3"
poetry-plugin-export = "^1.0.5"
poetry-plugin-export = "^1.0.6"
cachecontrol = { version = "^0.12.9", extras = ["filecache"] }
cachy = "^0.3.0"
cleo = "^1.0.0a5"
......
......@@ -5,7 +5,6 @@ import logging
import os
import re
from copy import deepcopy
from hashlib import sha256
from pathlib import Path
from typing import TYPE_CHECKING
......@@ -27,20 +26,12 @@ from tomlkit import table
from tomlkit.exceptions import TOMLKitError
from tomlkit.items import Array
from poetry.packages import DependencyPackage
from poetry.utils.extras import get_extra_package_names
if TYPE_CHECKING:
from collections.abc import Iterable
from collections.abc import Iterator
from collections.abc import Sequence
from poetry.core.packages.directory_dependency import DirectoryDependency
from poetry.core.packages.file_dependency import FileDependency
from poetry.core.packages.url_dependency import URLDependency
from poetry.core.packages.vcs_dependency import VCSDependency
from poetry.core.version.markers import BaseMarker
from tomlkit.items import Table
from tomlkit.toml_document import TOMLDocument
......@@ -206,175 +197,6 @@ class Locker:
return packages
@staticmethod
def __get_locked_package(
    dependency: Dependency,
    packages_by_name: dict[str, list[Package]],
    decided: dict[Package, Dependency] | None = None,
) -> Package | None:
    """
    Internal helper to identify corresponding locked package using dependency
    version constraints.
    """
    previous_choices = {} if decided is None else decided

    # Keep only locked candidates whose python constraint and version are
    # consistent with this dependency.
    candidates = []
    for candidate in packages_by_name.get(dependency.name, []):
        python_ok = candidate.python_constraint.allows_all(
            dependency.python_constraint
        )
        if python_ok and dependency.constraint.allows(candidate.version):
            candidates.append(candidate)

    # If an earlier decision is still compatible with the current
    # requirement's marker, stick with it for stability.
    for candidate in candidates:
        earlier = previous_choices.get(candidate)
        if earlier is None:
            continue
        if not earlier.marker.intersect(dependency.marker).is_empty():
            return candidate

    # Otherwise prefer the first candidate (callers pre-sort by version),
    # or None when nothing matched.
    return candidates[0] if candidates else None
@classmethod
def __walk_dependencies(
    cls,
    dependencies: list[Dependency],
    packages_by_name: dict[str, list[Package]],
) -> dict[Package, Dependency]:
    """Resolve each requirement against the locked packages, breadth-first.

    ``dependencies`` is consumed as a work queue (popped from the front and
    extended with transitive requirements).  Returns a mapping from each
    locked package reached to the merged dependency describing why it was
    reached.  Raises ``RuntimeError`` when a requirement matches no locked
    package.
    """
    nested_dependencies: dict[Package, Dependency] = {}
    # (dependency, marker) pairs already processed — prevents re-walking
    # the same requirement and guards against cycles.
    visited: set[tuple[Dependency, BaseMarker]] = set()
    while dependencies:
        requirement = dependencies.pop(0)
        if (requirement, requirement.marker) in visited:
            continue
        visited.add((requirement, requirement.marker))

        # Find the locked package satisfying this requirement; earlier
        # decisions (nested_dependencies) are consulted for consistency.
        locked_package = cls.__get_locked_package(
            requirement, packages_by_name, nested_dependencies
        )

        if not locked_package:
            raise RuntimeError(f"Dependency walk failed at {requirement}")

        if requirement.extras:
            locked_package = locked_package.with_features(requirement.extras)

        # create dependency from locked package to retain dependency metadata
        # if this is not done, we can end-up with incorrect nested dependencies
        constraint = requirement.constraint
        marker = requirement.marker
        requirement = locked_package.to_dependency()
        requirement.marker = requirement.marker.intersect(marker)

        requirement.constraint = constraint

        # Queue the locked package's own requirements for walking.
        for require in locked_package.requires:
            # Skip optional requirements that no selected feature activates.
            if require.is_optional() and not any(
                require in locked_package.extras[feature]
                for feature in locked_package.features
            ):
                continue

            # deepcopy before mutating the marker so the locked package's
            # own requirement objects are left untouched.
            require = deepcopy(require)
            require.marker = require.marker.intersect(
                requirement.marker.without_extras()
            )
            if not require.marker.is_empty():
                dependencies.append(require)

        key = locked_package

        if key not in nested_dependencies:
            nested_dependencies[key] = requirement
        else:
            # Same package reached via several paths: union the markers so
            # the recorded dependency covers every path.
            nested_dependencies[key].marker = nested_dependencies[key].marker.union(
                requirement.marker
            )

    return nested_dependencies
@classmethod
def get_project_dependencies(
    cls,
    project_requires: list[Dependency],
    locked_packages: list[Package],
) -> Iterable[tuple[Package, Dependency]]:
    """Pair each reachable locked package with its merged dependency.

    Walks ``project_requires`` against ``locked_packages`` and yields
    ``(package, dependency)`` items.
    """
    # Group locked entries by name: a requirement may appear with several
    # different constraints, each potentially matching a different entry.
    grouped: dict[str, list[Package]] = {}
    for locked in locked_packages:
        grouped.setdefault(locked.name, []).append(locked)

    # Highest versions first so that they are preferred during the walk.
    for name, entries in grouped.items():
        grouped[name] = sorted(
            entries,
            key=lambda entry: entry.version,
            reverse=True,
        )

    resolved = cls.__walk_dependencies(
        dependencies=project_requires,
        packages_by_name=grouped,
    )

    return resolved.items()
def get_project_dependency_packages(
    self,
    project_requires: list[Dependency],
    project_python_marker: BaseMarker | None = None,
    extras: bool | Sequence[str] | None = None,
) -> Iterator[DependencyPackage]:
    """Yield a ``DependencyPackage`` for every locked package the project needs.

    ``extras`` may be ``True`` (include all extras), a sequence of extra
    names, or falsy (no extras).
    """
    # Constrain every requirement to the project's python marker, working
    # on copies so the caller's Dependency objects are never mutated.
    if project_python_marker is not None:
        constrained: list[Dependency] = []
        for original in project_requires:
            clone = deepcopy(original)
            clone.marker = clone.marker.intersect(project_python_marker)
            constrained.append(clone)
        project_requires = constrained

    repository = self.locked_repository()

    # Build the set of package names activated by the selected extras.
    # ``extras is True`` means "all extras": skip filtering entirely.
    extra_package_names: set[str] | None = None
    if extras is not True:
        names = get_extra_package_names(
            repository.packages,
            self.lock_data.get("extras", {}),
            extras or (),
        )
        extra_package_names = set(names)

    # Drop requirements with no locked counterpart, and optional packages
    # whose extra was not requested.
    selected = []
    for dependency in project_requires:
        found = repository.find_packages(dependency=dependency)
        if not found:
            continue
        package = found[0]

        if (
            extra_package_names is not None
            and package.optional
            and package.name not in extra_package_names
        ):
            # a package is locked as optional, but is not activated via extras
            continue

        selected.append(dependency)

    pairs = self.get_project_dependencies(
        project_requires=selected,
        locked_packages=repository.packages,
    )
    for package, dependency in pairs:
        yield DependencyPackage(dependency=dependency, package=package)
def set_lock_data(self, root: Package, packages: list[Package]) -> bool:
files: dict[str, Any] = table()
package_specs = self._lock_packages(packages)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment