Commit c640aaff by Sébastien Eustace

Fixed multiple constraint handling for the root package

parent 78963e69
...@@ -9,6 +9,7 @@ ...@@ -9,6 +9,7 @@
- Fixed an error when resolving directory dependencies with no sub dependencies. - Fixed an error when resolving directory dependencies with no sub dependencies.
- Fixed an error when locking packages with no description. - Fixed an error when locking packages with no description.
- Fixed path resolution for transitive file dependencies. - Fixed path resolution for transitive file dependencies.
- Fixed handling of multiple constraints on a single dependency of the root package.
## [0.12.10] - 2018-11-22 ## [0.12.10] - 2018-11-22
......
...@@ -374,8 +374,7 @@ class VersionSolver: ...@@ -374,8 +374,7 @@ class VersionSolver:
return dependency.name return dependency.name
if not version.is_root(): version = self._provider.complete_package(version)
version = self._provider.complete_package(version)
conflict = False conflict = False
for incompatibility in self._provider.incompatibilities_for(version): for incompatibility in self._provider.incompatibilities_for(version):
......
...@@ -11,6 +11,9 @@ class DependencyPackage(object): ...@@ -11,6 +11,9 @@ class DependencyPackage(object):
def package(self): def package(self):
return self._package return self._package
def clone(self):  # type: () -> DependencyPackage
    """Return a new DependencyPackage pairing the same dependency with a
    clone of the wrapped package."""
    cloned_package = self._package.clone()
    return self.__class__(self._dependency, cloned_package)
def __getattr__(self, name): def __getattr__(self, name):
return getattr(self._package, name) return getattr(self._package, name)
......
...@@ -369,6 +369,9 @@ class Package(object): ...@@ -369,6 +369,9 @@ class Package(object):
for dep in self.requires: for dep in self.requires:
clone.requires.append(dep) clone.requires.append(dep)
for dep in self.dev_requires:
clone.dev_requires.append(dep)
return clone return clone
def __hash__(self): def __hash__(self):
......
...@@ -43,3 +43,13 @@ class ProjectPackage(Package): ...@@ -43,3 +43,13 @@ class ProjectPackage(Package):
self._python_marker = parse_marker( self._python_marker = parse_marker(
create_nested_marker("python_version", self._python_constraint) create_nested_marker("python_version", self._python_constraint)
) )
def clone(self):  # type: () -> ProjectPackage
    """Clone this project package.

    Delegates the shared fields to the parent clone, then copies the
    project-specific metadata; list attributes are shallow-copied so the
    clone can be mutated without affecting the original.
    """
    cloned = super(ProjectPackage, self).clone()
    cloned.build = self.build
    cloned.packages = list(self.packages)
    cloned.include = list(self.include)
    cloned.exclude = list(self.exclude)
    return cloned
...@@ -453,9 +453,13 @@ class Provider: ...@@ -453,9 +453,13 @@ class Provider:
self, package self, package
): # type: (DependencyPackage) -> DependencyPackage ): # type: (DependencyPackage) -> DependencyPackage
if package.is_root(): if package.is_root():
return package package = package.clone()
if package.source_type not in {"directory", "file", "git"}: if not package.is_root() and package.source_type not in {
"directory",
"file",
"git",
}:
package = DependencyPackage( package = DependencyPackage(
package.dependency, package.dependency,
self._pool.package( self._pool.package(
...@@ -487,119 +491,114 @@ class Provider: ...@@ -487,119 +491,114 @@ class Provider:
# An example of this is: # An example of this is:
# - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6" # - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
# - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6" # - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
if not package.is_root(): duplicates = {}
duplicates = {} for dep in dependencies:
for dep in dependencies: if dep.name not in duplicates:
if dep.name not in duplicates: duplicates[dep.name] = []
duplicates[dep.name] = []
duplicates[dep.name].append(dep)
dependencies = []
for dep_name, deps in duplicates.items():
if len(deps) == 1:
dependencies.append(deps[0])
continue
self.debug( duplicates[dep.name].append(dep)
"<debug>Duplicate dependencies for {}</debug>".format(dep_name)
)
# Regrouping by constraint dependencies = []
by_constraint = {} for dep_name, deps in duplicates.items():
for dep in deps: if len(deps) == 1:
if dep.constraint not in by_constraint: dependencies.append(deps[0])
by_constraint[dep.constraint] = [] continue
by_constraint[dep.constraint].append(dep)
# We merge by constraint
for constraint, _deps in by_constraint.items():
new_markers = []
for dep in _deps:
pep_508_dep = dep.to_pep_508(False)
if ";" not in pep_508_dep:
continue
markers = pep_508_dep.split(";")[1].strip()
if not markers:
# One of the constraint has no markers
# so this means we don't actually need to merge
new_markers = []
break
new_markers.append("({})".format(markers))
if not new_markers:
dependencies += _deps
continue
dep = _deps[0] self.debug("<debug>Duplicate dependencies for {}</debug>".format(dep_name))
new_requirement = "{}; {}".format(
dep.to_pep_508(False).split(";")[0], " or ".join(new_markers)
)
new_dep = dependency_from_pep_508(new_requirement)
if dep.is_optional() and not dep.is_activated():
new_dep.deactivate()
else:
new_dep.activate()
by_constraint[constraint] = [new_dep] # Regrouping by constraint
by_constraint = {}
for dep in deps:
if dep.constraint not in by_constraint:
by_constraint[dep.constraint] = []
continue by_constraint[dep.constraint].append(dep)
if len(by_constraint) == 1:
self.debug(
"<debug>Merging requirements for {}</debug>".format(
str(deps[0])
)
)
dependencies.append(list(by_constraint.values())[0][0])
continue
# We leave dependencies as-is if they have the same # We merge by constraint
# python/platform constraints. for constraint, _deps in by_constraint.items():
# That way the resolver will pickup the conflict new_markers = []
# and display a proper error. for dep in _deps:
_deps = [value[0] for value in by_constraint.values()] pep_508_dep = dep.to_pep_508(False)
seen = set()
for _dep in _deps:
pep_508_dep = _dep.to_pep_508(False)
if ";" not in pep_508_dep: if ";" not in pep_508_dep:
_requirements = "" continue
else:
_requirements = pep_508_dep.split(";")[1].strip()
if _requirements not in seen: markers = pep_508_dep.split(";")[1].strip()
seen.add(_requirements) if not markers:
# One of the constraint has no markers
# so this means we don't actually need to merge
new_markers = []
break
if len(_deps) != len(seen): new_markers.append("({})".format(markers))
for _dep in _deps:
dependencies.append(_dep)
if not new_markers:
dependencies += _deps
continue continue
# At this point, we raise an exception that will dep = _deps[0]
# tell the solver to enter compatibility mode new_requirement = "{}; {}".format(
# which means it will resolve for subsets dep.to_pep_508(False).split(";")[0], " or ".join(new_markers)
# Python constraints )
# new_dep = dependency_from_pep_508(new_requirement)
# For instance, if our root package requires Python ~2.7 || ^3.6 if dep.is_optional() and not dep.is_activated():
# And we have one dependency that requires Python <3.6 new_dep.deactivate()
# and the other Python >=3.6 than the solver will solve else:
# dependencies for Python >=2.7,<2.8 || >=3.4,<3.6 new_dep.activate()
# and Python >=3.6,<4.0
python_constraints = [] by_constraint[constraint] = [new_dep]
for constraint, _deps in by_constraint.items():
python_constraints.append(_deps[0].python_versions) continue
_deps = [str(_dep[0]) for _dep in by_constraint.values()] if len(by_constraint) == 1:
self.debug( self.debug(
"<warning>Different requirements found for {}.</warning>".format( "<debug>Merging requirements for {}</debug>".format(str(deps[0]))
", ".join(_deps[:-1]) + " and " + _deps[-1]
)
) )
raise CompatibilityError(*python_constraints) dependencies.append(list(by_constraint.values())[0][0])
continue
# We leave dependencies as-is if they have the same
# python/platform constraints.
# That way the resolver will pickup the conflict
# and display a proper error.
_deps = [value[0] for value in by_constraint.values()]
seen = set()
for _dep in _deps:
pep_508_dep = _dep.to_pep_508(False)
if ";" not in pep_508_dep:
_requirements = ""
else:
_requirements = pep_508_dep.split(";")[1].strip()
if _requirements not in seen:
seen.add(_requirements)
if len(_deps) != len(seen):
for _dep in _deps:
dependencies.append(_dep)
continue
# At this point, we raise an exception that will
# tell the solver to enter compatibility mode
# which means it will resolve for subsets
# Python constraints
#
# For instance, if our root package requires Python ~2.7 || ^3.6
# And we have one dependency that requires Python <3.6
# and the other Python >=3.6 than the solver will solve
# dependencies for Python >=2.7,<2.8 || >=3.4,<3.6
# and Python >=3.6,<4.0
python_constraints = []
for constraint, _deps in by_constraint.items():
python_constraints.append(_deps[0].python_versions)
_deps = [str(_dep[0]) for _dep in by_constraint.values()]
self.debug(
"<warning>Different requirements found for {}.</warning>".format(
", ".join(_deps[:-1]) + " and " + _deps[-1]
)
)
raise CompatibilityError(*python_constraints)
# Modifying dependencies as needed # Modifying dependencies as needed
for dep in dependencies: for dep in dependencies:
......
...@@ -1552,3 +1552,21 @@ def test_solver_skips_invalid_versions(package, installed, locked, io): ...@@ -1552,3 +1552,21 @@ def test_solver_skips_invalid_versions(package, installed, locked, io):
check_solver_result( check_solver_result(
ops, [{"job": "install", "package": get_package("trackpy", "0.4.1")}] ops, [{"job": "install", "package": get_package("trackpy", "0.4.1")}]
) )
def test_multiple_constraints_on_root(package, solver, repo):
    """The root package may declare the same dependency twice with disjoint
    python markers; the solver should install one matching version per
    constraint."""
    package.add_dependency("foo", {"version": "^1.0", "python": "^2.7"})
    package.add_dependency("foo", {"version": "^2.0", "python": "^3.7"})

    foo_py2 = get_package("foo", "1.5.0")
    foo_py3 = get_package("foo", "2.5.0")
    for candidate in (foo_py2, foo_py3):
        repo.add_package(candidate)

    ops = solver.solve()

    check_solver_result(
        ops,
        [
            {"job": "install", "package": foo_py2},
            {"job": "install", "package": foo_py3},
        ],
    )
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment