Commit 1aa4d5b8 by Anselm Hahn Committed by GitHub

Ideas for refactoring (#4962)

* Ideas for refactoring

And also referring to issue #4961

* Respond to the review

Removed items() from os.environ and added blank lines

* Add link.is_wheel back
parent f6022ead
...@@ -21,10 +21,7 @@ class BuildCommand(EnvCommand): ...@@ -21,10 +21,7 @@ class BuildCommand(EnvCommand):
def handle(self) -> None: def handle(self) -> None:
from poetry.core.masonry.builder import Builder from poetry.core.masonry.builder import Builder
fmt = "all" fmt = self.option("format") or "all"
if self.option("format"):
fmt = self.option("format")
package = self.poetry.package package = self.poetry.package
self.line( self.line(
f"Building <c1>{package.pretty_name}</c1> (<c2>{package.version}</c2>)" f"Building <c1>{package.pretty_name}</c1> (<c2>{package.version}</c2>)"
......
...@@ -51,9 +51,9 @@ class CacheClearCommand(Command): ...@@ -51,9 +51,9 @@ class CacheClearCommand(Command):
return 0 return 0
# Calculate number of entries # Calculate number of entries
entries_count = 0 entries_count = sum(
for _path, _dirs, files in os.walk(str(cache_dir)): len(files) for _path, _dirs, files in os.walk(str(cache_dir))
entries_count += len(files) )
delete = self.confirm(f"<question>Delete {entries_count} entries?</>") delete = self.confirm(f"<question>Delete {entries_count} entries?</>")
if not delete: if not delete:
......
...@@ -70,9 +70,7 @@ You can specify a package in the following forms: ...@@ -70,9 +70,7 @@ You can specify a package in the following forms:
# Plugins should be installed in the system env to be globally available # Plugins should be installed in the system env to be globally available
system_env = EnvManager.get_system_env(naive=True) system_env = EnvManager.get_system_env(naive=True)
env_dir = Path( env_dir = Path(os.getenv("POETRY_HOME") or system_env.path)
os.getenv("POETRY_HOME") if os.getenv("POETRY_HOME") else system_env.path
)
# We check for the plugins existence first. # We check for the plugins existence first.
if env_dir.joinpath("pyproject.toml").exists(): if env_dir.joinpath("pyproject.toml").exists():
......
...@@ -40,9 +40,7 @@ class PluginRemoveCommand(Command): ...@@ -40,9 +40,7 @@ class PluginRemoveCommand(Command):
plugins = self.argument("plugins") plugins = self.argument("plugins")
system_env = EnvManager.get_system_env(naive=True) system_env = EnvManager.get_system_env(naive=True)
env_dir = Path( env_dir = Path(os.getenv("POETRY_HOME") or system_env.path)
os.getenv("POETRY_HOME") if os.getenv("POETRY_HOME") else system_env.path
)
# From this point forward, all the logic will be deferred to # From this point forward, all the logic will be deferred to
# the remove command, by using the global `pyproject.toml` file. # the remove command, by using the global `pyproject.toml` file.
......
...@@ -50,11 +50,10 @@ list of installed packages ...@@ -50,11 +50,10 @@ list of installed packages
if group is None: if group is None:
removed = [] removed = []
group_sections = [] group_sections = [
for group_name, group_section in poetry_content.get("group", {}).items(): (group_name, group_section.get("dependencies", {}))
group_sections.append( for group_name, group_section in poetry_content.get("group", {}).items()
(group_name, group_section.get("dependencies", {})) ]
)
for group_name, section in [ for group_name, section in [
("default", poetry_content["dependencies"]) ("default", poetry_content["dependencies"])
......
...@@ -160,13 +160,12 @@ class Factory(BaseFactory): ...@@ -160,13 +160,12 @@ class Factory(BaseFactory):
from poetry.utils.helpers import get_cert from poetry.utils.helpers import get_cert
from poetry.utils.helpers import get_client_cert from poetry.utils.helpers import get_client_cert
if "url" in source: if "url" not in source:
# PyPI-like repository
if "name" not in source:
raise RuntimeError("Missing [name] in source.")
else:
raise RuntimeError("Unsupported source specified") raise RuntimeError("Unsupported source specified")
# PyPI-like repository
if "name" not in source:
raise RuntimeError("Missing [name] in source.")
name = source["name"] name = source["name"]
url = source["url"] url = source["url"]
......
...@@ -322,10 +322,7 @@ class PackageInfo: ...@@ -322,10 +322,7 @@ class PackageInfo:
if python_requires is None: if python_requires is None:
python_requires = "*" python_requires = "*"
requires = "" requires = "".join(dep + "\n" for dep in result["install_requires"])
for dep in result["install_requires"]:
requires += dep + "\n"
if result["extras_require"]: if result["extras_require"]:
requires += "\n" requires += "\n"
......
...@@ -85,14 +85,13 @@ class Chooser: ...@@ -85,14 +85,13 @@ class Chooser:
return chosen return chosen
def _get_links(self, package: "Package") -> List["Link"]: def _get_links(self, package: "Package") -> List["Link"]:
if not package.source_type: if package.source_type:
if not self._pool.has_repository("pypi"):
repository = self._pool.repositories[0]
else:
repository = self._pool.repository("pypi")
else:
repository = self._pool.repository(package.source_reference) repository = self._pool.repository(package.source_reference)
elif not self._pool.has_repository("pypi"):
repository = self._pool.repositories[0]
else:
repository = self._pool.repository("pypi")
links = repository.find_links_for_package(package) links = repository.find_links_for_package(package)
hashes = [f["hash"] for f in package.files] hashes = [f["hash"] for f in package.files]
...@@ -142,7 +141,6 @@ class Chooser: ...@@ -142,7 +141,6 @@ class Chooser:
comparison operators, but then different sdist links comparison operators, but then different sdist links
with the same version, would have to be considered equal with the same version, would have to be considered equal
""" """
support_num = len(self._env.supported_tags)
build_tag = () build_tag = ()
binary_preference = 0 binary_preference = 0
if link.is_wheel: if link.is_wheel:
...@@ -160,6 +158,7 @@ class Chooser: ...@@ -160,6 +158,7 @@ class Chooser:
build_tag_groups = match.groups() build_tag_groups = match.groups()
build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
else: # sdist else: # sdist
support_num = len(self._env.supported_tags)
pri = -support_num pri = -support_num
has_allowed_hash = int(self._is_link_hash_allowed_for_package(link, package)) has_allowed_hash = int(self._is_link_hash_allowed_for_package(link, package))
......
...@@ -108,17 +108,14 @@ class EditableBuilder(Builder): ...@@ -108,17 +108,14 @@ class EditableBuilder(Builder):
os.remove(str(setup)) os.remove(str(setup))
def _add_pth(self) -> List[Path]: def _add_pth(self) -> List[Path]:
paths = set() paths = {
for include in self._module.includes: include.base.resolve().as_posix()
if isinstance(include, PackageInclude) and ( for include in self._module.includes
include.is_module() or include.is_package() if isinstance(include, PackageInclude)
): and (include.is_module() or include.is_package())
paths.add(include.base.resolve().as_posix()) }
content = "" content = "".join(decode(path + os.linesep) for path in paths)
for path in paths:
content += decode(path + os.linesep)
pth_file = Path(self._module.name).with_suffix(".pth") pth_file = Path(self._module.name).with_suffix(".pth")
# remove any pre-existing pth files for this package # remove any pre-existing pth files for this package
......
...@@ -201,12 +201,10 @@ class _Writer: ...@@ -201,12 +201,10 @@ class _Writer:
derived_cause: ConflictCause = derived.cause derived_cause: ConflictCause = derived.cause
if isinstance(derived_cause.conflict.cause, ConflictCause): if isinstance(derived_cause.conflict.cause, ConflictCause):
collapsed_derived = derived_cause.conflict collapsed_derived = derived_cause.conflict
collapsed_ext = derived_cause.other
else: else:
collapsed_derived = derived_cause.other collapsed_derived = derived_cause.other
if isinstance(derived_cause.conflict.cause, ConflictCause):
collapsed_ext = derived_cause.other
else:
collapsed_ext = derived_cause.conflict collapsed_ext = derived_cause.conflict
details_for_cause = {} details_for_cause = {}
......
...@@ -36,15 +36,12 @@ class Incompatibility: ...@@ -36,15 +36,12 @@ class Incompatibility:
if not term.is_positive() or not term.dependency.is_root if not term.is_positive() or not term.dependency.is_root
] ]
if ( if len(terms) != 1 and (
len(terms) == 1
# Short-circuit in the common case of a two-term incompatibility with # Short-circuit in the common case of a two-term incompatibility with
# two different packages (for example, a dependency). # two different packages (for example, a dependency).
or len(terms) == 2 len(terms) != 2
and terms[0].dependency.complete_name != terms[-1].dependency.complete_name or terms[0].dependency.complete_name == terms[-1].dependency.complete_name
): ):
pass
else:
# Coalesce multiple terms about the same package if possible. # Coalesce multiple terms about the same package if possible.
by_name: Dict[str, Dict[str, "Term"]] = {} by_name: Dict[str, Dict[str, "Term"]] = {}
for term in terms: for term in terms:
...@@ -178,22 +175,22 @@ class Incompatibility: ...@@ -178,22 +175,22 @@ class Incompatibility:
term2 = self._terms[1] term2 = self._terms[1]
if term1.is_positive() == term2.is_positive(): if term1.is_positive() == term2.is_positive():
if term1.is_positive(): if not term1.is_positive():
package1 = (
term1.dependency.name
if term1.constraint.is_any()
else self._terse(term1)
)
package2 = (
term2.dependency.name
if term2.constraint.is_any()
else self._terse(term2)
)
return f"{package1} is incompatible with {package2}"
else:
return f"either {self._terse(term1)} or {self._terse(term2)}" return f"either {self._terse(term1)} or {self._terse(term2)}"
package1 = (
term1.dependency.name
if term1.constraint.is_any()
else self._terse(term1)
)
package2 = (
term2.dependency.name
if term2.constraint.is_any()
else self._terse(term2)
)
return f"{package1} is incompatible with {package2}"
positive = [] positive = []
negative = [] negative = []
...@@ -204,12 +201,11 @@ class Incompatibility: ...@@ -204,12 +201,11 @@ class Incompatibility:
negative.append(self._terse(term)) negative.append(self._terse(term))
if positive and negative: if positive and negative:
if len(positive) == 1: if len(positive) != 1:
positive_term = [term for term in self._terms if term.is_positive()][0]
return f"{self._terse(positive_term, allow_every=True)} requires {' or '.join(negative)}"
else:
return f"if {' and '.join(positive)} then {' or '.join(negative)}" return f"if {' and '.join(positive)} then {' or '.join(negative)}"
positive_term = [term for term in self._terms if term.is_positive()][0]
return f"{self._terse(positive_term, allow_every=True)} requires {' or '.join(negative)}"
elif positive: elif positive:
return f"one of {' or '.join(positive)} must be false" return f"one of {' or '.join(positive)} must be false"
else: else:
......
...@@ -23,10 +23,7 @@ class PythonRequirementSolutionProvider(HasSolutionsForException): ...@@ -23,10 +23,7 @@ class PythonRequirementSolutionProvider(HasSolutionsForException):
str(exception), str(exception),
) )
if not m: return bool(m)
return False
return True
def get_solutions(self, exception: Exception) -> List["Solution"]: def get_solutions(self, exception: Exception) -> List["Solution"]:
from poetry.mixology.solutions.solutions.python_requirement_solution import ( from poetry.mixology.solutions.solutions.python_requirement_solution import (
......
...@@ -96,9 +96,7 @@ class VersionSolver: ...@@ -96,9 +96,7 @@ class VersionSolver:
Performs unit propagation on incompatibilities transitively Performs unit propagation on incompatibilities transitively
related to package to derive new assignments for _solution. related to package to derive new assignments for _solution.
""" """
changed = set() changed = {package}
changed.add(package)
while changed: while changed:
package = changed.pop() package = changed.pop()
...@@ -269,10 +267,9 @@ class VersionSolver: ...@@ -269,10 +267,9 @@ class VersionSolver:
# true (that is, we know for sure no solution will satisfy the # true (that is, we know for sure no solution will satisfy the
# incompatibility) while also approximating the intuitive notion of the # incompatibility) while also approximating the intuitive notion of the
# "root cause" of the conflict. # "root cause" of the conflict.
new_terms = [] new_terms = [
for term in incompatibility.terms: term for term in incompatibility.terms if term != most_recent_term
if term != most_recent_term: ]
new_terms.append(term)
for term in most_recent_satisfier.cause.terms: for term in most_recent_satisfier.cause.terms:
if term.dependency != most_recent_satisfier.dependency: if term.dependency != most_recent_satisfier.dependency:
......
...@@ -94,9 +94,6 @@ class Solver: ...@@ -94,9 +94,6 @@ class Solver:
def solve_in_compatibility_mode( def solve_in_compatibility_mode(
self, overrides: Tuple[Dict, ...], use_latest: List[str] = None self, overrides: Tuple[Dict, ...], use_latest: List[str] = None
) -> Tuple[List["Package"], List[int]]: ) -> Tuple[List["Package"], List[int]]:
locked = {}
for package in self._locked.packages:
locked[package.name] = DependencyPackage(package.to_dependency(), package)
packages = [] packages = []
depths = [] depths = []
...@@ -127,9 +124,10 @@ class Solver: ...@@ -127,9 +124,10 @@ class Solver:
if self._provider._overrides: if self._provider._overrides:
self._overrides.append(self._provider._overrides) self._overrides.append(self._provider._overrides)
locked = {} locked = {
for package in self._locked.packages: package.name: DependencyPackage(package.to_dependency(), package)
locked[package.name] = DependencyPackage(package.to_dependency(), package) for package in self._locked.packages
}
try: try:
result = resolve_version( result = resolve_version(
...@@ -218,16 +216,16 @@ def depth_first_search( ...@@ -218,16 +216,16 @@ def depth_first_search(
name_children[node.name].extend(node.reachable()) name_children[node.name].extend(node.reachable())
combined_nodes[node.name].append(node) combined_nodes[node.name].append(node)
combined_topo_sorted_nodes = [] combined_topo_sorted_nodes = [
for node in topo_sorted_nodes: combined_nodes.pop(node.name)
if node.name in combined_nodes: for node in topo_sorted_nodes
combined_topo_sorted_nodes.append(combined_nodes.pop(node.name)) if node.name in combined_nodes
]
results = [ return [
aggregator(nodes, name_children[nodes[0].name]) aggregator(nodes, name_children[nodes[0].name])
for nodes in combined_topo_sorted_nodes for nodes in combined_topo_sorted_nodes
] ]
return results
def dfs_visit( def dfs_visit(
...@@ -333,21 +331,20 @@ class PackageNode(DFSNode): ...@@ -333,21 +331,20 @@ class PackageNode(DFSNode):
continue continue
for pkg in self.packages: for pkg in self.packages:
if pkg.complete_name == dependency.complete_name and ( if (
dependency.constraint.allows(pkg.version) pkg.complete_name == dependency.complete_name
or dependency.allows_prereleases() and (
and pkg.version.is_unstable() dependency.constraint.allows(pkg.version)
and dependency.constraint.allows(pkg.version.stable) or dependency.allows_prereleases()
): and pkg.version.is_unstable()
# If there is already a child with this name and dependency.constraint.allows(pkg.version.stable)
# we merge the requirements )
if any( and not any(
child.package.name == pkg.name child.package.name == pkg.name
and child.groups == dependency.groups and child.groups == dependency.groups
for child in children for child in children
): )
continue ):
children.append( children.append(
PackageNode( PackageNode(
pkg, pkg,
......
...@@ -65,12 +65,10 @@ class Transaction: ...@@ -65,12 +65,10 @@ class Transaction:
if with_uninstalls: if with_uninstalls:
for current_package in self._current_packages: for current_package in self._current_packages:
found = False found = any(
for result_package, _ in self._result_packages: current_package.name == result_package.name
if current_package.name == result_package.name: for result_package, _ in self._result_packages
found = True )
break
if not found: if not found:
for installed_package in self._installed_packages: for installed_package in self._installed_packages:
......
...@@ -146,19 +146,18 @@ class InstalledRepository(Repository): ...@@ -146,19 +146,18 @@ class InstalledRepository(Repository):
if is_editable_package: if is_editable_package:
source_type = "directory" source_type = "directory"
source_url = paths.pop().as_posix() source_url = paths.pop().as_posix()
elif cls.is_vcs_package(path, env):
(
source_type,
source_url,
source_reference,
) = cls.get_package_vcs_properties_from_path(
env.path / "src" / canonicalize_name(distribution.metadata["name"])
)
else: else:
if cls.is_vcs_package(path, env): # If not, it's a path dependency
( source_type = "directory"
source_type, source_url = str(path.parent)
source_url,
source_reference,
) = cls.get_package_vcs_properties_from_path(
env.path / "src" / canonicalize_name(distribution.metadata["name"])
)
else:
# If not, it's a path dependency
source_type = "directory"
source_url = str(path.parent)
package = Package( package = Package(
distribution.metadata["name"], distribution.metadata["name"],
......
...@@ -326,9 +326,7 @@ class PyPiRepository(RemoteRepository): ...@@ -326,9 +326,7 @@ class PyPiRepository(RemoteRepository):
if json_response.status_code == 404: if json_response.status_code == 404:
return None return None
json_data = json_response.json() return json_response.json()
return json_data
def _get_info_from_urls(self, urls: Dict[str, List[str]]) -> "PackageInfo": def _get_info_from_urls(self, urls: Dict[str, List[str]]) -> "PackageInfo":
# Checking wheels first as they are more likely to hold # Checking wheels first as they are more likely to hold
......
...@@ -101,16 +101,14 @@ def user_data_dir(appname: str, roaming: bool = False) -> str: ...@@ -101,16 +101,14 @@ def user_data_dir(appname: str, roaming: bool = False) -> str:
""" """
if WINDOWS: if WINDOWS:
const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA" const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
path = os.path.join(os.path.normpath(_get_win_folder(const)), appname) return os.path.join(os.path.normpath(_get_win_folder(const)), appname)
elif sys.platform == "darwin": elif sys.platform == "darwin":
path = os.path.join(expanduser("~/Library/Application Support/"), appname) return os.path.join(expanduser("~/Library/Application Support/"), appname)
else: else:
path = os.path.join( return os.path.join(
os.getenv("XDG_DATA_HOME", expanduser("~/.local/share")), appname os.getenv("XDG_DATA_HOME", expanduser("~/.local/share")), appname
) )
return path
def user_config_dir(appname: str, roaming: bool = True) -> str: def user_config_dir(appname: str, roaming: bool = True) -> str:
"""Return full path to the user-specific config dir for this application. """Return full path to the user-specific config dir for this application.
...@@ -220,11 +218,7 @@ def _get_win_folder_with_ctypes(csidl_name: str) -> str: ...@@ -220,11 +218,7 @@ def _get_win_folder_with_ctypes(csidl_name: str) -> str:
# Downgrade to short path name if have highbit chars. See # Downgrade to short path name if have highbit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>. # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False has_high_char = any(ord(c) > 255 for c in buf)
for c in buf:
if ord(c) > 255:
has_high_char = True
break
if has_high_char: if has_high_char:
buf2 = ctypes.create_unicode_buffer(1024) buf2 = ctypes.create_unicode_buffer(1024)
if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
......
...@@ -115,14 +115,10 @@ class Authenticator: ...@@ -115,14 +115,10 @@ class Authenticator:
# behaves if more than one @ is present (which can be checked using # behaves if more than one @ is present (which can be checked using
# the password attribute of urlsplit()'s return value). # the password attribute of urlsplit()'s return value).
auth, netloc = netloc.rsplit("@", 1) auth, netloc = netloc.rsplit("@", 1)
if ":" in auth: # Split from the left because that's how urllib.parse.urlsplit()
# Split from the left because that's how urllib.parse.urlsplit() # behaves if more than one : is present (which again can be checked
# behaves if more than one : is present (which again can be checked # using the password attribute of the return value)
# using the password attribute of the return value) credentials = auth.split(":", 1) if ":" in auth else (auth, None)
credentials = auth.split(":", 1)
else:
credentials = auth, None
credentials = tuple( credentials = tuple(
None if x is None else urllib.parse.unquote(x) for x in credentials None if x is None else urllib.parse.unquote(x) for x in credentials
) )
...@@ -166,8 +162,6 @@ class Authenticator: ...@@ -166,8 +162,6 @@ class Authenticator:
def _get_credentials_for_netloc( def _get_credentials_for_netloc(
self, netloc: str self, netloc: str
) -> Tuple[Optional[str], Optional[str]]: ) -> Tuple[Optional[str], Optional[str]]:
credentials = (None, None)
for repository_name in self._config.get("repositories", []): for repository_name in self._config.get("repositories", []):
auth = self._get_http_auth(repository_name, netloc) auth = self._get_http_auth(repository_name, netloc)
...@@ -176,7 +170,7 @@ class Authenticator: ...@@ -176,7 +170,7 @@ class Authenticator:
return auth["username"], auth["password"] return auth["username"], auth["password"]
return credentials return None, None
def _get_credentials_for_netloc_from_keyring( def _get_credentials_for_netloc_from_keyring(
self, url: str, netloc: str, username: Optional[str] self, url: str, netloc: str, username: Optional[str]
......
...@@ -253,11 +253,10 @@ class SitePackages: ...@@ -253,11 +253,10 @@ class SitePackages:
return [path] return [path]
except ValueError: except ValueError:
pass pass
else: site_type = "writable " if writable_only else ""
site_type = "writable " if writable_only else "" raise ValueError(
raise ValueError( f"{path} is not relative to any discovered {site_type}sites"
f"{path} is not relative to any discovered {site_type}sites" )
)
results = [candidate / path for candidate in candidates if candidate] results = [candidate / path for candidate in candidates if candidate]
...@@ -284,8 +283,7 @@ class SitePackages: ...@@ -284,8 +283,7 @@ class SitePackages:
) -> Optional[metadata.PathDistribution]: ) -> Optional[metadata.PathDistribution]:
for distribution in self.distributions(name=name, writable_only=writable_only): for distribution in self.distributions(name=name, writable_only=writable_only):
return distribution return distribution
else: return None
return None
def find_distribution_files_with_suffix( def find_distribution_files_with_suffix(
self, distribution_name: str, suffix: str, writable_only: bool = False self, distribution_name: str, suffix: str, writable_only: bool = False
...@@ -1333,7 +1331,7 @@ class Env: ...@@ -1333,7 +1331,7 @@ class Env:
""" """
call = kwargs.pop("call", False) call = kwargs.pop("call", False)
input_ = kwargs.pop("input_", None) input_ = kwargs.pop("input_", None)
env = kwargs.pop("env", {k: v for k, v in os.environ.items()}) env = kwargs.pop("env", dict(os.environ))
try: try:
if self._is_windows: if self._is_windows:
...@@ -1364,14 +1362,14 @@ class Env: ...@@ -1364,14 +1362,14 @@ class Env:
def execute(self, bin: str, *args: str, **kwargs: Any) -> Optional[int]: def execute(self, bin: str, *args: str, **kwargs: Any) -> Optional[int]:
command = self.get_command_from_bin(bin) + list(args) command = self.get_command_from_bin(bin) + list(args)
env = kwargs.pop("env", {k: v for k, v in os.environ.items()}) env = kwargs.pop("env", dict(os.environ))
if not self._is_windows: if not self._is_windows:
return os.execvpe(command[0], command, env=env) return os.execvpe(command[0], command, env=env)
else:
exe = subprocess.Popen([command[0]] + command[1:], env=env, **kwargs) exe = subprocess.Popen([command[0]] + command[1:], env=env, **kwargs)
exe.communicate() exe.communicate()
return exe.returncode return exe.returncode
def is_venv(self) -> bool: def is_venv(self) -> bool:
raise NotImplementedError() raise NotImplementedError()
...@@ -1501,12 +1499,9 @@ class SystemEnv(Env): ...@@ -1501,12 +1499,9 @@ class SystemEnv(Env):
"platform_version": platform.version(), "platform_version": platform.version(),
"python_full_version": platform.python_version(), "python_full_version": platform.python_version(),
"platform_python_implementation": platform.python_implementation(), "platform_python_implementation": platform.python_implementation(),
"python_version": ".".join( "python_version": ".".join(platform.python_version().split(".")[:2]),
v for v in platform.python_version().split(".")[:2]
),
"sys_platform": sys.platform, "sys_platform": sys.platform,
"version_info": sys.version_info, "version_info": sys.version_info,
# Extra information
"interpreter_name": interpreter_name(), "interpreter_name": interpreter_name(),
"interpreter_version": interpreter_version(), "interpreter_version": interpreter_version(),
} }
...@@ -1720,15 +1715,15 @@ class GenericEnv(VirtualEnv): ...@@ -1720,15 +1715,15 @@ class GenericEnv(VirtualEnv):
def execute(self, bin: str, *args: str, **kwargs: Any) -> Optional[int]: def execute(self, bin: str, *args: str, **kwargs: Any) -> Optional[int]:
command = self.get_command_from_bin(bin) + list(args) command = self.get_command_from_bin(bin) + list(args)
env = kwargs.pop("env", {k: v for k, v in os.environ.items()}) env = kwargs.pop("env", dict(os.environ))
if not self._is_windows: if not self._is_windows:
return os.execvpe(command[0], command, env=env) return os.execvpe(command[0], command, env=env)
else:
exe = subprocess.Popen([command[0]] + command[1:], env=env, **kwargs)
exe.communicate()
return exe.returncode exe = subprocess.Popen([command[0]] + command[1:], env=env, **kwargs)
exe.communicate()
return exe.returncode
def _run(self, cmd: List[str], **kwargs: Any) -> Optional[int]: def _run(self, cmd: List[str], **kwargs: Any) -> Optional[int]:
return super(VirtualEnv, self)._run(cmd, **kwargs) return super(VirtualEnv, self)._run(cmd, **kwargs)
......
...@@ -37,7 +37,7 @@ class VersionSelector: ...@@ -37,7 +37,7 @@ class VersionSelector:
}, },
) )
candidates = self._pool.find_packages(dependency) candidates = self._pool.find_packages(dependency)
only_prereleases = all([c.version.is_unstable() for c in candidates]) only_prereleases = all(c.version.is_unstable() for c in candidates)
if not candidates: if not candidates:
return False return False
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment