Commit 1aa4d5b8 by Anselm Hahn Committed by GitHub

Ideas for refactoring (#4962)

* Ideas for refactoring

And also referring to issue #4961

* Respond to the review

Removed items() from os.environ and added blank lines

* Add link.is_wheel back
parent f6022ead
......@@ -21,10 +21,7 @@ class BuildCommand(EnvCommand):
def handle(self) -> None:
from poetry.core.masonry.builder import Builder
fmt = "all"
if self.option("format"):
fmt = self.option("format")
fmt = self.option("format") or "all"
package = self.poetry.package
self.line(
f"Building <c1>{package.pretty_name}</c1> (<c2>{package.version}</c2>)"
......
......@@ -51,9 +51,9 @@ class CacheClearCommand(Command):
return 0
# Calculate number of entries
entries_count = 0
for _path, _dirs, files in os.walk(str(cache_dir)):
entries_count += len(files)
entries_count = sum(
len(files) for _path, _dirs, files in os.walk(str(cache_dir))
)
delete = self.confirm(f"<question>Delete {entries_count} entries?</>")
if not delete:
......
......@@ -70,9 +70,7 @@ You can specify a package in the following forms:
# Plugins should be installed in the system env to be globally available
system_env = EnvManager.get_system_env(naive=True)
env_dir = Path(
os.getenv("POETRY_HOME") if os.getenv("POETRY_HOME") else system_env.path
)
env_dir = Path(os.getenv("POETRY_HOME") or system_env.path)
# We check for the plugins existence first.
if env_dir.joinpath("pyproject.toml").exists():
......
......@@ -40,9 +40,7 @@ class PluginRemoveCommand(Command):
plugins = self.argument("plugins")
system_env = EnvManager.get_system_env(naive=True)
env_dir = Path(
os.getenv("POETRY_HOME") if os.getenv("POETRY_HOME") else system_env.path
)
env_dir = Path(os.getenv("POETRY_HOME") or system_env.path)
# From this point forward, all the logic will be deferred to
# the remove command, by using the global `pyproject.toml` file.
......
......@@ -50,11 +50,10 @@ list of installed packages
if group is None:
removed = []
group_sections = []
for group_name, group_section in poetry_content.get("group", {}).items():
group_sections.append(
group_sections = [
(group_name, group_section.get("dependencies", {}))
)
for group_name, group_section in poetry_content.get("group", {}).items()
]
for group_name, section in [
("default", poetry_content["dependencies"])
......
......@@ -160,13 +160,12 @@ class Factory(BaseFactory):
from poetry.utils.helpers import get_cert
from poetry.utils.helpers import get_client_cert
if "url" in source:
if "url" not in source:
raise RuntimeError("Unsupported source specified")
# PyPI-like repository
if "name" not in source:
raise RuntimeError("Missing [name] in source.")
else:
raise RuntimeError("Unsupported source specified")
name = source["name"]
url = source["url"]
......
......@@ -322,10 +322,7 @@ class PackageInfo:
if python_requires is None:
python_requires = "*"
requires = ""
for dep in result["install_requires"]:
requires += dep + "\n"
requires = "".join(dep + "\n" for dep in result["install_requires"])
if result["extras_require"]:
requires += "\n"
......
......@@ -85,14 +85,13 @@ class Chooser:
return chosen
def _get_links(self, package: "Package") -> List["Link"]:
if not package.source_type:
if not self._pool.has_repository("pypi"):
if package.source_type:
repository = self._pool.repository(package.source_reference)
elif not self._pool.has_repository("pypi"):
repository = self._pool.repositories[0]
else:
repository = self._pool.repository("pypi")
else:
repository = self._pool.repository(package.source_reference)
links = repository.find_links_for_package(package)
hashes = [f["hash"] for f in package.files]
......@@ -142,7 +141,6 @@ class Chooser:
comparison operators, but then different sdist links
with the same version, would have to be considered equal
"""
support_num = len(self._env.supported_tags)
build_tag = ()
binary_preference = 0
if link.is_wheel:
......@@ -160,6 +158,7 @@ class Chooser:
build_tag_groups = match.groups()
build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
else: # sdist
support_num = len(self._env.supported_tags)
pri = -support_num
has_allowed_hash = int(self._is_link_hash_allowed_for_package(link, package))
......
......@@ -108,17 +108,14 @@ class EditableBuilder(Builder):
os.remove(str(setup))
def _add_pth(self) -> List[Path]:
paths = set()
for include in self._module.includes:
if isinstance(include, PackageInclude) and (
include.is_module() or include.is_package()
):
paths.add(include.base.resolve().as_posix())
content = ""
for path in paths:
content += decode(path + os.linesep)
paths = {
include.base.resolve().as_posix()
for include in self._module.includes
if isinstance(include, PackageInclude)
and (include.is_module() or include.is_package())
}
content = "".join(decode(path + os.linesep) for path in paths)
pth_file = Path(self._module.name).with_suffix(".pth")
# remove any pre-existing pth files for this package
......
......@@ -201,12 +201,10 @@ class _Writer:
derived_cause: ConflictCause = derived.cause
if isinstance(derived_cause.conflict.cause, ConflictCause):
collapsed_derived = derived_cause.conflict
collapsed_ext = derived_cause.other
else:
collapsed_derived = derived_cause.other
if isinstance(derived_cause.conflict.cause, ConflictCause):
collapsed_ext = derived_cause.other
else:
collapsed_ext = derived_cause.conflict
details_for_cause = {}
......
......@@ -36,15 +36,12 @@ class Incompatibility:
if not term.is_positive() or not term.dependency.is_root
]
if (
len(terms) == 1
if len(terms) != 1 and (
# Short-circuit in the common case of a two-term incompatibility with
# two different packages (for example, a dependency).
or len(terms) == 2
and terms[0].dependency.complete_name != terms[-1].dependency.complete_name
len(terms) != 2
or terms[0].dependency.complete_name == terms[-1].dependency.complete_name
):
pass
else:
# Coalesce multiple terms about the same package if possible.
by_name: Dict[str, Dict[str, "Term"]] = {}
for term in terms:
......@@ -178,7 +175,9 @@ class Incompatibility:
term2 = self._terms[1]
if term1.is_positive() == term2.is_positive():
if term1.is_positive():
if not term1.is_positive():
return f"either {self._terse(term1)} or {self._terse(term2)}"
package1 = (
term1.dependency.name
if term1.constraint.is_any()
......@@ -191,8 +190,6 @@ class Incompatibility:
)
return f"{package1} is incompatible with {package2}"
else:
return f"either {self._terse(term1)} or {self._terse(term2)}"
positive = []
negative = []
......@@ -204,12 +201,11 @@ class Incompatibility:
negative.append(self._terse(term))
if positive and negative:
if len(positive) == 1:
positive_term = [term for term in self._terms if term.is_positive()][0]
if len(positive) != 1:
return f"if {' and '.join(positive)} then {' or '.join(negative)}"
positive_term = [term for term in self._terms if term.is_positive()][0]
return f"{self._terse(positive_term, allow_every=True)} requires {' or '.join(negative)}"
else:
return f"if {' and '.join(positive)} then {' or '.join(negative)}"
elif positive:
return f"one of {' or '.join(positive)} must be false"
else:
......
......@@ -23,10 +23,7 @@ class PythonRequirementSolutionProvider(HasSolutionsForException):
str(exception),
)
if not m:
return False
return True
return bool(m)
def get_solutions(self, exception: Exception) -> List["Solution"]:
from poetry.mixology.solutions.solutions.python_requirement_solution import (
......
......@@ -96,9 +96,7 @@ class VersionSolver:
Performs unit propagation on incompatibilities transitively
related to package to derive new assignments for _solution.
"""
changed = set()
changed.add(package)
changed = {package}
while changed:
package = changed.pop()
......@@ -269,10 +267,9 @@ class VersionSolver:
# true (that is, we know for sure no solution will satisfy the
# incompatibility) while also approximating the intuitive notion of the
# "root cause" of the conflict.
new_terms = []
for term in incompatibility.terms:
if term != most_recent_term:
new_terms.append(term)
new_terms = [
term for term in incompatibility.terms if term != most_recent_term
]
for term in most_recent_satisfier.cause.terms:
if term.dependency != most_recent_satisfier.dependency:
......
......@@ -94,9 +94,6 @@ class Solver:
def solve_in_compatibility_mode(
self, overrides: Tuple[Dict, ...], use_latest: List[str] = None
) -> Tuple[List["Package"], List[int]]:
locked = {}
for package in self._locked.packages:
locked[package.name] = DependencyPackage(package.to_dependency(), package)
packages = []
depths = []
......@@ -127,9 +124,10 @@ class Solver:
if self._provider._overrides:
self._overrides.append(self._provider._overrides)
locked = {}
for package in self._locked.packages:
locked[package.name] = DependencyPackage(package.to_dependency(), package)
locked = {
package.name: DependencyPackage(package.to_dependency(), package)
for package in self._locked.packages
}
try:
result = resolve_version(
......@@ -218,16 +216,16 @@ def depth_first_search(
name_children[node.name].extend(node.reachable())
combined_nodes[node.name].append(node)
combined_topo_sorted_nodes = []
for node in topo_sorted_nodes:
if node.name in combined_nodes:
combined_topo_sorted_nodes.append(combined_nodes.pop(node.name))
combined_topo_sorted_nodes = [
combined_nodes.pop(node.name)
for node in topo_sorted_nodes
if node.name in combined_nodes
]
results = [
return [
aggregator(nodes, name_children[nodes[0].name])
for nodes in combined_topo_sorted_nodes
]
return results
def dfs_visit(
......@@ -333,21 +331,20 @@ class PackageNode(DFSNode):
continue
for pkg in self.packages:
if pkg.complete_name == dependency.complete_name and (
if (
pkg.complete_name == dependency.complete_name
and (
dependency.constraint.allows(pkg.version)
or dependency.allows_prereleases()
and pkg.version.is_unstable()
and dependency.constraint.allows(pkg.version.stable)
):
# If there is already a child with this name
# we merge the requirements
if any(
)
and not any(
child.package.name == pkg.name
and child.groups == dependency.groups
for child in children
)
):
continue
children.append(
PackageNode(
pkg,
......
......@@ -65,12 +65,10 @@ class Transaction:
if with_uninstalls:
for current_package in self._current_packages:
found = False
for result_package, _ in self._result_packages:
if current_package.name == result_package.name:
found = True
break
found = any(
current_package.name == result_package.name
for result_package, _ in self._result_packages
)
if not found:
for installed_package in self._installed_packages:
......
......@@ -146,8 +146,7 @@ class InstalledRepository(Repository):
if is_editable_package:
source_type = "directory"
source_url = paths.pop().as_posix()
else:
if cls.is_vcs_package(path, env):
elif cls.is_vcs_package(path, env):
(
source_type,
source_url,
......
......@@ -326,9 +326,7 @@ class PyPiRepository(RemoteRepository):
if json_response.status_code == 404:
return None
json_data = json_response.json()
return json_data
return json_response.json()
def _get_info_from_urls(self, urls: Dict[str, List[str]]) -> "PackageInfo":
# Checking wheels first as they are more likely to hold
......
......@@ -101,16 +101,14 @@ def user_data_dir(appname: str, roaming: bool = False) -> str:
"""
if WINDOWS:
const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
return os.path.join(os.path.normpath(_get_win_folder(const)), appname)
elif sys.platform == "darwin":
path = os.path.join(expanduser("~/Library/Application Support/"), appname)
return os.path.join(expanduser("~/Library/Application Support/"), appname)
else:
path = os.path.join(
return os.path.join(
os.getenv("XDG_DATA_HOME", expanduser("~/.local/share")), appname
)
return path
def user_config_dir(appname: str, roaming: bool = True) -> str:
"""Return full path to the user-specific config dir for this application.
......@@ -220,11 +218,7 @@ def _get_win_folder_with_ctypes(csidl_name: str) -> str:
# Downgrade to short path name if have highbit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in buf:
if ord(c) > 255:
has_high_char = True
break
has_high_char = any(ord(c) > 255 for c in buf)
if has_high_char:
buf2 = ctypes.create_unicode_buffer(1024)
if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
......
......@@ -115,14 +115,10 @@ class Authenticator:
# behaves if more than one @ is present (which can be checked using
# the password attribute of urlsplit()'s return value).
auth, netloc = netloc.rsplit("@", 1)
if ":" in auth:
# Split from the left because that's how urllib.parse.urlsplit()
# behaves if more than one : is present (which again can be checked
# using the password attribute of the return value)
credentials = auth.split(":", 1)
else:
credentials = auth, None
credentials = auth.split(":", 1) if ":" in auth else (auth, None)
credentials = tuple(
None if x is None else urllib.parse.unquote(x) for x in credentials
)
......@@ -166,8 +162,6 @@ class Authenticator:
def _get_credentials_for_netloc(
self, netloc: str
) -> Tuple[Optional[str], Optional[str]]:
credentials = (None, None)
for repository_name in self._config.get("repositories", []):
auth = self._get_http_auth(repository_name, netloc)
......@@ -176,7 +170,7 @@ class Authenticator:
return auth["username"], auth["password"]
return credentials
return None, None
def _get_credentials_for_netloc_from_keyring(
self, url: str, netloc: str, username: Optional[str]
......
......@@ -253,7 +253,6 @@ class SitePackages:
return [path]
except ValueError:
pass
else:
site_type = "writable " if writable_only else ""
raise ValueError(
f"{path} is not relative to any discovered {site_type}sites"
......@@ -284,7 +283,6 @@ class SitePackages:
) -> Optional[metadata.PathDistribution]:
for distribution in self.distributions(name=name, writable_only=writable_only):
return distribution
else:
return None
def find_distribution_files_with_suffix(
......@@ -1333,7 +1331,7 @@ class Env:
"""
call = kwargs.pop("call", False)
input_ = kwargs.pop("input_", None)
env = kwargs.pop("env", {k: v for k, v in os.environ.items()})
env = kwargs.pop("env", dict(os.environ))
try:
if self._is_windows:
......@@ -1364,11 +1362,11 @@ class Env:
def execute(self, bin: str, *args: str, **kwargs: Any) -> Optional[int]:
command = self.get_command_from_bin(bin) + list(args)
env = kwargs.pop("env", {k: v for k, v in os.environ.items()})
env = kwargs.pop("env", dict(os.environ))
if not self._is_windows:
return os.execvpe(command[0], command, env=env)
else:
exe = subprocess.Popen([command[0]] + command[1:], env=env, **kwargs)
exe.communicate()
return exe.returncode
......@@ -1501,12 +1499,9 @@ class SystemEnv(Env):
"platform_version": platform.version(),
"python_full_version": platform.python_version(),
"platform_python_implementation": platform.python_implementation(),
"python_version": ".".join(
v for v in platform.python_version().split(".")[:2]
),
"python_version": ".".join(platform.python_version().split(".")[:2]),
"sys_platform": sys.platform,
"version_info": sys.version_info,
# Extra information
"interpreter_name": interpreter_name(),
"interpreter_version": interpreter_version(),
}
......@@ -1720,11 +1715,11 @@ class GenericEnv(VirtualEnv):
def execute(self, bin: str, *args: str, **kwargs: Any) -> Optional[int]:
command = self.get_command_from_bin(bin) + list(args)
env = kwargs.pop("env", {k: v for k, v in os.environ.items()})
env = kwargs.pop("env", dict(os.environ))
if not self._is_windows:
return os.execvpe(command[0], command, env=env)
else:
exe = subprocess.Popen([command[0]] + command[1:], env=env, **kwargs)
exe.communicate()
......
......@@ -37,7 +37,7 @@ class VersionSelector:
},
)
candidates = self._pool.find_packages(dependency)
only_prereleases = all([c.version.is_unstable() for c in candidates])
only_prereleases = all(c.version.is_unstable() for c in candidates)
if not candidates:
return False
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment