Commit 35326219 by Bjorn Neergaard

chore: fix comprehensions according to flake8-comprehensions

parent 1cf06510
......@@ -46,6 +46,7 @@ repos:
- id: flake8
additional_dependencies:
- flake8-bugbear==21.9.2
- flake8-comprehensions==3.7.0
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.910-1
......
......@@ -14,7 +14,7 @@ class CacheListCommand(Command):
from poetry.locations import REPOSITORY_CACHE_DIR
if os.path.exists(str(REPOSITORY_CACHE_DIR)):
caches = list(sorted(REPOSITORY_CACHE_DIR.iterdir()))
caches = sorted(REPOSITORY_CACHE_DIR.iterdir())
if caches:
for cache in caches:
self.line("<info>{}</>".format(cache.name))
......
......@@ -414,7 +414,7 @@ The <c1>init</c1> command creates a basic <comment>pyproject.toml</> file in the
parsed = ParsedUrl.parse(requirement)
url = Git.normalize_url(requirement)
pair = dict([("name", parsed.name), ("git", url.url)])
pair = {"name": parsed.name, "git": url.url}
if parsed.rev:
pair["rev"] = url.revision
......@@ -431,7 +431,7 @@ The <c1>init</c1> command creates a basic <comment>pyproject.toml</> file in the
elif url_parsed.scheme in ["http", "https"]:
package = Provider.get_package_from_url(requirement)
pair = dict([("name", package.name), ("url", package.source_url)])
pair = {"name": package.name, "url": package.source_url}
if extras:
pair["extras"] = extras
......@@ -475,7 +475,7 @@ The <c1>init</c1> command creates a basic <comment>pyproject.toml</> file in the
)
pair = pair.strip()
require = dict()
require = {}
if " " in pair:
name, version = pair.split(" ", 2)
extras_m = re.search(r"\[([\w\d,-_]+)\]$", name)
......
......@@ -163,7 +163,7 @@ lists all packages available."""
with solver.use_environment(self.env):
ops = solver.solve().calculate_operations()
required_locked_packages = set([op.package for op in ops if not op.skipped])
required_locked_packages = {op.package for op in ops if not op.skipped}
if package:
pkg = None
......
......@@ -221,7 +221,7 @@ class Factory(BaseFactory):
constraint["markers"] = str(dep.marker)
if dep.extras:
constraint["extras"] = list(sorted(dep.extras))
constraint["extras"] = sorted(dep.extras)
if len(constraint) == 1 and "version" in constraint:
constraint = constraint["version"]
......
......@@ -84,7 +84,7 @@ class Executor:
self._executed_operations = 0
self._executed = {"install": 0, "update": 0, "uninstall": 0}
self._skipped = {"install": 0, "update": 0, "uninstall": 0}
self._sections = dict()
self._sections = {}
self._lock = threading.Lock()
self._shutdown = False
self._hashes: Dict[str, str] = {}
......@@ -150,7 +150,7 @@ class Executor:
# We group operations by priority
groups = itertools.groupby(operations, key=lambda o: -o.priority)
self._sections = dict()
self._sections = {}
for _, group in groups:
tasks = []
serial_operations = []
......
......@@ -28,13 +28,13 @@ class PartialSolution:
self._assignments: List[Assignment] = []
# The decisions made for each package.
self._decisions: Dict[str, "Package"] = dict()
self._decisions: Dict[str, "Package"] = {}
# The intersection of all positive Assignments for each package, minus any
# negative Assignments that refer to that package.
#
# This is derived from self._assignments.
self._positive: Dict[str, Term] = dict()
self._positive: Dict[str, Term] = {}
# The union of all negative Assignments for each package.
#
......@@ -42,7 +42,7 @@ class PartialSolution:
# map.
#
# This is derived from self._assignments.
self._negative: Dict[str, Dict[str, Term]] = dict()
self._negative: Dict[str, Dict[str, Term]] = {}
# The number of distinct solutions that have been attempted so far.
self._attempted_solutions = 1
......
......@@ -327,7 +327,7 @@ class Locker:
pinned_versions=pinned_versions,
packages_by_name=packages_by_name,
project_level_dependencies=project_level_dependencies,
nested_dependencies=dict(),
nested_dependencies={},
)
# Merge same dependencies using marker union
......@@ -424,14 +424,12 @@ class Locker:
for extra, deps in sorted(root.extras.items())
}
lock["metadata"] = dict(
[
("lock-version", self._VERSION),
("python-versions", root.python_versions),
("content-hash", self._content_hash),
("files", files),
]
)
lock["metadata"] = {
"lock-version": self._VERSION,
"python-versions": root.python_versions,
"content-hash": self._content_hash,
"files": files,
}
if not self.is_locked() or lock != self.lock_data:
self._write_lock_data(lock)
......@@ -556,17 +554,15 @@ class Locker:
constraint["version"] for constraint in constraints
]
data = dict(
[
("name", package.pretty_name),
("version", package.pretty_version),
("description", package.description or ""),
("category", package.category),
("optional", package.optional),
("python-versions", package.python_versions),
("files", sorted(package.files, key=lambda x: x["file"])),
]
)
data = {
"name": package.pretty_name,
"version": package.pretty_version,
"description": package.description or "",
"category": package.category,
"optional": package.optional,
"python-versions": package.python_versions,
"files": sorted(package.files, key=lambda x: x["file"]),
}
if dependencies:
data["dependencies"] = table()
......@@ -600,7 +596,7 @@ class Locker:
)
).as_posix()
data["source"] = dict()
data["source"] = {}
if package.source_type:
data["source"]["type"] = package.source_type
......
......@@ -543,7 +543,7 @@ class Provider:
# An example of this is:
# - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
# - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
duplicates = dict()
duplicates = {}
for dep in dependencies:
if dep.complete_name not in duplicates:
duplicates[dep.complete_name] = []
......@@ -559,7 +559,7 @@ class Provider:
self.debug(f"<debug>Duplicate dependencies for {dep_name}</debug>")
# Regrouping by constraint
by_constraint = dict()
by_constraint = {}
for dep in deps:
if dep.constraint not in by_constraint:
by_constraint[dep.constraint] = []
......
......@@ -414,7 +414,7 @@ class PyPiRepository(RemoteRepository):
)
requires_dist.append(dep.to_pep_508())
info.requires_dist = sorted(list(set(requires_dist)))
info.requires_dist = sorted(set(requires_dist))
if info:
return info
......
......@@ -836,11 +836,10 @@ class EnvManager:
)
)
for python_to_try in reversed(
sorted(
self._poetry.package.AVAILABLE_PYTHONS,
key=lambda v: (v.startswith("3"), -len(v), v),
)
for python_to_try in sorted(
self._poetry.package.AVAILABLE_PYTHONS,
key=lambda v: (v.startswith("3"), -len(v), v),
reverse=True,
):
if len(python_to_try) == 1:
if not parse_constraint(f"^{python_to_try}.0").allows_any(
......
......@@ -593,15 +593,12 @@ def test_run_install_with_optional_group_selected(
@pytest.mark.parametrize(
"managed_reserved_package_names",
[
i
for i in itertools.chain(
[tuple()],
itertools.permutations(RESERVED_PACKAGES, 1),
itertools.permutations(RESERVED_PACKAGES, 2),
[RESERVED_PACKAGES],
)
],
itertools.chain(
[()],
itertools.permutations(RESERVED_PACKAGES, 1),
itertools.permutations(RESERVED_PACKAGES, 2),
[RESERVED_PACKAGES],
),
)
def test_run_install_with_synchronization(
managed_reserved_package_names, installer, locker, repo, package, installed
......@@ -672,7 +669,7 @@ def test_run_install_with_synchronization(
*managed_reserved_package_names,
}
assert expected_removals == set(r.name for r in installer.executor.removals)
assert expected_removals == {r.name for r in installer.executor.removals}
def test_run_whitelist_add(installer, locker, repo, package):
......
......@@ -295,15 +295,12 @@ def test_run_install_no_group(installer, locker, repo, package, installed):
@pytest.mark.parametrize(
"managed_reserved_package_names",
[
i
for i in itertools.chain(
[tuple()],
itertools.permutations(RESERVED_PACKAGES, 1),
itertools.permutations(RESERVED_PACKAGES, 2),
[RESERVED_PACKAGES],
)
],
itertools.chain(
[()],
itertools.permutations(RESERVED_PACKAGES, 1),
itertools.permutations(RESERVED_PACKAGES, 2),
[RESERVED_PACKAGES],
),
)
def test_run_install_with_synchronization(
managed_reserved_package_names, installer, locker, repo, package, installed
......@@ -377,7 +374,7 @@ def test_run_install_with_synchronization(
package_c.name,
*managed_reserved_package_names,
}
assert set(r.name for r in removals) == expected_removals
assert {r.name for r in removals} == expected_removals
def test_run_whitelist_add(installer, locker, repo, package):
......
......@@ -173,7 +173,7 @@ def test_authenticator_request_retries_on_exception(mocker, config, http):
sleep = mocker.patch("time.sleep")
sdist_uri = "https://foo.bar/files/{}/foo-0.1.0.tar.gz".format(str(uuid.uuid4()))
content = str(uuid.uuid4())
seen = list()
seen = []
def callback(request, uri, response_headers):
if seen.count(uri) < 2:
......
......@@ -482,12 +482,12 @@ def test_exporter_can_export_requirements_txt_with_nested_packages_and_markers_a
root = poetry.package.with_dependency_groups([], only=True)
root.add_dependency(
Factory.create_dependency(
name="a", constraint=dict(version="^1.2.3", python="<3.8")
name="a", constraint={"version": "^1.2.3", "python": "<3.8"}
)
)
root.add_dependency(
Factory.create_dependency(
name="b", constraint=dict(version="^4.5.6"), groups=["dev"]
name="b", constraint={"version": "^4.5.6"}, groups=["dev"]
)
)
poetry._package = root
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment