From f12de9a9d021ca68ea9c897884c12dc30558674a Mon Sep 17 00:00:00 2001
From: Jon Dufresne
Date: Sun, 29 Nov 2020 13:32:45 -0800
Subject: [PATCH 1/9] Run pyupgrade for minor code cleanup

https://github.com/asottile/pyupgrade

Cleanups:

- set comprehension
- drop default open() argument
---
 piptools/cache.py | 2 +-
 piptools/repositories/pypi.py | 2 +-
 tests/conftest.py | 4 ++--
 tests/test_cli_compile.py | 10 +++++-----
 tests/test_sync.py | 4 ++--
 5 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/piptools/cache.py b/piptools/cache.py
index 301d38bd5..8bd01506f 100644
--- a/piptools/cache.py
+++ b/piptools/cache.py
@@ -39,7 +39,7 @@ def __str__(self):
 
 
 def read_cache_file(cache_file_path):
-    with open(cache_file_path, "r") as cache_file:
+    with open(cache_file_path) as cache_file:
         try:
             doc = json.load(cache_file)
         except ValueError:
diff --git a/piptools/repositories/pypi.py b/piptools/repositories/pypi.py
index 7a988bfc1..d065e235e 100644
--- a/piptools/repositories/pypi.py
+++ b/piptools/repositories/pypi.py
@@ -393,7 +393,7 @@ def _get_hashes_from_files(self, ireq):
         all_candidates = self.find_all_candidates(ireq.name)
         candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
         matching_versions = list(
-            ireq.specifier.filter((candidate.version for candidate in all_candidates))
+            ireq.specifier.filter(candidate.version for candidate in all_candidates)
        )
         matching_candidates = candidates_by_version[matching_versions[0]]
 
diff --git a/tests/conftest.py b/tests/conftest.py
index e96b621f5..6adf2cb7f 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -35,10 +35,10 @@
 
 class FakeRepository(BaseRepository):
     def __init__(self):
-        with open("tests/test_data/fake-index.json", "r") as f:
+        with open("tests/test_data/fake-index.json") as f:
             self.index = json.load(f)
 
-        with open("tests/test_data/fake-editables.json", "r") as f:
+        with open("tests/test_data/fake-editables.json") as f:
             self.editables = json.load(f)
 
     @contextmanager
diff --git a/tests/test_cli_compile.py b/tests/test_cli_compile.py
index 993bfe4ee..22106268e 100644
--- a/tests/test_cli_compile.py
+++ b/tests/test_cli_compile.py
@@ -118,7 +118,7 @@ def test_find_links_option(runner):
     assert "Using links:\n ./libs1\n ./libs2\n ./libs3\n" in out.stderr
 
     # Check that find-links has been written to a requirements.txt
-    with open("requirements.txt", "r") as req_txt:
+    with open("requirements.txt") as req_txt:
         assert (
             "--find-links ./libs1\n--find-links ./libs2\n--find-links ./libs3\n"
             in req_txt.read()
         )
@@ -136,7 +136,7 @@ def test_find_links_envvar(monkeypatch, runner):
     assert "Using links:\n ./libs1\n ./libs2\n ./libs3\n" in out.stderr
 
     # Check that find-links has been written to a requirements.txt
-    with open("requirements.txt", "r") as req_txt:
+    with open("requirements.txt") as req_txt:
         assert (
             "--find-links ./libs1\n--find-links ./libs2\n--find-links ./libs3\n"
             in req_txt.read()
         )
@@ -1166,7 +1166,7 @@ def test_dry_run_doesnt_touch_output_file(
     assert expected_cli_output_package in out.stderr.splitlines()
 
     # The package version must NOT be updated in the output file
-    with open("requirements.txt", "r") as req_txt:
+    with open("requirements.txt") as req_txt:
         assert "small-fake-a==0.1" in req_txt.read().splitlines()
 
     # The output file must not be touched
@@ -1196,7 +1196,7 @@ def test_empty_input_file_no_header(runner, empty_input_pkg, prior_output_pkg):
 
     runner.invoke(cli, ["--no-header", "requirements.in"])
 
-    with open("requirements.txt", "r") as req_txt:
+    with open("requirements.txt") as req_txt:
         assert req_txt.read().strip() == ""
@@ -1218,7 +1218,7 @@ def test_upgrade_package_doesnt_remove_annotation(pip_conf, runner):
     )
     runner.invoke(cli, ["-P", "small-fake-a", "--no-emit-find-links"])
 
-    with open("requirements.txt", "r") as req_txt:
+    with open("requirements.txt") as req_txt:
         assert req_txt.read() == dedent(
             """\
             #
diff --git a/tests/test_sync.py b/tests/test_sync.py
index f1ea97613..e4cc1fd8d 100644
--- a/tests/test_sync.py
+++ b/tests/test_sync.py
@@ -400,7 +400,7 @@ def test_sync_dry_run(runner, from_line, to_install, to_uninstall, expected_mess
     """
     Sync with --dry-run option prints what's is going to be installed/uninstalled.
     """
-    to_install = set(from_line(pkg) for pkg in to_install)
+    to_install = {from_line(pkg) for pkg in to_install}
 
     with runner.isolation() as (stdout, _):
         sync(to_install, to_uninstall, dry_run=True)
@@ -427,7 +427,7 @@ def test_sync_ask_declined(
     Sync with --ask option does a dry run if the user declines
 
-    to_install = set(from_line(pkg) for pkg in to_install)
+    to_install = {from_line(pkg) for pkg in to_install}
 
     with runner.isolation("n\n") as (stdout, _):
         sync(to_install, to_uninstall, ask=True)

From de00f69a54cfdc9d00b548db84e1adcffc167e60 Mon Sep 17 00:00:00 2001
From: Jon Dufresne
Date: Wed, 25 Nov 2020 14:25:57 -0800
Subject: [PATCH 2/9] Remove testing for EOL Python 3.5

Python 3.5 went EOL 2020-09-30.

For a list of supported Python versions, see:
https://devguide.python.org/#status-of-python-branches

For a list of EOL Python versions and their dates, see:
https://devguide.python.org/devcycle/#end-of-life-branches
---
 .github/workflows/ci.yml | 1 -
 .github/workflows/cron.yml | 1 -
 README.rst | 4 +++-
 setup.cfg | 3 +--
 tox.ini | 2 +-
 5 files changed, 5 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3c8509853..34670c964 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -24,7 +24,6 @@ jobs:
         python-version:
           - 3.9
           - 2.7
-          - 3.5
           - 3.6
           - 3.7
           - 3.8
diff --git a/.github/workflows/cron.yml b/.github/workflows/cron.yml
index 9721ae8b1..60f4ed401 100644
--- a/.github/workflows/cron.yml
+++ b/.github/workflows/cron.yml
@@ -19,7 +19,6 @@ jobs:
         python-version:
           - 3.9
           - 2.7
-          - 3.5
           - 3.6
           - 3.7
           - 3.8
diff --git a/README.rst b/README.rst
index eac48de6c..c6c6c8825 100644
--- a/README.rst
+++ b/README.rst
@@ -493,5 +493,7 @@ versions as the required ``pip`` versions.
 +---------------+----------------+----------------+
 | 5.4.0         | 20.1 - 20.3.*  | 2.7, 3.5 - 3.8 |
 +---------------+----------------+----------------+
-| >= 5.5.0      | 20.1 - 20.3.*  | 2.7, 3.5 - 3.9 |
+| 5.5.0         | 20.1 - 20.3.*  | 2.7, 3.5 - 3.9 |
++---------------+----------------+----------------+
+| 6.0.0         | 20.1 - 20.3.*  | 2.7, 3.6 - 3.9 |
 +---------------+----------------+----------------+
diff --git a/setup.cfg b/setup.cfg
index 565c51016..3b1cac413 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -16,7 +16,6 @@ classifiers =
     Programming Language :: Python :: 2
     Programming Language :: Python :: 2.7
     Programming Language :: Python :: 3
-    Programming Language :: Python :: 3.5
     Programming Language :: Python :: 3.6
     Programming Language :: Python :: 3.7
     Programming Language :: Python :: 3.8
@@ -26,7 +25,7 @@ classifiers =
     Topic :: System :: Systems Administration
 
 [options]
-python_requires = >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
+python_requires = >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*
 setup_requires = setuptools_scm
 packages = find:
 zip_safe = false
diff --git a/tox.ini b/tox.ini
index 02c98b40a..39415e4c0 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,7 +1,7 @@
 [tox]
 envlist =
     # NOTE: keep this in sync with the env list in .github/workflows/ci.yml.
-    py{27,35,36,37,38,39,py,py3}-pip{20.1,20.2,20.3,previous,latest,master}-coverage
+    py{27,36,37,38,39,py,py3}-pip{20.1,20.2,20.3,previous,latest,master}-coverage
     checkqa
     readme
 skip_missing_interpreters = True

From 78ae8fb2cde97baee00b8ef9d6abb95d74cb1149 Mon Sep 17 00:00:00 2001
From: Jon Dufresne
Date: Sun, 29 Nov 2020 08:14:07 -0800
Subject: [PATCH 3/9] Remove support for EOL Python 2.7

Python 2.7 went EOL 2020-01-01.

For a list of supported Python versions, see:
https://devguide.python.org/#status-of-python-branches

For a list of EOL Python versions and their dates, see:
https://devguide.python.org/devcycle/#end-of-life-branches

Removing support reduces compatibility code, CI resources, and other
maintenance concerns. As well, we can begin to take advantage of newer
Python features and patterns.
Cleanups: - Use f-strings - Drop __future__ imports - Drop file encoding cookies "# coding: utf-8" - Drop uses of six - Use stdlib TemporaryDirectory and unittest.mock - Use newer metaclass syntax - Drop now unnecessary fs_str() function - Drop inheritance from "object", all Python 3 classes are new-style classes - Remove unnecessary coerce to string/bytes calls - Use updated subprocess APIs --- .github/workflows/ci.yml | 1 - .github/workflows/cron.yml | 1 - .pre-commit-config.yaml | 1 + README.rst | 6 +-- piptools/__init__.py | 2 +- piptools/_compat/__init__.py | 24 --------- piptools/_compat/contextlib.py | 2 +- piptools/_compat/pip_compat.py | 3 -- piptools/_compat/tempfile.py | 88 -------------------------------- piptools/cache.py | 19 +++---- piptools/click.py | 2 - piptools/exceptions.py | 4 +- piptools/logging.py | 5 +- piptools/repositories/base.py | 8 +-- piptools/repositories/local.py | 3 -- piptools/repositories/pypi.py | 27 +++++----- piptools/resolver.py | 23 +++------ piptools/scripts/compile.py | 5 +- piptools/scripts/sync.py | 3 -- piptools/sync.py | 14 ++--- piptools/utils.py | 58 ++++++--------------- piptools/writer.py | 26 ++++------ setup.cfg | 9 +--- tests/conftest.py | 16 +++--- tests/test_cli_compile.py | 53 ++++++++++--------- tests/test_cli_sync.py | 59 ++++++++++----------- tests/test_compat.py | 21 -------- tests/test_locations.py | 11 ++-- tests/test_repository_local.py | 2 +- tests/test_repository_pypi.py | 2 +- tests/test_sync.py | 56 ++++++++++---------- tests/test_top_level_editable.py | 2 +- tests/test_utils.py | 20 +------- tests/utils.py | 14 ----- tox.ini | 2 +- 35 files changed, 178 insertions(+), 414 deletions(-) delete mode 100644 piptools/_compat/tempfile.py delete mode 100644 tests/test_compat.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 34670c964..d3dfd8efe 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,6 @@ jobs: - macOS python-version: - 3.9 - - 2.7 - 3.6 - 3.7 - 3.8 diff --git a/.github/workflows/cron.yml b/.github/workflows/cron.yml index 60f4ed401..1ec8dd6a7 100644 --- a/.github/workflows/cron.yml +++ b/.github/workflows/cron.yml @@ -18,7 +18,6 @@ jobs: - MacOS python-version: - 3.9 - - 2.7 - 3.6 - 3.7 - 3.8 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4f66d31c6..552d29cea 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,6 +3,7 @@ repos: rev: 20.8b1 hooks: - id: black + args: [--target-version=py36] language_version: python3 - repo: https://github.com/PyCQA/isort rev: 5.6.4 diff --git a/README.rst b/README.rst index c6c6c8825..5224cbc23 100644 --- a/README.rst +++ b/README.rst @@ -421,7 +421,7 @@ then yes, you should commit both ``requirements.in`` and ``requirements.txt`` to Note that if you are deploying on multiple Python environments (read the section below), then you must commit a seperate output file for each Python environment. We suggest to use the ``{env}-requirements.txt`` format -(ex: ``win32-py2.7-requirements.txt``, ``macos-py3.6-requirements.txt``, etc.). +(ex: ``win32-py3.7-requirements.txt``, ``macos-py3.6-requirements.txt``, etc.). Cross-environment usage of ``requirements.in``/``requirements.txt`` and ``pip-compile`` @@ -429,7 +429,7 @@ Cross-environment usage of ``requirements.in``/``requirements.txt`` and ``pip-co The dependencies of a package can change depending on the Python environment in which it is installed. 
Here, we define a Python environment as the combination of Operating -System, Python version (2.7, 3.6, etc.), and Python implementation (CPython, PyPy, +System, Python version (3.6, 3.7, etc.), and Python implementation (CPython, PyPy, etc.). For an exact definition, refer to the possible combinations of `PEP 508 environment markers`_. @@ -495,5 +495,5 @@ versions as the required ``pip`` versions. +---------------+----------------+----------------+ | 5.5.0 | 20.1 - 20.3.* | 2.7, 3.5 - 3.9 | +---------------+----------------+----------------+ -| 6.0.0 | 20.1 - 20.3.* | 2.7, 3.6 - 3.9 | +| 6.0.0 | 20.1 - 20.3.* | 3.6 - 3.9 | +---------------+----------------+----------------+ diff --git a/piptools/__init__.py b/piptools/__init__.py index 9f0c95aa5..3daa2f20d 100644 --- a/piptools/__init__.py +++ b/piptools/__init__.py @@ -8,4 +8,4 @@ locale.setlocale(locale.LC_ALL, "") except locale.Error as e: # pragma: no cover # setlocale can apparently crash if locale are uninitialized - secho("Ignoring error when setting locale: {}".format(e), fg="red") + secho(f"Ignoring error when setting locale: {e}", fg="red") diff --git a/piptools/_compat/__init__.py b/piptools/_compat/__init__.py index de28628db..c715e19b3 100644 --- a/piptools/_compat/__init__.py +++ b/piptools/_compat/__init__.py @@ -1,26 +1,2 @@ -# coding: utf-8 # flake8: noqa -from __future__ import absolute_import, division, print_function, unicode_literals - -import errno -import os - -from pip._vendor import six - from .pip_compat import PIP_VERSION, parse_requirements - -if six.PY2: - from .tempfile import TemporaryDirectory -else: - from tempfile import TemporaryDirectory - - -def makedirs(name, mode=0o777, exist_ok=False): - if six.PY2: - try: - os.makedirs(name, mode) - except OSError as e: - if not exist_ok or e.errno != errno.EEXIST: - raise - else: - os.makedirs(name, mode, exist_ok) diff --git a/piptools/_compat/contextlib.py b/piptools/_compat/contextlib.py index 04039ccb0..067f8dc8f 100644 --- a/piptools/_compat/contextlib.py +++ b/piptools/_compat/contextlib.py @@ -1,5 +1,5 @@ # Ported from python 3.7 contextlib.py -class nullcontext(object): +class nullcontext: """Context manager that does no additional processing. Used as a stand-in for a normal context manager, when a particular block of code is only sometimes used with a normal context manager: diff --git a/piptools/_compat/pip_compat.py b/piptools/_compat/pip_compat.py index 6cd24a0ff..40dadb135 100644 --- a/piptools/_compat/pip_compat.py +++ b/piptools/_compat/pip_compat.py @@ -1,6 +1,3 @@ -# -*- coding=utf-8 -*- -from __future__ import absolute_import - import pip from pip._internal.req import parse_requirements as _parse_requirements from pip._internal.req.constructors import install_req_from_parsed_requirement diff --git a/piptools/_compat/tempfile.py b/piptools/_compat/tempfile.py deleted file mode 100644 index dc7e9ef99..000000000 --- a/piptools/_compat/tempfile.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 -from __future__ import absolute_import, division, print_function - -import os as _os -import sys as _sys -import warnings as _warnings -from tempfile import mkdtemp - - -class TemporaryDirectory(object): - """Create and return a temporary directory. This has the same - behavior as mkdtemp but can be used as a context manager. For - example: - - with TemporaryDirectory() as tmpdir: - ... - - Upon exiting the context, the directory and everything contained - in it are removed. 
- """ - - def __init__(self, suffix="", prefix="tmp", dir=None): - self._closed = False - self.name = None # Handle mkdtemp raising an exception - self.name = mkdtemp(suffix, prefix, dir) - - def __repr__(self): - return "<{} {!r}>".format(self.__class__.__name__, self.name) - - def __enter__(self): - return self.name - - def cleanup(self): - if self.name and not self._closed: - try: - self._rmtree(self.name) - except (TypeError, AttributeError) as ex: - # Issue #10188: Emit a warning on stderr - # if the directory could not be cleaned - # up due to missing globals - if "None" not in str(ex): - raise - print( - "ERROR: {!r} while cleaning up {!r}".format(ex, self), - file=_sys.stderr, - ) - return - self._closed = True - - def __exit__(self, exc, value, tb): - self.cleanup() - - def __del__(self): - # Issue a ResourceWarning if implicit cleanup needed - self.cleanup() - - # XXX (ncoghlan): The following code attempts to make - # this class tolerant of the module nulling out process - # that happens during CPython interpreter shutdown - # Alas, it doesn't actually manage it. See issue #10188 - _listdir = staticmethod(_os.listdir) - _path_join = staticmethod(_os.path.join) - _isdir = staticmethod(_os.path.isdir) - _islink = staticmethod(_os.path.islink) - _remove = staticmethod(_os.remove) - _rmdir = staticmethod(_os.rmdir) - _warn = _warnings.warn - - def _rmtree(self, path): - # Essentially a stripped down version of shutil.rmtree. We can't - # use globals because they may be None'ed out at shutdown. - for name in self._listdir(path): - fullname = self._path_join(path, name) - try: - isdir = self._isdir(fullname) and not self._islink(fullname) - except OSError: - isdir = False - if isdir: - self._rmtree(fullname) - else: - try: - self._remove(fullname) - except OSError: - pass - try: - self._rmdir(path) - except OSError: - pass diff --git a/piptools/cache.py b/piptools/cache.py index 8bd01506f..f5d7aeeeb 100644 --- a/piptools/cache.py +++ b/piptools/cache.py @@ -1,7 +1,3 @@ -# coding: utf-8 -from __future__ import absolute_import, division, print_function, unicode_literals - -import errno import json import os import platform @@ -9,7 +5,6 @@ from pip._vendor.packaging.requirements import Requirement -from ._compat import makedirs from .exceptions import PipToolsError from .utils import as_tuple, key_from_req, lookup_table @@ -33,7 +28,7 @@ def __str__(self): lines = [ "The dependency cache seems to have been corrupted.", "Inspect, or delete, the following file:", - " {}".format(self.path), + f" {self.path}", ] return os.linesep.join(lines) @@ -51,7 +46,7 @@ def read_cache_file(cache_file_path): return doc["dependencies"] -class DependencyCache(object): +class DependencyCache: """ Creates a new persistent dependency cache for the current Python version. 
The cache file is written to the appropriate user cache dir for the @@ -64,8 +59,8 @@ class DependencyCache(object): """ def __init__(self, cache_dir): - makedirs(cache_dir, exist_ok=True) - cache_filename = "depcache-{}.json".format(_implementation_name()) + os.makedirs(cache_dir, exist_ok=True) + cache_filename = f"depcache-{_implementation_name()}.json" self._cache_file = os.path.join(cache_dir, cache_filename) self._cache = None @@ -98,15 +93,13 @@ def as_cache_key(self, ireq): extras_string = "" else: extras_string = "[{}]".format(",".join(extras)) - return name, "{}{}".format(version, extras_string) + return name, f"{version}{extras_string}" def read_cache(self): """Reads the cached contents into memory.""" try: self._cache = read_cache_file(self._cache_file) - except IOError as e: - if e.errno != errno.ENOENT: - raise + except FileNotFoundError: self._cache = {} def write_cache(self): diff --git a/piptools/click.py b/piptools/click.py index 86f1612c6..1172f7fb0 100644 --- a/piptools/click.py +++ b/piptools/click.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import click from click import * # noqa diff --git a/piptools/exceptions.py b/piptools/exceptions.py index 527897274..715129e76 100644 --- a/piptools/exceptions.py +++ b/piptools/exceptions.py @@ -22,7 +22,7 @@ def __str__(self): else: versions.append(version) - lines = ["Could not find a version that matches {}".format(self.ireq)] + lines = [f"Could not find a version that matches {self.ireq}"] if versions: lines.append("Tried: {}".format(", ".join(versions))) @@ -41,7 +41,7 @@ def __str__(self): "There are incompatible versions in the resolved dependencies:" ) source_ireqs = getattr(self.ireq, "_source_ireqs", []) - lines.extend(" {}".format(ireq) for ireq in source_ireqs) + lines.extend(f" {ireq}" for ireq in source_ireqs) else: redacted_urls = tuple( redact_auth_from_url(url) for url in self.finder.index_urls diff --git a/piptools/logging.py b/piptools/logging.py index dcf068f7a..87c552667 100644 --- a/piptools/logging.py +++ b/piptools/logging.py @@ -1,6 +1,3 @@ -# coding: utf-8 -from __future__ import absolute_import, division, print_function, unicode_literals - import contextlib import logging import sys @@ -12,7 +9,7 @@ logging.basicConfig() -class LogContext(object): +class LogContext: stream = sys.stderr def __init__(self, verbosity=0, indent_width=2): diff --git a/piptools/repositories/base.py b/piptools/repositories/base.py index 54849cb7f..b8b462f0b 100644 --- a/piptools/repositories/base.py +++ b/piptools/repositories/base.py @@ -1,14 +1,8 @@ -# coding: utf-8 -from __future__ import absolute_import, division, print_function, unicode_literals - from abc import ABCMeta, abstractmethod from contextlib import contextmanager -from pip._vendor.six import add_metaclass - -@add_metaclass(ABCMeta) -class BaseRepository(object): +class BaseRepository(metaclass=ABCMeta): def clear_caches(self): """Should clear any caches used by the implementation.""" diff --git a/piptools/repositories/local.py b/piptools/repositories/local.py index f185f35c3..e2a96be4d 100644 --- a/piptools/repositories/local.py +++ b/piptools/repositories/local.py @@ -1,6 +1,3 @@ -# coding: utf-8 -from __future__ import absolute_import, division, print_function, unicode_literals - from contextlib import contextmanager from pip._internal.utils.hashes import FAVORITE_HASH diff --git a/piptools/repositories/pypi.py b/piptools/repositories/pypi.py index d065e235e..ab6724f90 100644 --- a/piptools/repositories/pypi.py +++ 
b/piptools/repositories/pypi.py @@ -1,11 +1,9 @@ -# coding: utf-8 -from __future__ import absolute_import, division, print_function, unicode_literals - import collections import hashlib import itertools import logging import os +import tempfile from contextlib import contextmanager from shutil import rmtree @@ -24,13 +22,12 @@ from pip._internal.utils.urls import path_to_url, url_to_path from pip._vendor.requests import RequestException -from .._compat import PIP_VERSION, TemporaryDirectory, contextlib, makedirs +from .._compat import PIP_VERSION, contextlib from ..click import progressbar from ..exceptions import NoCandidateFound from ..logging import log from ..utils import ( as_tuple, - fs_str, is_pinned_requirement, is_url_requirement, lookup_table, @@ -90,9 +87,9 @@ def __init__(self, pip_args, cache_dir): self._build_dir = None self._source_dir = None self._cache_dir = normalize_path(cache_dir) - self._download_dir = fs_str(os.path.join(self._cache_dir, "pkgs")) + self._download_dir = os.path.join(self._cache_dir, "pkgs") if PIP_VERSION[:2] <= (20, 2): - self._wheel_download_dir = fs_str(os.path.join(self._cache_dir, "wheels")) + self._wheel_download_dir = os.path.join(self._cache_dir, "wheels") self._setup_logging() @@ -102,8 +99,8 @@ def freshen_build_caches(self): Start with fresh build/source caches. Will remove any old build caches from disk automatically. """ - self._build_dir = TemporaryDirectory(fs_str("build")) - self._source_dir = TemporaryDirectory(fs_str("source")) + self._build_dir = tempfile.TemporaryDirectory("build") + self._source_dir = tempfile.TemporaryDirectory("source") try: yield finally: @@ -238,9 +235,9 @@ def get_dependencies(self, ireq): download_dir = None else: download_dir = self._get_download_path(ireq) - makedirs(download_dir, exist_ok=True) + os.makedirs(download_dir, exist_ok=True) if PIP_VERSION[:2] <= (20, 2): - makedirs(self._wheel_download_dir, exist_ok=True) + os.makedirs(self._wheel_download_dir, exist_ok=True) with global_tempdir_manager(): wheel_cache = WheelCache(self._cache_dir, self.options.format_control) @@ -270,7 +267,7 @@ def _get_project(self, ireq): for index_url in self.finder.search_scope.index_urls ) for package_index in package_indexes: - url = "{url}/{name}/json".format(url=package_index.pypi_url, name=ireq.name) + url = f"{package_index.pypi_url}/{ireq.name}/json" try: response = self.session.get(url) except RequestException as e: @@ -340,7 +337,7 @@ def get_hashes(self, ireq): return {self._get_file_hash(cached_link)} if not is_pinned_requirement(ireq): - raise TypeError("Expected pinned requirement, got {}".format(ireq)) + raise TypeError(f"Expected pinned requirement, got {ireq}") log.debug(ireq.name) @@ -402,7 +399,7 @@ def _get_hashes_from_files(self, ireq): } def _get_file_hash(self, link): - log.debug("Hashing {}".format(link.show_url)) + log.debug(f"Hashing {link.show_url}") h = hashlib.new(FAVORITE_HASH) with open_local_or_remote_file(link, self.session) as f: # Chunks to iterate @@ -509,7 +506,7 @@ def open_local_or_remote_file(link, session): # Local URL local_path = url_to_path(url) if os.path.isdir(local_path): - raise ValueError("Cannot open directory for read: {}".format(url)) + raise ValueError(f"Cannot open directory for read: {url}") else: st = os.stat(local_path) with open(local_path, "rb") as local_file: diff --git a/piptools/resolver.py b/piptools/resolver.py index d46a04a9e..6fe9f3a6b 100644 --- a/piptools/resolver.py +++ b/piptools/resolver.py @@ -1,6 +1,3 @@ -# coding: utf-8 -from __future__ import 
absolute_import, division, print_function, unicode_literals - import copy from functools import partial from itertools import chain, count, groupby @@ -23,7 +20,7 @@ magenta = partial(click.style, fg="magenta") -class RequirementSummary(object): +class RequirementSummary: """ Summary of a requirement's properties for comparison purposes. """ @@ -102,7 +99,7 @@ def combine_install_requirements(repository, ireqs): return combined_ireq -class Resolver(object): +class Resolver: def __init__( self, constraints, @@ -156,8 +153,7 @@ def resolve(self, max_rounds=10): self.repository.clear_caches() # Ignore existing packages - # NOTE: str() wrapping necessary for Python 2/3 compat - with update_env_context_manager(PIP_EXISTS_ACTION=str("i")): + with update_env_context_manager(PIP_EXISTS_ACTION="i"): for current_round in count(start=1): # pragma: no branch if current_round > max_rounds: raise RuntimeError( @@ -168,7 +164,7 @@ def resolve(self, max_rounds=10): ) log.debug("") - log.debug(magenta("{:^60}".format("ROUND {}".format(current_round)))) + log.debug(magenta("{:^60}".format(f"ROUND {current_round}"))) # If a package version (foo==2.0) was built in a previous round, # and in this round a different version of foo needs to be built # (i.e. foo==1.0), the directory will exist already, which will @@ -291,11 +287,11 @@ def _resolve_one_round(self): log.debug("New dependencies found in this round:") with log.indentation(): for new_dependency in sorted(diff, key=key_from_ireq): - log.debug("adding {}".format(new_dependency)) + log.debug(f"adding {new_dependency}") log.debug("Removed dependencies in this round:") with log.indentation(): for removed_dependency in sorted(removed, key=key_from_ireq): - log.debug("removing {}".format(removed_dependency)) + log.debug(f"removing {removed_dependency}") # Store the last round's results in the their_constraints self.their_constraints = theirs @@ -365,13 +361,10 @@ def _iter_dependencies(self, ireq): return if ireq.editable or is_url_requirement(ireq): - for dependency in self.repository.get_dependencies(ireq): - yield dependency + yield from self.repository.get_dependencies(ireq) return elif not is_pinned_requirement(ireq): - raise TypeError( - "Expected pinned or editable requirement, got {}".format(ireq) - ) + raise TypeError(f"Expected pinned or editable requirement, got {ireq}") # Now, either get the dependencies from the dependency cache (for # speed), or reach out to the external repository to diff --git a/piptools/scripts/compile.py b/piptools/scripts/compile.py index ca650e491..abd7b6fd6 100755 --- a/piptools/scripts/compile.py +++ b/piptools/scripts/compile.py @@ -1,6 +1,3 @@ -# coding: utf-8 -from __future__ import absolute_import, division, print_function, unicode_literals - import os import shlex import sys @@ -46,7 +43,7 @@ def parse_args(self, ctx, args): Override base `parse_args` to store the argument part of `sys.argv`. 
""" self._os_args = set(args) - return super(BaseCommand, self).parse_args(ctx, args) + return super().parse_args(ctx, args) def has_arg(self, arg_name): """ diff --git a/piptools/scripts/sync.py b/piptools/scripts/sync.py index 9759b302f..99236826b 100755 --- a/piptools/scripts/sync.py +++ b/piptools/scripts/sync.py @@ -1,6 +1,3 @@ -# coding: utf-8 -from __future__ import absolute_import, division, print_function, unicode_literals - import itertools import os import shlex diff --git a/piptools/sync.py b/piptools/sync.py index 9967682c7..7a92b1ef2 100644 --- a/piptools/sync.py +++ b/piptools/sync.py @@ -2,7 +2,7 @@ import os import sys import tempfile -from subprocess import check_call # nosec +from subprocess import run # nosec from pip._internal.commands.freeze import DEV_PKGS from pip._internal.utils.compat import stdlib_pkgs @@ -169,7 +169,7 @@ def sync(to_install, to_uninstall, dry_run=False, install_flags=None, ask=False) if to_uninstall: click.echo("Would uninstall:") for pkg in sorted(to_uninstall): - click.echo(" {}".format(pkg)) + click.echo(f" {pkg}") if to_install: click.echo("Would install:") @@ -184,10 +184,11 @@ def sync(to_install, to_uninstall, dry_run=False, install_flags=None, ask=False) if not dry_run: if to_uninstall: - check_call( # nosec + run( # nosec [sys.executable, "-m", "pip", "uninstall", "-y"] + pip_flags - + sorted(to_uninstall) + + sorted(to_uninstall), + check=True, ) if to_install: @@ -205,10 +206,11 @@ def sync(to_install, to_uninstall, dry_run=False, install_flags=None, ask=False) tmp_req_file.close() try: - check_call( # nosec + run( # nosec [sys.executable, "-m", "pip", "install", "-r", tmp_req_file.name] + pip_flags - + install_flags + + install_flags, + check=True, ) finally: os.unlink(tmp_req_file.name) diff --git a/piptools/utils.py b/piptools/utils.py index 4b20ba6e3..49cf998c7 100644 --- a/piptools/utils.py +++ b/piptools/utils.py @@ -1,7 +1,4 @@ -# coding: utf-8 -from __future__ import absolute_import, division, print_function, unicode_literals - -import sys +import shlex from collections import OrderedDict from itertools import chain @@ -9,8 +6,6 @@ from pip._internal.req.constructors import install_req_from_line from pip._internal.utils.misc import redact_auth_from_url from pip._internal.vcs import is_url -from pip._vendor import six -from pip._vendor.six.moves import shlex_quote from .click import style @@ -60,7 +55,7 @@ def make_install_requirement(name, version, extras, constraint=False): extras_string = "[{}]".format(",".join(sorted(extras))) return install_req_from_line( - str("{}{}=={}".format(name, extras_string, version)), constraint=constraint + str(f"{name}{extras_string}=={version}"), constraint=constraint ) @@ -78,18 +73,18 @@ def format_requirement(ireq, marker=None, hashes=None): in a less verbose way than using its `__str__` method. """ if ireq.editable: - line = "-e {}".format(ireq.link.url) + line = f"-e {ireq.link.url}" elif is_url_requirement(ireq): line = ireq.link.url else: line = str(ireq.req).lower() if marker: - line = "{} ; {}".format(line, marker) + line = f"{line} ; {marker}" if hashes: for hash_ in sorted(hashes): - line += " \\\n --hash={}".format(hash_) + line += f" \\\n --hash={hash_}" return line @@ -138,7 +133,7 @@ def as_tuple(ireq): the pinned InstallRequirement. 
""" if not is_pinned_requirement(ireq): - raise TypeError("Expected a pinned InstallRequirement, got {}".format(ireq)) + raise TypeError(f"Expected a pinned InstallRequirement, got {ireq}") name = key_from_ireq(ireq) version = next(iter(ireq.specifier)).version @@ -246,29 +241,6 @@ def name_from_req(req): return req.name -def fs_str(string): - """ - Convert given string to a correctly encoded filesystem string. - - On Python 2, if the input string is unicode, converts it to bytes - encoded with the filesystem encoding. - - On Python 3 returns the string as is, since Python 3 uses unicode - paths and the input string shouldn't be bytes. - - :type string: str|unicode - :rtype: str - """ - if isinstance(string, str): - return string - if isinstance(string, bytes): - raise TypeError("fs_str() argument must not be bytes") - return string.encode(_fs_encoding) - - -_fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding() - - def get_hashes_from_ireq(ireq): """ Given an InstallRequirement, return a list of string hashes in @@ -278,7 +250,7 @@ def get_hashes_from_ireq(ireq): result = [] for algorithm, hexdigests in ireq.hash_options.items(): for hash_ in hexdigests: - result.append("{}:{}".format(algorithm, hash_)) + result.append(f"{algorithm}:{hash_}") return result @@ -288,8 +260,8 @@ def force_text(s): """ if s is None: return "" - if not isinstance(s, six.string_types): - return six.text_type(s) + if not isinstance(s, str): + return str(s) return s @@ -322,7 +294,7 @@ def get_compile_command(click_ctx): # Re-add click-stripped '--' if any start with '-' if any(val.startswith("-") and val != "-" for val in value): right_args.append("--") - right_args.extend([shlex_quote(force_text(val)) for val in value]) + right_args.extend([shlex.quote(force_text(val)) for val in value]) continue # Get the latest option name (usually it'll be a long name) @@ -360,24 +332,24 @@ def get_compile_command(click_ctx): # There are no false-options, use true-option else: arg = option_long_name - left_args.append(shlex_quote(arg)) + left_args.append(shlex.quote(arg)) # Append to args the option with a value else: - if isinstance(val, six.string_types) and is_url(val): + if isinstance(val, str) and is_url(val): val = redact_auth_from_url(val) if option.name == "pip_args": - # shlex_quote would produce functional but noisily quoted results, + # shlex.quote() would produce functional but noisily quoted results, # e.g. 
--pip-args='--cache-dir='"'"'/tmp/with spaces'"'"'' # Instead, we try to get more legible quoting via repr: left_args.append( "{option}={value}".format( - option=option_long_name, value=repr(fs_str(force_text(val))) + option=option_long_name, value=repr(val) ) ) else: left_args.append( "{option}={value}".format( - option=option_long_name, value=shlex_quote(force_text(val)) + option=option_long_name, value=shlex.quote(force_text(val)) ) ) diff --git a/piptools/writer.py b/piptools/writer.py index 515df198e..1798141c4 100644 --- a/piptools/writer.py +++ b/piptools/writer.py @@ -1,11 +1,7 @@ -from __future__ import unicode_literals - import os import re from itertools import chain -from pip._vendor import six - from .click import unstyle from .logging import log from .utils import ( @@ -43,12 +39,12 @@ def _comes_from_as_string(ireq): - if isinstance(ireq.comes_from, six.string_types): + if isinstance(ireq.comes_from, str): return strip_comes_from_line_re.sub("", ireq.comes_from) return key_from_ireq(ireq.comes_from) -class OutputWriter(object): +class OutputWriter: def __init__( self, src_files, @@ -97,7 +93,7 @@ def write_header(self): compile_command = os.environ.get( "CUSTOM_COMPILE_COMMAND" ) or get_compile_command(self.click_ctx) - yield comment("# {}".format(compile_command)) + yield comment(f"# {compile_command}") yield comment("#") def write_index_options(self): @@ -106,23 +102,23 @@ def write_index_options(self): if index_url.rstrip("/") == self.default_index_url: continue flag = "--index-url" if index == 0 else "--extra-index-url" - yield "{} {}".format(flag, index_url) + yield f"{flag} {index_url}" def write_trusted_hosts(self): if self.emit_trusted_host: for trusted_host in dedup(self.trusted_hosts): - yield "--trusted-host {}".format(trusted_host) + yield f"--trusted-host {trusted_host}" def write_format_controls(self): for nb in dedup(sorted(self.format_control.no_binary)): - yield "--no-binary {}".format(nb) + yield f"--no-binary {nb}" for ob in dedup(sorted(self.format_control.only_binary)): - yield "--only-binary {}".format(ob) + yield f"--only-binary {ob}" def write_find_links(self): if self.emit_find_links: for find_link in dedup(self.find_links): - yield "--find-links {}".format(find_link) + yield f"--find-links {find_link}" def write_flags(self): emitted = False @@ -189,7 +185,7 @@ def _iter_lines(self, results, unsafe_requirements=None, markers=None, hashes=No for ireq in unsafe_requirements: ireq_key = key_from_ireq(ireq) if not self.allow_unsafe: - yield comment("# {}".format(ireq_key)) + yield comment(f"# {ireq_key}") else: line = self._format_requirement( ireq, marker=markers.get(ireq_key), hashes=hashes @@ -208,8 +204,8 @@ def write(self, results, unsafe_requirements, markers, hashes): for line in self._iter_lines(results, unsafe_requirements, markers, hashes): log.info(line) if not self.dry_run: - self.dst_file.write(unstyle(line).encode("utf-8")) - self.dst_file.write(os.linesep.encode("utf-8")) + self.dst_file.write(unstyle(line).encode()) + self.dst_file.write(os.linesep.encode()) def _format_requirement(self, ireq, marker=None, hashes=None): ireq_hashes = (hashes if hashes is not None else {}).get(ireq) diff --git a/setup.cfg b/setup.cfg index 3b1cac413..710868286 100644 --- a/setup.cfg +++ b/setup.cfg @@ -13,9 +13,8 @@ classifiers = License :: OSI Approved :: BSD License Operating System :: OS Independent Programming Language :: Python - Programming Language :: Python :: 2 - Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 + 
Programming Language :: Python :: 3 :: Only Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 @@ -25,7 +24,7 @@ classifiers = Topic :: System :: Systems Administration [options] -python_requires = >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.* +python_requires = >=3.6 setup_requires = setuptools_scm packages = find: zip_safe = false @@ -38,7 +37,6 @@ exclude = tests [options.extras_require] testing = - mock pytest pytest-rerunfailures coverage = pytest-cov @@ -48,9 +46,6 @@ console_scripts = pip-compile = piptools.scripts.compile:cli pip-sync = piptools.scripts.sync:cli -[bdist_wheel] -universal = 1 - [tool:pytest] norecursedirs = .* build dist venv test_data piptools/_compat/* testpaths = tests piptools diff --git a/tests/conftest.py b/tests/conftest.py index 6adf2cb7f..27239bb24 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -97,7 +97,7 @@ def copy_ireq_dependencies(self, source, dest): pass -class FakeInstalledDistribution(object): +class FakeInstalledDistribution: def __init__(self, line, deps=None): if deps is None: deps = [] @@ -244,7 +244,7 @@ def _make_package(name, version="0.1", install_requires=None): install_requires = [] install_requires_str = "[{}]".format( - ",".join("{!r}".format(package) for package in install_requires) + ",".join(f"{package!r}" for package in install_requires) ) package_dir = tmp_path / "packages" / name / version @@ -290,12 +290,12 @@ def run_setup_file(): def _run_setup_file(package_dir_path, *args): setup_file = str(package_dir_path / "setup.py") - with open(os.devnull, "w") as fp: - return subprocess.check_call( - (sys.executable, setup_file) + args, - cwd=str(package_dir_path), - stdout=fp, - ) # nosec + return subprocess.run( + (sys.executable, setup_file) + args, + cwd=str(package_dir_path), + stdout=subprocess.DEVNULL, + check=True, + ) # nosec return _run_setup_file diff --git a/tests/test_cli_compile.py b/tests/test_cli_compile.py index 22106268e..7420c2a14 100644 --- a/tests/test_cli_compile.py +++ b/tests/test_cli_compile.py @@ -3,15 +3,14 @@ import subprocess import sys from textwrap import dedent +from unittest import mock -import mock import pytest from pip._internal.utils.urls import path_to_url from piptools.scripts.compile import cli from .constants import MINIMAL_WHEELS_PATH, PACKAGES_PATH -from .utils import invoke @pytest.fixture(autouse=True) @@ -297,7 +296,7 @@ def test_realistic_complex_sub_dependencies(runner): wheels_dir = "wheels" # make a temporary wheel of a fake package - subprocess.check_output( + subprocess.run( [ "pip", "wheel", @@ -305,7 +304,8 @@ def test_realistic_complex_sub_dependencies(runner): "-w", wheels_dir, os.path.join(PACKAGES_PATH, "fake_with_deps", "."), - ] + ], + check=True, ) with open("requirements.in", "w") as req_in: @@ -319,13 +319,15 @@ def test_realistic_complex_sub_dependencies(runner): def test_run_as_module_compile(): """piptools can be run as ``python -m piptools ...``.""" - status, output = invoke([sys.executable, "-m", "piptools", "compile", "--help"]) + result = subprocess.run( + [sys.executable, "-m", "piptools", "compile", "--help"], + stdout=subprocess.PIPE, + check=True, + ) # Should have run pip-compile successfully. 
- output = output.decode("utf-8") - assert output.startswith("Usage:") - assert "Compiles requirements.txt from requirements.in" in output - assert status == 0 + assert result.stdout.startswith(b"Usage:") + assert b"Compiles requirements.txt from requirements.in" in result.stdout def test_editable_package(pip_conf, runner): @@ -701,7 +703,7 @@ def test_generate_hashes_with_editable(pip_conf, runner): small_fake_package_dir = os.path.join(PACKAGES_PATH, "small_fake_with_deps") small_fake_package_url = path_to_url(small_fake_package_dir) with open("requirements.in", "w") as fp: - fp.write("-e {}\n".format(small_fake_package_url)) + fp.write(f"-e {small_fake_package_url}\n") out = runner.invoke(cli, ["--no-annotate", "--generate-hashes"]) expected = ( "-e {}\n" @@ -860,10 +862,10 @@ def test_no_candidates_pre(pip_conf, runner): @pytest.mark.parametrize( ("url", "expected_url"), ( - pytest.param("https://example.com", "https://example.com", id="regular url"), + pytest.param("https://example.com", b"https://example.com", id="regular url"), pytest.param( "https://username:password@example.com", - "https://username:****@example.com", + b"https://username:****@example.com", id="url with credentials", ), ), @@ -883,11 +885,13 @@ def test_default_index_url(make_pip_conf, url, expected_url): ) ) - status, output = invoke([sys.executable, "-m", "piptools", "compile", "--help"]) - output = output.decode("utf-8") + result = subprocess.run( + [sys.executable, "-m", "piptools", "compile", "--help"], + stdout=subprocess.PIPE, + check=True, + ) - assert status == 0 - assert expected_url in output + assert expected_url in result.stdout def test_stdin_without_output_file(runner): @@ -1045,7 +1049,8 @@ def test_cert_option(parse_requirements, runner, option, attr, expected): runner.invoke(cli, [option, expected]) # Ensure the options in parse_requirements has the expected option - assert getattr(parse_requirements.call_args.kwargs["options"], attr) == expected + args, kwargs = parse_requirements.call_args + assert getattr(kwargs["options"], attr) == expected @pytest.mark.parametrize( @@ -1064,7 +1069,8 @@ def test_build_isolation_option(parse_requirements, runner, option, expected): runner.invoke(cli, [option]) # Ensure the options in parse_requirements has the expected build_isolation option - assert parse_requirements.call_args.kwargs["options"].build_isolation is expected + args, kwargs = parse_requirements.call_args + assert kwargs["options"].build_isolation is expected @mock.patch("piptools.scripts.compile.PyPIRepository") @@ -1078,7 +1084,8 @@ def test_forwarded_args(PyPIRepository, runner): cli_args = ("--no-annotate", "--generate-hashes") pip_args = ("--no-color", "--isolated", "--disable-pip-version-check") runner.invoke(cli, cli_args + ("--pip-args", " ".join(pip_args))) - assert set(pip_args).issubset(set(PyPIRepository.call_args.args[0])) + args, kwargs = PyPIRepository.call_args + assert set(pip_args).issubset(set(args[0])) @pytest.mark.parametrize( @@ -1328,7 +1335,7 @@ def test_upgrade_packages_option_subdependency( assert "small-fake-a==0.1" in stderr_lines, "small-fake-a must keep its version" assert ( upgraded_package in stderr_lines - ), "{} must be upgraded/downgraded to {}".format(current_package, upgraded_package) + ), f"{current_package} must be upgraded/downgraded to {upgraded_package}" @pytest.mark.parametrize( @@ -1517,10 +1524,8 @@ def test_duplicate_reqs_combined( make_sdist(pkg, dists_dir) with open("requirements.in", "w") as reqs_in: - 
reqs_in.write("file:{source_path}\n".format(source_path=test_package_2)) - reqs_in.write( - "file:{source_path}#egg=test-package-2\n".format(source_path=test_package_2) - ) + reqs_in.write(f"file:{test_package_2}\n") + reqs_in.write(f"file:{test_package_2}#egg=test-package-2\n") if output_content: with open("requirements.txt", "w") as reqs_out: diff --git a/tests/test_cli_sync.py b/tests/test_cli_sync.py index 0f9aecb33..bba09fa16 100644 --- a/tests/test_cli_sync.py +++ b/tests/test_cli_sync.py @@ -1,27 +1,28 @@ +import subprocess import sys +from unittest import mock -import mock import pytest from piptools.scripts.sync import DEFAULT_REQUIREMENTS_FILE, cli -from .utils import invoke - def test_run_as_module_sync(): """piptools can be run as ``python -m piptools ...``.""" - status, output = invoke([sys.executable, "-m", "piptools", "sync", "--help"]) + result = subprocess.run( + [sys.executable, "-m", "piptools", "sync", "--help"], + stdout=subprocess.PIPE, + check=True, + ) # Should have run pip-compile successfully. - output = output.decode("utf-8") - assert output.startswith("Usage:") - assert "Synchronize virtual environment with" in output - assert status == 0 + assert result.stdout.startswith(b"Usage:") + assert b"Synchronize virtual environment with" in result.stdout -@mock.patch("piptools.sync.check_call") -def test_quiet_option(check_call, runner): +@mock.patch("piptools.sync.run") +def test_quiet_option(run, runner): """sync command can be run with `--quiet` or `-q` flag.""" with open("requirements.txt", "w") as req_in: @@ -32,13 +33,13 @@ def test_quiet_option(check_call, runner): assert out.exit_code == 0 # for every call to pip ensure the `-q` flag is set - assert check_call.call_count == 2 - for call in check_call.call_args_list: + assert run.call_count == 2 + for call in run.call_args_list: assert "-q" in call[0][0] -@mock.patch("piptools.sync.check_call") -def test_quiet_option_when_up_to_date(check_call, runner): +@mock.patch("piptools.sync.run") +def test_quiet_option_when_up_to_date(run, runner): """ Sync should output nothing when everything is up to date and quiet option is set. """ @@ -50,7 +51,7 @@ def test_quiet_option_when_up_to_date(check_call, runner): assert not out.stderr_bytes assert out.exit_code == 0 - check_call.assert_not_called() + run.assert_not_called() def test_no_requirements_file(runner): @@ -86,7 +87,7 @@ def test_force_files_with_dot_in_extension(runner): with open("requirements.in", "w") as req_in: req_in.write("six==1.10.0") - with mock.patch("piptools.sync.check_call"): + with mock.patch("piptools.sync.run"): out = runner.invoke(cli, ["requirements.in", "--force"]) assert "WARNING: Some input files have the .in extension" in out.stderr @@ -113,7 +114,7 @@ def test_merge_error(req_lines, should_raise, runner): for line in req_lines: req_in.write(line + "\n") - with mock.patch("piptools.sync.check_call"): + with mock.patch("piptools.sync.run"): out = runner.invoke(cli, ["-n"]) if should_raise: @@ -157,8 +158,8 @@ def test_merge_error(req_lines, should_raise, runner): ), ), ) -@mock.patch("piptools.sync.check_call") -def test_pip_install_flags(check_call, cli_flags, expected_install_flags, runner): +@mock.patch("piptools.sync.run") +def test_pip_install_flags(run, cli_flags, expected_install_flags, runner): """ Test the cli flags have to be passed to the pip install command. 
""" @@ -167,7 +168,7 @@ def test_pip_install_flags(check_call, cli_flags, expected_install_flags, runner runner.invoke(cli, cli_flags) - call_args = [call[0][0] for call in check_call.call_args_list] + call_args = [call[0][0] for call in run.call_args_list] called_install_options = [args[6:] for args in call_args if args[3] == "install"] assert called_install_options == [expected_install_flags], "Called args: {}".format( call_args @@ -186,8 +187,8 @@ def test_pip_install_flags(check_call, cli_flags, expected_install_flags, runner ["--only-binary", ":all:"], ), ) -@mock.patch("piptools.sync.check_call") -def test_pip_install_flags_in_requirements_file(check_call, runner, install_flags): +@mock.patch("piptools.sync.run") +def test_pip_install_flags_in_requirements_file(run, runner, install_flags): """ Test the options from requirements.txt file pass to the pip install command. """ @@ -199,15 +200,15 @@ def test_pip_install_flags_in_requirements_file(check_call, runner, install_flag assert out.exit_code == 0, out # Make sure pip install command has expected options - call_args = [call[0][0] for call in check_call.call_args_list] + call_args = [call[0][0] for call in run.call_args_list] called_install_options = [args[6:] for args in call_args if args[3] == "install"] assert called_install_options == [install_flags], "Called args: {}".format( call_args ) -@mock.patch("piptools.sync.check_call") -def test_sync_ask_declined(check_call, runner): +@mock.patch("piptools.sync.run") +def test_sync_ask_declined(run, runner): """ Make sure nothing is installed if the confirmation is declined """ @@ -216,11 +217,11 @@ def test_sync_ask_declined(check_call, runner): runner.invoke(cli, ["--ask"], input="n\n") - check_call.assert_not_called() + run.assert_not_called() -@mock.patch("piptools.sync.check_call") -def test_sync_ask_accepted(check_call, runner): +@mock.patch("piptools.sync.run") +def test_sync_ask_accepted(run, runner): """ Make sure pip is called when the confirmation is accepted (even if --dry-run is given) @@ -230,7 +231,7 @@ def test_sync_ask_accepted(check_call, runner): runner.invoke(cli, ["--ask", "--dry-run"], input="y\n") - assert check_call.call_count == 2 + assert run.call_count == 2 def test_sync_dry_run_returns_non_zero_exit_code(runner): diff --git a/tests/test_compat.py b/tests/test_compat.py deleted file mode 100644 index f001f366b..000000000 --- a/tests/test_compat.py +++ /dev/null @@ -1,21 +0,0 @@ -import os - -import pytest - -from piptools._compat import makedirs - - -def test_makedirs_exist_ok_true(tmpdir): - path = str(tmpdir / "test") - makedirs(path, exist_ok=True) - assert os.path.isdir(path) - makedirs(path, exist_ok=True) - assert os.path.isdir(path) - - -def test_makedirs_exist_ok_false(tmpdir): - path = str(tmpdir / "test") - makedirs(path) - assert os.path.isdir(path) - with pytest.raises(OSError, match="exists"): - makedirs(path) diff --git a/tests/test_locations.py b/tests/test_locations.py index b875abbf0..89e68a984 100644 --- a/tests/test_locations.py +++ b/tests/test_locations.py @@ -1,8 +1,7 @@ import os +import subprocess import sys -from .utils import invoke - def test_remove_legacy_cache_dir(): """ @@ -10,8 +9,8 @@ def test_remove_legacy_cache_dir(): """ os.mkdir(os.path.expanduser("~/.pip-tools")) - status, output = invoke([sys.executable, "-m", "piptools"]) + result = subprocess.run( + [sys.executable, "-m", "piptools"], stdout=subprocess.PIPE, check=True + ) - output = output.decode("utf-8") - assert output.startswith("Removing old cache dir") - assert 
status == 0 + assert result.stdout.startswith(b"Removing old cache dir") diff --git a/tests/test_repository_local.py b/tests/test_repository_local.py index baf51aae8..74e6a6269 100644 --- a/tests/test_repository_local.py +++ b/tests/test_repository_local.py @@ -70,7 +70,7 @@ def test_toggle_reuse_hashes_local_repository( class FakeRepositoryChecksForCopy(FakeRepository): def __init__(self): - super(FakeRepositoryChecksForCopy, self).__init__() + super().__init__() self.copied = [] def copy_ireq_dependencies(self, source, dest): diff --git a/tests/test_repository_pypi.py b/tests/test_repository_pypi.py index dd18d9a2b..1c5d4730f 100644 --- a/tests/test_repository_pypi.py +++ b/tests/test_repository_pypi.py @@ -1,6 +1,6 @@ import os +from unittest import mock -import mock import pytest from pip._internal.models.link import Link from pip._internal.utils.urls import path_to_url diff --git a/tests/test_sync.py b/tests/test_sync.py index e4cc1fd8d..6d560b759 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -2,8 +2,8 @@ import sys import tempfile from collections import Counter +from unittest import mock -import mock import pytest from pip._internal.utils.urls import path_to_url @@ -173,9 +173,7 @@ def test_diff_should_install_with_markers(from_line): reqs = [from_line("subprocess32==3.2.7 ; python_version=='2.7'")] to_install, to_uninstall = diff(reqs, installed) - assert {str(x.req) for x in to_install} == ( - {"subprocess32==3.2.7"} if sys.version.startswith("2.7") else set() - ) + assert to_install == set() assert to_uninstall == set() @@ -185,9 +183,7 @@ def test_diff_should_uninstall_with_markers(fake_dist, from_line): to_install, to_uninstall = diff(reqs, installed) assert to_install == set() - assert to_uninstall == ( - set() if sys.version.startswith("2.7") else {"subprocess32"} - ) + assert to_uninstall == {"subprocess32"} def test_diff_leave_packaging_packages_alone(fake_dist, from_line): @@ -268,16 +264,17 @@ def test_diff_with_no_url_versions(fake_dist, from_line): def test_sync_install_temporary_requirement_file( from_line, from_editable, mocked_tmp_req_file ): - with mock.patch("piptools.sync.check_call") as check_call: + with mock.patch("piptools.sync.run") as run: to_install = {from_line("django==1.8")} sync(to_install, set()) - check_call.assert_called_once_with( - [sys.executable, "-m", "pip", "install", "-r", mocked_tmp_req_file.name] + run.assert_called_once_with( + [sys.executable, "-m", "pip", "install", "-r", mocked_tmp_req_file.name], + check=True, ) def test_temporary_requirement_file_deleted(from_line, from_editable, mocked_tmp_file): - with mock.patch("piptools.sync.check_call"): + with mock.patch("piptools.sync.run"): to_install = {from_line("django==1.8")} with mock.patch("os.unlink") as unlink: @@ -287,7 +284,7 @@ def test_temporary_requirement_file_deleted(from_line, from_editable, mocked_tmp def test_sync_requirement_file(from_line, from_editable, mocked_tmp_req_file): - with mock.patch("piptools.sync.check_call"): + with mock.patch("piptools.sync.run"): to_install = { from_line("django==1.8"), from_editable("git+git://fake.org/x/y.git#egg=y"), @@ -311,7 +308,7 @@ def test_sync_requirement_file(from_line, from_editable, mocked_tmp_req_file): def test_sync_requirement_file_with_hashes( from_line, from_editable, mocked_tmp_req_file ): - with mock.patch("piptools.sync.check_call"): + with mock.patch("piptools.sync.run"): to_install = { from_line( "django==1.8", @@ -377,16 +374,16 @@ def test_sync_up_to_date(runner): assert 
stdout.getvalue().decode().splitlines() == ["Everything up-to-date"] -@mock.patch("piptools.sync.check_call") -def test_sync_verbose(check_call, from_line): +@mock.patch("piptools.sync.run") +def test_sync_verbose(run, from_line): """ The -q option has to be passed to every pip calls. """ sync({from_line("django==1.8")}, {from_line("click==4.0")}) - assert check_call.call_count == 2 - for call in check_call.call_args_list: - check_call_args = call[0][0] - assert "-q" not in check_call_args + assert run.call_count == 2 + for call in run.call_args_list: + run_args = call[0][0] + assert "-q" not in run_args @pytest.mark.parametrize( @@ -419,9 +416,9 @@ def test_sync_dry_run(runner, from_line, to_install, to_uninstall, expected_mess (set(), {"django==1.8", "click==4.0"}, "Would uninstall:"), ), ) -@mock.patch("piptools.sync.check_call") +@mock.patch("piptools.sync.run") def test_sync_ask_declined( - check_call, runner, from_line, to_install, to_uninstall, expected_message + run, runner, from_line, to_install, to_uninstall, expected_message ): """ Sync with --ask option does a dry run if the user declines @@ -438,12 +435,12 @@ def test_sync_ask_declined( " django==1.8", "Would you like to proceed with these changes? [y/N]: n", ] - check_call.assert_not_called() + run.assert_not_called() @pytest.mark.parametrize("dry_run", (True, False)) -@mock.patch("piptools.sync.check_call") -def test_sync_ask_accepted(check_call, runner, from_line, dry_run): +@mock.patch("piptools.sync.run") +def test_sync_ask_accepted(run, runner, from_line, dry_run): """ pip should be called as normal when the user confirms, even with dry_run """ @@ -456,7 +453,7 @@ def test_sync_ask_accepted(check_call, runner, from_line, dry_run): dry_run=dry_run, ) - assert check_call.call_count == 2 + assert run.call_count == 2 assert stdout.getvalue().decode().splitlines() == [ "Would uninstall:", " click==4.0", @@ -466,11 +463,12 @@ def test_sync_ask_accepted(check_call, runner, from_line, dry_run): ] -@mock.patch("piptools.sync.check_call") -def test_sync_uninstall_pip_command(check_call): +@mock.patch("piptools.sync.run") +def test_sync_uninstall_pip_command(run): to_uninstall = ["six", "django", "pytz", "click"] sync(set(), to_uninstall) - check_call.assert_called_once_with( - [sys.executable, "-m", "pip", "uninstall", "-y"] + sorted(to_uninstall) + run.assert_called_once_with( + [sys.executable, "-m", "pip", "uninstall", "-y"] + sorted(to_uninstall), + check=True, ) diff --git a/tests/test_top_level_editable.py b/tests/test_top_level_editable.py index e6c51d90a..9391e905e 100644 --- a/tests/test_top_level_editable.py +++ b/tests/test_top_level_editable.py @@ -14,7 +14,7 @@ def get_dependencies(self, ireq): if not ireq.editable: return set() - return super(MockedPyPIRepository, self).get_dependencies(ireq) + return super().get_dependencies(ireq) @pytest.fixture diff --git a/tests/test_utils.py b/tests/test_utils.py index 0332da911..b1ba984e3 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,12 +1,8 @@ -# coding: utf-8 -from __future__ import unicode_literals - import logging import os +import shlex import pytest -from pip._vendor import six -from pip._vendor.six.moves import shlex_quote from piptools.scripts.compile import cli as compile_cli from piptools.utils import ( @@ -16,7 +12,6 @@ force_text, format_requirement, format_specifier, - fs_str, get_compile_command, get_hashes_from_ireq, is_pinned_requirement, @@ -208,17 +203,6 @@ def test_name_from_req_with_project_name(from_line): assert name_from_req(ireq.req) 
== "bar" -def test_fs_str(): - assert fs_str("some path component/Something") == "some path component/Something" - assert isinstance(fs_str("whatever"), str) - - -@pytest.mark.skipif(six.PY2, reason="Not supported in py2") -def test_fs_str_with_bytes(): - with pytest.raises(TypeError, match=r"^fs_str\(\) argument must not be bytes$"): - fs_str(b"whatever") - - @pytest.mark.parametrize( ("value", "expected_text"), ((None, ""), (42, "42"), ("foo", "foo"), ("bãr", "bãr")) ) @@ -336,7 +320,7 @@ def test_get_compile_command_with_files(tmpdir_cwd, filename): assert get_compile_command( ctx ) == "pip-compile --output-file=requirements.txt {src_file}".format( - src_file=shlex_quote(path) + src_file=shlex.quote(path) ) diff --git a/tests/utils.py b/tests/utils.py index ac44b3653..32c565e15 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,18 +1,4 @@ import os -import subprocess - - -def invoke(command): - """Invoke sub-process.""" - try: - output = subprocess.check_output(command, stderr=subprocess.STDOUT) - status = 0 - except subprocess.CalledProcessError as error: # pragma: no cover - output = error.output - status = error.returncode - - return status, output - # NOTE: keep in sync with "passenv" in tox.ini CI_VARIABLES = {"CI", "GITHUB_ACTIONS"} diff --git a/tox.ini b/tox.ini index 39415e4c0..d3f9d736c 100644 --- a/tox.ini +++ b/tox.ini @@ -1,7 +1,7 @@ [tox] envlist = # NOTE: keep this in sync with the env list in .github/workflows/ci.yml. - py{27,36,37,38,39,py,py3}-pip{20.1,20.2,20.3,previous,latest,master}-coverage + py{36,37,38,39,py,py3}-pip{20.1,20.2,20.3,previous,latest,master}-coverage checkqa readme skip_missing_interpreters = True From 921bbfd00d138036a4a7c8e4d72d764ad75c9e4d Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Mon, 30 Nov 2020 05:45:53 -0800 Subject: [PATCH 4/9] Use flynt to replace more f-strings --- piptools/cache.py | 2 +- piptools/exceptions.py | 4 ++-- piptools/locations.py | 4 +--- piptools/repositories/pypi.py | 24 +++++------------------- piptools/resolver.py | 4 ++-- piptools/scripts/compile.py | 4 +--- piptools/sync.py | 2 +- piptools/utils.py | 12 +++--------- piptools/writer.py | 2 +- tests/conftest.py | 24 ++++++++---------------- tests/test_cli_compile.py | 6 ++---- tests/test_cli_sync.py | 4 +--- tests/test_repository_pypi.py | 2 +- tests/test_utils.py | 7 +++---- 14 files changed, 32 insertions(+), 69 deletions(-) diff --git a/piptools/cache.py b/piptools/cache.py index f5d7aeeeb..7f4413154 100644 --- a/piptools/cache.py +++ b/piptools/cache.py @@ -92,7 +92,7 @@ def as_cache_key(self, ireq): if not extras: extras_string = "" else: - extras_string = "[{}]".format(",".join(extras)) + extras_string = f"[{','.join(extras)}]" return name, f"{version}{extras_string}" def read_cache(self): diff --git a/piptools/exceptions.py b/piptools/exceptions.py index 715129e76..fdd9b3c11 100644 --- a/piptools/exceptions.py +++ b/piptools/exceptions.py @@ -25,7 +25,7 @@ def __str__(self): lines = [f"Could not find a version that matches {self.ireq}"] if versions: - lines.append("Tried: {}".format(", ".join(versions))) + lines.append(f"Tried: {', '.join(versions)}") if pre_versions: if self.finder.allow_all_prereleases: @@ -33,7 +33,7 @@ def __str__(self): else: line = "Skipped" - line += " pre-versions: {}".format(", ".join(pre_versions)) + line += f" pre-versions: {', '.join(pre_versions)}" lines.append(line) if versions or pre_versions: diff --git a/piptools/locations.py b/piptools/locations.py index 9ca0ffe43..40f4072e7 100644 --- a/piptools/locations.py +++ 
b/piptools/locations.py @@ -17,9 +17,7 @@ if os.path.exists(LEGACY_CACHE_DIR): secho( - "Removing old cache dir {} (new cache dir is {})".format( - LEGACY_CACHE_DIR, CACHE_DIR - ), + f"Removing old cache dir {LEGACY_CACHE_DIR} (new cache dir is {CACHE_DIR})", fg="yellow", ) rmtree(LEGACY_CACHE_DIR) diff --git a/piptools/repositories/pypi.py b/piptools/repositories/pypi.py index ab6724f90..a1b79ed42 100644 --- a/piptools/repositories/pypi.py +++ b/piptools/repositories/pypi.py @@ -218,9 +218,7 @@ def get_dependencies(self, ireq): ireq.editable or is_url_requirement(ireq) or is_pinned_requirement(ireq) ): raise TypeError( - "Expected url, pinned or editable InstallRequirement, got {}".format( - ireq - ) + f"Expected url, pinned or editable InstallRequirement, got {ireq}" ) if ireq not in self._dependencies_cache: @@ -271,11 +269,7 @@ def _get_project(self, ireq): try: response = self.session.get(url) except RequestException as e: - log.debug( - "Fetch package info from PyPI failed: {url}: {e}".format( - url=url, e=e - ) - ) + log.debug(f"Fetch package info from PyPI failed: {url}: {e}") continue # Skip this PyPI server, because there is no package @@ -286,11 +280,7 @@ def _get_project(self, ireq): try: data = response.json() except ValueError as e: - log.debug( - "Cannot parse JSON response from PyPI: {url}: {e}".format( - url=url, e=e - ) - ) + log.debug(f"Cannot parse JSON response from PyPI: {url}: {e}") continue return data return None @@ -368,9 +358,7 @@ def _get_hashes_from_pypi(self, ireq): try: hashes = { - "{algo}:{digest}".format( - algo=FAVORITE_HASH, digest=file_["digests"][FAVORITE_HASH] - ) + f"{FAVORITE_HASH}:{file_['digests'][FAVORITE_HASH]}" for file_ in release_files if file_["packagetype"] in self.HASHABLE_PACKAGE_TYPES } @@ -408,9 +396,7 @@ def _get_file_hash(self, link): # Choose a context manager depending on verbosity if log.verbosity >= 1: iter_length = f.size / FILE_CHUNK_SIZE if f.size else None - bar_template = "{prefix} |%(bar)s| %(info)s".format( - prefix=" " * log.current_indent - ) + bar_template = f"{' ' * log.current_indent} |%(bar)s| %(info)s" context_manager = progressbar( chunks, length=iter_length, diff --git a/piptools/resolver.py b/piptools/resolver.py index 6fe9f3a6b..31b546381 100644 --- a/piptools/resolver.py +++ b/piptools/resolver.py @@ -164,7 +164,7 @@ def resolve(self, max_rounds=10): ) log.debug("") - log.debug(magenta("{:^60}".format(f"ROUND {current_round}"))) + log.debug(magenta(f"{f'ROUND {current_round}':^60}")) # If a package version (foo==2.0) was built in a previous round, # and in this round a different version of foo needs to be built # (i.e. 
foo==1.0), the directory will exist already, which will @@ -372,7 +372,7 @@ def _iter_dependencies(self, ireq): # from there if ireq not in self.dependency_cache: log.debug( - "{} not in cache, need to check index".format(format_requirement(ireq)), + f"{format_requirement(ireq)} not in cache, need to check index", fg="yellow", ) dependencies = self.repository.get_dependencies(ireq) diff --git a/piptools/scripts/compile.py b/piptools/scripts/compile.py index abd7b6fd6..f9e26ea0a 100755 --- a/piptools/scripts/compile.py +++ b/piptools/scripts/compile.py @@ -383,9 +383,7 @@ def cli( dist = run_setup(src_file) tmpfile.write("\n".join(dist.install_requires)) - comes_from = "{name} ({filename})".format( - name=dist.get_name(), filename=src_file - ) + comes_from = f"{dist.get_name()} ({src_file})" else: tmpfile.write(sys.stdin.read()) comes_from = "-r -" diff --git a/piptools/sync.py b/piptools/sync.py index 7a92b1ef2..a53063247 100644 --- a/piptools/sync.py +++ b/piptools/sync.py @@ -174,7 +174,7 @@ def sync(to_install, to_uninstall, dry_run=False, install_flags=None, ask=False) if to_install: click.echo("Would install:") for ireq in sorted(to_install, key=key_from_ireq): - click.echo(" {}".format(format_requirement(ireq))) + click.echo(f" {format_requirement(ireq)}") exit_code = 1 diff --git a/piptools/utils.py b/piptools/utils.py index 49cf998c7..52bf06cfd 100644 --- a/piptools/utils.py +++ b/piptools/utils.py @@ -52,7 +52,7 @@ def make_install_requirement(name, version, extras, constraint=False): extras_string = "" if extras: # Sort extras for stability - extras_string = "[{}]".format(",".join(sorted(extras))) + extras_string = f"[{','.join(sorted(extras))}]" return install_req_from_line( str(f"{name}{extras_string}=={version}"), constraint=constraint @@ -341,16 +341,10 @@ def get_compile_command(click_ctx): # shlex.quote() would produce functional but noisily quoted results, # e.g. 
--pip-args='--cache-dir='"'"'/tmp/with spaces'"'"'' # Instead, we try to get more legible quoting via repr: - left_args.append( - "{option}={value}".format( - option=option_long_name, value=repr(val) - ) - ) + left_args.append(f"{option_long_name}={repr(val)}") else: left_args.append( - "{option}={value}".format( - option=option_long_name, value=shlex.quote(force_text(val)) - ) + f"{option_long_name}={shlex.quote(force_text(val))}" ) return " ".join(["pip-compile"] + sorted(left_args) + sorted(right_args)) diff --git a/piptools/writer.py b/piptools/writer.py index 1798141c4..79796f4d4 100644 --- a/piptools/writer.py +++ b/piptools/writer.py @@ -235,5 +235,5 @@ def _format_requirement(self, ireq, marker=None, hashes=None): for source in required_by: annotation_lines.append(" # " + source) annotation = "\n".join(annotation_lines) - line = "{}\n{}".format(line, comment(annotation)) + line = f"{line}\n{comment(annotation)}" return line diff --git a/tests/conftest.py b/tests/conftest.py index 27239bb24..aba28d14f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -207,13 +207,11 @@ def _make_pip_conf(content): def pip_conf(make_pip_conf): return make_pip_conf( dedent( - """\ + f"""\ [global] no-index = true - find-links = {wheels_path} - """.format( - wheels_path=MINIMAL_WHEELS_PATH - ) + find-links = {MINIMAL_WHEELS_PATH} + """ ) ) @@ -222,13 +220,11 @@ def pip_conf(make_pip_conf): def pip_with_index_conf(make_pip_conf): return make_pip_conf( dedent( - """\ + f"""\ [global] index-url = http://example.com - find-links = {wheels_path} - """.format( - wheels_path=MINIMAL_WHEELS_PATH - ) + find-links = {MINIMAL_WHEELS_PATH} + """ ) ) @@ -254,7 +250,7 @@ def _make_package(name, version="0.1", install_requires=None): with open(setup_file, "w") as fp: fp.write( dedent( - """\ + f"""\ from setuptools import setup setup( name={name!r}, @@ -264,11 +260,7 @@ def _make_package(name, version="0.1", install_requires=None): url="https://github.com/jazzband/pip-tools", install_requires={install_requires_str}, ) - """.format( - name=name, - version=version, - install_requires_str=install_requires_str, - ) + """ ) ) diff --git a/tests/test_cli_compile.py b/tests/test_cli_compile.py index 7420c2a14..82b0a5654 100644 --- a/tests/test_cli_compile.py +++ b/tests/test_cli_compile.py @@ -876,12 +876,10 @@ def test_default_index_url(make_pip_conf, url, expected_url): """ make_pip_conf( dedent( - """\ + f"""\ [global] index-url = {url} - """.format( - url=url - ) + """ ) ) diff --git a/tests/test_cli_sync.py b/tests/test_cli_sync.py index bba09fa16..fe4fc94e1 100644 --- a/tests/test_cli_sync.py +++ b/tests/test_cli_sync.py @@ -202,9 +202,7 @@ def test_pip_install_flags_in_requirements_file(run, runner, install_flags): # Make sure pip install command has expected options call_args = [call[0][0] for call in run.call_args_list] called_install_options = [args[6:] for args in call_args if args[3] == "install"] - assert called_install_options == [install_flags], "Called args: {}".format( - call_args - ) + assert called_install_options == [install_flags], f"Called args: {call_args}" @mock.patch("piptools.sync.run") diff --git a/tests/test_repository_pypi.py b/tests/test_repository_pypi.py index 1c5d4730f..75abc8100 100644 --- a/tests/test_repository_pypi.py +++ b/tests/test_repository_pypi.py @@ -377,7 +377,7 @@ def test_name_collision(from_line, pypi_repository, make_package, make_sdist, tm make_sdist(pkg, pkg_path, "--formats=zip") os.rename( - os.path.join(pkg_path, "{}-{}.zip".format(pkg_name, "0.1")), + 
os.path.join(pkg_path, f"{pkg_name}-0.1.zip"), os.path.join(pkg_path, "master.zip"), ) diff --git a/tests/test_utils.py b/tests/test_utils.py index b1ba984e3..1784a7828 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -317,10 +317,9 @@ def test_get_compile_command_with_files(tmpdir_cwd, filename): args = [path, "--output-file", "requirements.txt"] with compile_cli.make_context("pip-compile", args) as ctx: - assert get_compile_command( - ctx - ) == "pip-compile --output-file=requirements.txt {src_file}".format( - src_file=shlex.quote(path) + assert ( + get_compile_command(ctx) + == f"pip-compile --output-file=requirements.txt {shlex.quote(path)}" ) From b81f222f81ed7ffe1438d3c40ac9ffddce74e3e3 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Thu, 3 Dec 2020 16:43:34 -0800 Subject: [PATCH 5/9] Use unpacking generalizations throughout --- piptools/resolver.py | 4 +--- piptools/sync.py | 39 ++++++++++++++++++++++++++++----------- piptools/utils.py | 2 +- tests/conftest.py | 2 +- tests/test_cli_compile.py | 8 ++++---- tests/test_sync.py | 2 +- 6 files changed, 36 insertions(+), 21 deletions(-) diff --git a/piptools/resolver.py b/piptools/resolver.py index 31b546381..f53493190 100644 --- a/piptools/resolver.py +++ b/piptools/resolver.py @@ -73,9 +73,7 @@ def combine_install_requirements(repository, ireqs): repository.copy_ireq_dependencies(ireq, combined_ireq) combined_ireq.constraint &= ireq.constraint # Return a sorted, de-duped tuple of extras - combined_ireq.extras = tuple( - sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras))) - ) + combined_ireq.extras = tuple(sorted({*combined_ireq.extras, *ireq.extras})) # InstallRequirements objects are assumed to come from only one source, and # so they support only a single comes_from entry. 
This function breaks this diff --git a/piptools/sync.py b/piptools/sync.py index a53063247..4d6cae9ab 100644 --- a/piptools/sync.py +++ b/piptools/sync.py @@ -19,11 +19,15 @@ key_from_req, ) -PACKAGES_TO_IGNORE = ( - ["-markerlib", "pip", "pip-tools", "pip-review", "pkg-resources"] - + list(stdlib_pkgs) - + list(DEV_PKGS) -) +PACKAGES_TO_IGNORE = [ + "-markerlib", + "pip", + "pip-tools", + "pip-review", + "pkg-resources", + *stdlib_pkgs, + *DEV_PKGS, +] def dependency_tree(installed_keys, root_key): @@ -185,9 +189,15 @@ def sync(to_install, to_uninstall, dry_run=False, install_flags=None, ask=False) if not dry_run: if to_uninstall: run( # nosec - [sys.executable, "-m", "pip", "uninstall", "-y"] - + pip_flags - + sorted(to_uninstall), + [ + sys.executable, + "-m", + "pip", + "uninstall", + "-y", + *pip_flags, + *sorted(to_uninstall), + ], check=True, ) @@ -207,9 +217,16 @@ def sync(to_install, to_uninstall, dry_run=False, install_flags=None, ask=False) try: run( # nosec - [sys.executable, "-m", "pip", "install", "-r", tmp_req_file.name] - + pip_flags - + install_flags, + [ + sys.executable, + "-m", + "pip", + "install", + "-r", + tmp_req_file.name, + *pip_flags, + *install_flags, + ], check=True, ) finally: diff --git a/piptools/utils.py b/piptools/utils.py index 52bf06cfd..15ec21fe1 100644 --- a/piptools/utils.py +++ b/piptools/utils.py @@ -347,4 +347,4 @@ def get_compile_command(click_ctx): f"{option_long_name}={shlex.quote(force_text(val))}" ) - return " ".join(["pip-compile"] + sorted(left_args) + sorted(right_args)) + return " ".join(["pip-compile", *sorted(left_args), *sorted(right_args)]) diff --git a/tests/conftest.py b/tests/conftest.py index aba28d14f..bb541e753 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -283,7 +283,7 @@ def run_setup_file(): def _run_setup_file(package_dir_path, *args): setup_file = str(package_dir_path / "setup.py") return subprocess.run( - (sys.executable, setup_file) + args, + [sys.executable, setup_file, *args], cwd=str(package_dir_path), stdout=subprocess.DEVNULL, check=True, diff --git a/tests/test_cli_compile.py b/tests/test_cli_compile.py index 82b0a5654..12db0f422 100644 --- a/tests/test_cli_compile.py +++ b/tests/test_cli_compile.py @@ -249,7 +249,7 @@ def test_trusted_host_envvar(monkeypatch, pip_conf, runner): def test_all_no_emit_options(runner, options): with open("requirements.in", "w"): pass - out = runner.invoke(cli, ["--no-header"] + options) + out = runner.invoke(cli, ["--no-header", *options]) assert out.stderr.strip().splitlines() == [] @@ -1081,7 +1081,7 @@ def test_forwarded_args(PyPIRepository, runner): cli_args = ("--no-annotate", "--generate-hashes") pip_args = ("--no-color", "--isolated", "--disable-pip-version-check") - runner.invoke(cli, cli_args + ("--pip-args", " ".join(pip_args))) + runner.invoke(cli, [*cli_args, "--pip-args", " ".join(pip_args)]) args, kwargs = PyPIRepository.call_args assert set(pip_args).issubset(set(args[0])) @@ -1130,7 +1130,7 @@ def test_dry_run_option(pip_conf, runner, add_options): with open("requirements.in", "w") as req_in: req_in.write("small-fake-a\n") - out = runner.invoke(cli, ["--no-annotate", "--dry-run"] + add_options) + out = runner.invoke(cli, ["--no-annotate", "--dry-run", *add_options]) assert out.exit_code == 0, out.stderr assert "small-fake-a==0.2" in out.stderr.splitlines() @@ -1165,7 +1165,7 @@ def test_dry_run_doesnt_touch_output_file( before_compile_mtime = os.stat("requirements.txt").st_mtime - out = runner.invoke(cli, ["--no-annotate", "--dry-run"] + add_options) + out = 
runner.invoke(cli, ["--no-annotate", "--dry-run", *add_options]) assert out.exit_code == 0, out.stderr assert expected_cli_output_package in out.stderr.splitlines() diff --git a/tests/test_sync.py b/tests/test_sync.py index 6d560b759..7964282d1 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -469,6 +469,6 @@ def test_sync_uninstall_pip_command(run): sync(set(), to_uninstall) run.assert_called_once_with( - [sys.executable, "-m", "pip", "uninstall", "-y"] + sorted(to_uninstall), + [sys.executable, "-m", "pip", "uninstall", "-y", *sorted(to_uninstall)], check=True, ) From eac81c522417efd235a746164bff8aff9355218e Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Fri, 4 Dec 2020 01:55:29 -0800 Subject: [PATCH 6/9] Simplify tests using standard pytest tooling - Monkey patch sys.stdin using io.StringIO(). This avoids the need to use yet-another-tool rather than the tools providing by the test library. - This reduces indentation to slightly improve readability. - Allows for more accurate assertion of stdout and stderr. --- tests/test_sync.py | 59 +++++++++++++++++++++++++--------------------- 1 file changed, 32 insertions(+), 27 deletions(-) diff --git a/tests/test_sync.py b/tests/test_sync.py index 7964282d1..52bea8d4f 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -1,3 +1,4 @@ +import io import os import sys import tempfile @@ -365,13 +366,14 @@ def test_sync_requirement_file_with_hashes( mocked_tmp_req_file.write.assert_called_once_with(expected) -def test_sync_up_to_date(runner): +def test_sync_up_to_date(capsys, runner): """ Everything up-to-date should be printed. """ - with runner.isolation() as (stdout, _): - sync(set(), set()) - assert stdout.getvalue().decode().splitlines() == ["Everything up-to-date"] + sync(set(), set()) + captured = capsys.readouterr() + assert captured.out.splitlines() == ["Everything up-to-date"] + assert captured.err == "" @mock.patch("piptools.sync.run") @@ -393,20 +395,21 @@ def test_sync_verbose(run, from_line): (set(), {"django==1.8", "click==4.0"}, "Would uninstall:"), ), ) -def test_sync_dry_run(runner, from_line, to_install, to_uninstall, expected_message): +def test_sync_dry_run( + capsys, runner, from_line, to_install, to_uninstall, expected_message +): """ Sync with --dry-run option prints what's is going to be installed/uninstalled. 
""" to_install = {from_line(pkg) for pkg in to_install} - - with runner.isolation() as (stdout, _): - sync(to_install, to_uninstall, dry_run=True) - - assert stdout.getvalue().decode().splitlines() == [ + sync(to_install, to_uninstall, dry_run=True) + captured = capsys.readouterr() + assert captured.out.splitlines() == [ expected_message, " click==4.0", " django==1.8", ] + captured.err == "" @pytest.mark.parametrize( @@ -418,49 +421,51 @@ def test_sync_dry_run(runner, from_line, to_install, to_uninstall, expected_mess ) @mock.patch("piptools.sync.run") def test_sync_ask_declined( - run, runner, from_line, to_install, to_uninstall, expected_message + run, monkeypatch, capsys, from_line, to_install, to_uninstall, expected_message ): """ Sync with --ask option does a dry run if the user declines """ + monkeypatch.setattr("sys.stdin", io.StringIO("n\n")) to_install = {from_line(pkg) for pkg in to_install} + sync(to_install, to_uninstall, ask=True) - with runner.isolation("n\n") as (stdout, _): - sync(to_install, to_uninstall, ask=True) - - assert stdout.getvalue().decode().splitlines() == [ + out, err = capsys.readouterr() + assert out.splitlines() == [ expected_message, " click==4.0", " django==1.8", - "Would you like to proceed with these changes? [y/N]: n", + "Would you like to proceed with these changes? [y/N]: ", ] + assert err == "" run.assert_not_called() @pytest.mark.parametrize("dry_run", (True, False)) @mock.patch("piptools.sync.run") -def test_sync_ask_accepted(run, runner, from_line, dry_run): +def test_sync_ask_accepted(run, monkeypatch, capsys, from_line, dry_run): """ pip should be called as normal when the user confirms, even with dry_run """ - - with runner.isolation("y\n") as (stdout, _): - sync( - {from_line("django==1.8")}, - {from_line("click==4.0")}, - ask=True, - dry_run=dry_run, - ) + monkeypatch.setattr("sys.stdin", io.StringIO("y\n")) + sync( + {from_line("django==1.8")}, + {from_line("click==4.0")}, + ask=True, + dry_run=dry_run, + ) assert run.call_count == 2 - assert stdout.getvalue().decode().splitlines() == [ + out, err = capsys.readouterr() + assert out.splitlines() == [ "Would uninstall:", " click==4.0", "Would install:", " django==1.8", - "Would you like to proceed with these changes? [y/N]: y", + "Would you like to proceed with these changes? [y/N]: ", ] + assert err == "" @mock.patch("piptools.sync.run") From d9ade3cade3ca8fba7d0ae113493141c9d450d48 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Fri, 4 Dec 2020 01:59:31 -0800 Subject: [PATCH 7/9] Prefer capsys instead of capfd capfd was used to workaround a compatibility issue between Python 2, Windows and Click. With Python 2 gone, can use capsys. 
--- tests/test_repository_local.py | 8 ++++---- tests/test_repository_pypi.py | 4 ++-- tests/test_sync.py | 12 ++++++------ tests/test_writer.py | 8 ++++---- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/tests/test_repository_local.py b/tests/test_repository_local.py index 74e6a6269..196ba5d1f 100644 --- a/tests/test_repository_local.py +++ b/tests/test_repository_local.py @@ -10,14 +10,14 @@ def test_get_hashes_local_repository_cache_miss( - capfd, pip_conf, from_line, pypi_repository + capsys, pip_conf, from_line, pypi_repository ): existing_pins = {} local_repository = LocalRequirementsRepository(existing_pins, pypi_repository) with local_repository.allow_all_wheels(): hashes = local_repository.get_hashes(from_line("small-fake-a==0.1")) assert hashes == EXPECTED - captured = capfd.readouterr() + captured = capsys.readouterr() assert captured.out == "" assert ( captured.err.strip() @@ -45,7 +45,7 @@ def test_get_hashes_local_repository_cache_hit(from_line, repository): ("reuse_hashes", "expected"), ((True, NONSENSE), (False, EXPECTED)) ) def test_toggle_reuse_hashes_local_repository( - capfd, pip_conf, from_line, pypi_repository, reuse_hashes, expected + capsys, pip_conf, from_line, pypi_repository, reuse_hashes, expected ): # Create an install requirement with the hashes included in its options options = {"hashes": {"sha256": [entry.split(":")[1] for entry in NONSENSE]}} @@ -57,7 +57,7 @@ def test_toggle_reuse_hashes_local_repository( ) with local_repository.allow_all_wheels(): assert local_repository.get_hashes(from_line("small-fake-a==0.1")) == expected - captured = capfd.readouterr() + captured = capsys.readouterr() assert captured.out == "" if reuse_hashes: assert captured.err == "" diff --git a/tests/test_repository_pypi.py b/tests/test_repository_pypi.py index 75abc8100..6d02170dd 100644 --- a/tests/test_repository_pypi.py +++ b/tests/test_repository_pypi.py @@ -10,7 +10,7 @@ from piptools.repositories.pypi import open_local_or_remote_file -def test_generate_hashes_all_platforms(capfd, pip_conf, from_line, pypi_repository): +def test_generate_hashes_all_platforms(capsys, pip_conf, from_line, pypi_repository): expected = { "sha256:8d4d131cd05338e09f461ad784297efea3652e542c5fabe04a62358429a6175e", "sha256:ad05e1371eb99f257ca00f791b755deb22e752393eb8e75bc01d651715b02ea9", @@ -20,7 +20,7 @@ def test_generate_hashes_all_platforms(capfd, pip_conf, from_line, pypi_reposito ireq = from_line("small-fake-multi-arch==0.1") with pypi_repository.allow_all_wheels(): assert pypi_repository.get_hashes(ireq) == expected - captured = capfd.readouterr() + captured = capsys.readouterr() assert captured.out == "" assert ( captured.err.strip() diff --git a/tests/test_sync.py b/tests/test_sync.py index 52bea8d4f..4c3dd1dc9 100644 --- a/tests/test_sync.py +++ b/tests/test_sync.py @@ -431,14 +431,14 @@ def test_sync_ask_declined( to_install = {from_line(pkg) for pkg in to_install} sync(to_install, to_uninstall, ask=True) - out, err = capsys.readouterr() - assert out.splitlines() == [ + captured = capsys.readouterr() + assert captured.out.splitlines() == [ expected_message, " click==4.0", " django==1.8", "Would you like to proceed with these changes? 
[y/N]: ", ] - assert err == "" + assert captured.err == "" run.assert_not_called() @@ -457,15 +457,15 @@ def test_sync_ask_accepted(run, monkeypatch, capsys, from_line, dry_run): ) assert run.call_count == 2 - out, err = capsys.readouterr() - assert out.splitlines() == [ + captured = capsys.readouterr() + assert captured.out.splitlines() == [ "Would uninstall:", " click==4.0", "Would install:", " django==1.8", "Would you like to proceed with these changes? [y/N]: ", ] - assert err == "" + assert captured.err == "" @mock.patch("piptools.sync.run") diff --git a/tests/test_writer.py b/tests/test_writer.py index 3e0b37a32..071f30d15 100644 --- a/tests/test_writer.py +++ b/tests/test_writer.py @@ -118,7 +118,7 @@ def test_iter_lines__unsafe_dependencies(writer, from_line, allow_unsafe): assert tuple(lines) == expected_lines -def test_iter_lines__unsafe_with_hashes(capfd, writer, from_line): +def test_iter_lines__unsafe_with_hashes(capsys, writer, from_line): writer.allow_unsafe = False writer.emit_header = False ireqs = [from_line("test==1.2")] @@ -134,12 +134,12 @@ def test_iter_lines__unsafe_with_hashes(capfd, writer, from_line): comment("# setuptools"), ) assert tuple(lines) == expected_lines - captured = capfd.readouterr() + captured = capsys.readouterr() assert captured.out == "" assert captured.err.strip() == MESSAGE_UNINSTALLABLE -def test_iter_lines__hash_missing(capfd, writer, from_line): +def test_iter_lines__hash_missing(capsys, writer, from_line): writer.allow_unsafe = False writer.emit_header = False ireqs = [from_line("test==1.2"), from_line("file:///example/#egg=example")] @@ -153,7 +153,7 @@ def test_iter_lines__hash_missing(capfd, writer, from_line): "test==1.2 \\\n --hash=FAKEHASH", ) assert tuple(lines) == expected_lines - captured = capfd.readouterr() + captured = capsys.readouterr() assert captured.out == "" assert captured.err.strip() == MESSAGE_UNINSTALLABLE From a9575ef376867fe41c127c314838280e8616dd12 Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Fri, 4 Dec 2020 07:43:44 -0800 Subject: [PATCH 8/9] Use more specific JSONDecodeError Introduced in Python 3.5 --- piptools/cache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/piptools/cache.py b/piptools/cache.py index 7f4413154..027c180a6 100644 --- a/piptools/cache.py +++ b/piptools/cache.py @@ -37,7 +37,7 @@ def read_cache_file(cache_file_path): with open(cache_file_path) as cache_file: try: doc = json.load(cache_file) - except ValueError: + except json.JSONDecodeError: raise CorruptCacheError(cache_file_path) # Check version and load the contents From f63953e2f9266d1ac4c191bcde1b73e8d4e2819d Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Sat, 5 Dec 2020 14:13:34 -0800 Subject: [PATCH 9/9] Add pathlib support --- piptools/repositories/pypi.py | 2 +- tests/conftest.py | 15 ++++++--------- tests/test_cache.py | 11 ++++------- tests/test_repository_pypi.py | 8 ++++---- tests/test_top_level_editable.py | 2 +- 5 files changed, 16 insertions(+), 22 deletions(-) diff --git a/piptools/repositories/pypi.py b/piptools/repositories/pypi.py index a1b79ed42..0b7bad8ad 100644 --- a/piptools/repositories/pypi.py +++ b/piptools/repositories/pypi.py @@ -86,7 +86,7 @@ def __init__(self, pip_args, cache_dir): # Setup file paths self._build_dir = None self._source_dir = None - self._cache_dir = normalize_path(cache_dir) + self._cache_dir = normalize_path(str(cache_dir)) self._download_dir = os.path.join(self._cache_dir, "pkgs") if PIP_VERSION[:2] <= (20, 2): self._wheel_download_dir = os.path.join(self._cache_dir, 
"wheels") diff --git a/tests/conftest.py b/tests/conftest.py index bb541e753..0936845ed 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -135,13 +135,13 @@ def repository(): def pypi_repository(tmpdir): return PyPIRepository( ["--index-url", PyPIRepository.DEFAULT_INDEX_URL], - cache_dir=str(tmpdir / "pypi-repo"), + cache_dir=(tmpdir / "pypi-repo"), ) @pytest.fixture def depcache(tmpdir): - return DependencyCache(str(tmpdir / "dep-cache")) + return DependencyCache(tmpdir / "dep-cache") @pytest.fixture @@ -246,8 +246,7 @@ def _make_package(name, version="0.1", install_requires=None): package_dir = tmp_path / "packages" / name / version package_dir.mkdir(parents=True) - setup_file = str(package_dir / "setup.py") - with open(setup_file, "w") as fp: + with (package_dir / "setup.py").open("w") as fp: fp.write( dedent( f"""\ @@ -265,9 +264,7 @@ def _make_package(name, version="0.1", install_requires=None): ) # Create a README to avoid setuptools warnings. - readme_file = str(package_dir / "README") - with open(readme_file, "w"): - pass + (package_dir / "README").touch() return package_dir @@ -281,9 +278,9 @@ def run_setup_file(): """ def _run_setup_file(package_dir_path, *args): - setup_file = str(package_dir_path / "setup.py") + setup_file = package_dir_path / "setup.py" return subprocess.run( - [sys.executable, setup_file, *args], + [sys.executable, str(setup_file), *args], cwd=str(package_dir_path), stdout=subprocess.DEVNULL, check=True, diff --git a/tests/test_cache.py b/tests/test_cache.py index 1eebf1399..dac60022e 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -64,7 +64,7 @@ def test_read_cache_file_successful(): def test_read_cache_does_not_exist(tmpdir): - cache = DependencyCache(cache_dir=str(tmpdir)) + cache = DependencyCache(cache_dir=tmpdir) assert cache.cache == {} @@ -72,7 +72,7 @@ def test_read_cache_does_not_exist(tmpdir): sys.platform == "win32", reason="os.fchmod() not available on Windows" ) def test_read_cache_permission_error(tmpdir): - cache = DependencyCache(cache_dir=str(tmpdir)) + cache = DependencyCache(cache_dir=tmpdir) with open(cache._cache_file, "w") as fp: os.fchmod(fp.fileno(), 0o000) with pytest.raises(IOError, match="Permission denied"): @@ -80,12 +80,9 @@ def test_read_cache_permission_error(tmpdir): def test_reverse_dependencies(from_line, tmpdir): - # Since this is a test, make a temporary directory. Converting to str from py.path. - tmp_dir_path = str(tmpdir) - # Create a cache object. The keys are packages, and the values are lists # of packages on which the keys depend. 
- cache = DependencyCache(cache_dir=tmp_dir_path) + cache = DependencyCache(cache_dir=tmpdir) cache[from_line("top==1.2")] = ["middle>=0.3", "bottom>=5.1.2"] cache[from_line("top[xtra]==1.2")] = ["middle>=0.3", "bottom>=5.1.2", "bonus==0.4"] cache[from_line("middle==0.4")] = ["bottom<6"] @@ -121,4 +118,4 @@ def test_reverse_dependencies(from_line, tmpdir): } # Clean up our temp directory - rmtree(tmp_dir_path) + rmtree(tmpdir) diff --git a/tests/test_repository_pypi.py b/tests/test_repository_pypi.py index 6d02170dd..0dd2d72d8 100644 --- a/tests/test_repository_pypi.py +++ b/tests/test_repository_pypi.py @@ -160,7 +160,7 @@ def test_relative_path_cache_dir_is_normalized(from_line): def test_relative_path_pip_cache_dir_is_normalized(from_line, tmpdir): relative_cache_dir = "pip-cache" pypi_repository = PyPIRepository( - ["--cache-dir", relative_cache_dir], cache_dir=str(tmpdir / "pypi-repo-cache") + ["--cache-dir", relative_cache_dir], cache_dir=(tmpdir / "pypi-repo-cache") ) assert os.path.isabs(pypi_repository.options.cache_dir) @@ -169,7 +169,7 @@ def test_relative_path_pip_cache_dir_is_normalized(from_line, tmpdir): def test_pip_cache_dir_is_empty(from_line, tmpdir): pypi_repository = PyPIRepository( - ["--no-cache-dir"], cache_dir=str(tmpdir / "pypi-repo-cache") + ["--no-cache-dir"], cache_dir=(tmpdir / "pypi-repo-cache") ) assert not pypi_repository.options.cache_dir @@ -264,7 +264,7 @@ def _get_project(self, ireq): return project_data pypi_repository = MockPyPIRepository( - ["--no-cache-dir"], cache_dir=str(tmpdir / "pypi-repo-cache") + ["--no-cache-dir"], cache_dir=(tmpdir / "pypi-repo-cache") ) ireq = from_line("fake-package==0.1") @@ -372,7 +372,7 @@ def test_name_collision(from_line, pypi_repository, make_package, make_sdist, tm } for pkg_name, pkg in packages.items(): - pkg_path = str(tmpdir / pkg_name) + pkg_path = tmpdir / pkg_name make_sdist(pkg, pkg_path, "--formats=zip") diff --git a/tests/test_top_level_editable.py b/tests/test_top_level_editable.py index 9391e905e..f92406b86 100644 --- a/tests/test_top_level_editable.py +++ b/tests/test_top_level_editable.py @@ -19,7 +19,7 @@ def get_dependencies(self, ireq): @pytest.fixture def mocked_repository(tmpdir): - return MockedPyPIRepository(["--no-index"], cache_dir=str(tmpdir / "pypi-repo")) + return MockedPyPIRepository(["--no-index"], cache_dir=(tmpdir / "pypi-repo")) def test_editable_top_level_deps_preserved(