Merge pull request #6145 from pypa/fix-ruff-errors-spring-24
apply ruff fixes
matteius committed Apr 27, 2024
2 parents 6249346 + 80d941f commit 3ac18c3
Showing 7 changed files with 51 additions and 51 deletions.
4 changes: 2 additions & 2 deletions pipenv/cli/options.py
@@ -467,7 +467,7 @@ def validate_python_path(ctx, param, value):
# we'll report absolute paths which do not exist:
if isinstance(value, (str, bytes)):
if os.path.isabs(value) and not os.path.isfile(value):
raise BadParameter("Expected Python at path %s does not exist" % value)
raise BadParameter(f"Expected Python at path {value} does not exist")
return value


@@ -479,7 +479,7 @@ def validate_bool_or_none(ctx, param, value):

def validate_pypi_mirror(ctx, param, value):
if value and not is_valid_url(value):
raise BadParameter("Invalid PyPI mirror URL: %s" % value)
raise BadParameter(f"Invalid PyPI mirror URL: {value}")
return value


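
The pattern in this file repeats across the whole commit: printf-style "%" interpolation is replaced by f-strings, presumably the autofix behind the "apply ruff fixes" message (ruff's pyupgrade rules UP031/UP032 do exactly this). A minimal sketch of the equivalence, independent of the pipenv code:

# Sketch: %-interpolation and an f-string render identically for %s.
value = "/opt/python/9.9/bin/python"  # hypothetical path, not from the diff
old_style = "Expected Python at path %s does not exist" % value
new_style = f"Expected Python at path {value} does not exist"
assert old_style == new_style
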
2 changes: 1 addition & 1 deletion pipenv/environment.py
@@ -310,7 +310,7 @@ def build_command(
lines = pylib_lines + pyinc_lines
if scripts:
lines.append(
"u'scripts': u'{0}'.format(%s)" % sysconfig_line.format("scripts")
"u'scripts': u'{{0}}'.format({})".format(sysconfig_line.format("scripts"))
)
if py_version:
lines.append(
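
This is the one non-mechanical rewrite in the commit: because the outer string now goes through str.format(), the literal {0} that must survive into the generated code has to be escaped as {{0}}. A standalone sketch, with a hypothetical sysconfig_line stand-in (the real one is defined elsewhere in environment.py):

# Sketch: once the outer string uses .format(), literal braces intended
# for the *generated* code must be doubled to escape them.
sysconfig_line = "sysconfig.get_path('{0}')"  # hypothetical stand-in
old = "u'scripts': u'{0}'.format(%s)" % sysconfig_line.format("scripts")
new = "u'scripts': u'{{0}}'.format({})".format(sysconfig_line.format("scripts"))
assert old == new  # both: u'scripts': u'{0}'.format(sysconfig.get_path('scripts'))
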
2 changes: 1 addition & 1 deletion pipenv/exceptions.py
@@ -157,7 +157,7 @@ def show(self, file=None):
if self.cmd is not None and self.cmd.get_help_option(self.ctx) is not None:
hint = f'Try "{self.ctx.command_path} {self.ctx.help_option_names[0]}" for help.\n'
if self.ctx is not None:
click.echo(self.ctx.get_usage() + "\n%s" % hint, file=file, color=color)
click.echo(self.ctx.get_usage() + f"\n{hint}", file=file, color=color)
click.echo(self.message, file=file)


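
A small point worth noting here: the \n escape is legal in the literal part of an f-string on every Python version; the pre-3.12 restriction only forbade backslashes inside the {...} replacement fields. A sketch with hypothetical usage text:

# Sketch: \n in the literal text of an f-string is fine everywhere;
# only backslashes *inside* {...} were rejected before Python 3.12.
usage = "Usage: pipenv [OPTIONS] COMMAND [ARGS]..."  # hypothetical usage text
hint = 'Try "pipenv --help" for help.'
assert usage + "\n%s" % hint == usage + f"\n{hint}"
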
14 changes: 7 additions & 7 deletions pipenv/utils/exceptions.py
@@ -14,7 +14,7 @@ def __init__(self, param):

@classmethod
def get_message(cls, param):
return "Missing Parameter: %s" % param
return f"Missing Parameter: {param}"

def show(self, param):
print(self.message, file=sys.stderr, flush=True)
@@ -37,9 +37,9 @@ def __init__(self, path, *args, **kwargs):
super().__init__(self.message)

def get_message(self, path, backup_path=None):
message = "ERROR: Failed to load file at %s" % path
message = f"ERROR: Failed to load file at {path}"
if backup_path:
msg = "it will be backed up to %s and removed" % backup_path
msg = f"it will be backed up to {backup_path} and removed"
else:
msg = "it will be removed and replaced on the next lock."
message = f"{message}\nYour lockfile is corrupt, {msg}"
@@ -55,9 +55,9 @@ def __init__(self, path, backup_path=None):
super().__init__(self.message)

def get_message(self, path, backup_path=None):
message = "ERROR: Failed to load lockfile at %s" % path
message = f"ERROR: Failed to load lockfile at {path}"
if backup_path:
msg = "it will be backed up to %s and removed" % backup_path
msg = f"it will be backed up to {backup_path} and removed"
else:
msg = "it will be removed and replaced on the next lock."
message = f"{message}\nYour lockfile is corrupt, {msg}"
@@ -73,9 +73,9 @@ def __init__(self, path, backup_path=None):
super().__init__(self.message)

def get_message(self, path, backup_path=None):
message = "ERROR: Failed to load Pipfile at %s" % path
message = f"ERROR: Failed to load Pipfile at {path}"
if backup_path:
msg = "it will be backed up to %s and removed" % backup_path
msg = f"it will be backed up to {backup_path} and removed"
else:
msg = "it will be removed and replaced on the next lock."
message = f"{message}\nYour Pipfile is corrupt, {msg}"
16 changes: 8 additions & 8 deletions pipenv/utils/requirementslib.py
@@ -361,7 +361,7 @@ def get_path(root, path, default=_UNSET):
cur = cur[seg]
except (ValueError, KeyError, IndexError, TypeError):
if not getattr(cur, "__iter__", None):
exc = TypeError("%r object is not indexable" % type(cur).__name__)
exc = TypeError(f"{type(cur).__name__!r} object is not indexable")
raise PathAccessError(exc, seg, path)
except PathAccessError:
if default is _UNSET:
@@ -429,7 +429,7 @@ def dict_path_exit(path, key, old_parent, new_parent, new_items):
except AttributeError:
ret = new_parent.__class__(vals) # frozensets
else:
raise RuntimeError("unexpected iterable type: %r" % type(new_parent))
raise RuntimeError(f"unexpected iterable type: {type(new_parent)!r}")
return ret


@@ -519,14 +519,14 @@ def remap(
# TODO: improve argument formatting in sphinx doc
# TODO: enter() return (False, items) to continue traverse but cancel copy?
if not callable(visit):
raise TypeError("visit expected callable, not: %r" % visit)
raise TypeError(f"visit expected callable, not: {visit!r}")
if not callable(enter):
raise TypeError("enter expected callable, not: %r" % enter)
raise TypeError(f"enter expected callable, not: {enter!r}")
if not callable(exit):
raise TypeError("exit expected callable, not: %r" % exit)
raise TypeError(f"exit expected callable, not: {exit!r}")
reraise_visit = kwargs.pop("reraise_visit", True)
if kwargs:
raise TypeError("unexpected keyword arguments: %r" % kwargs.keys())
raise TypeError(f"unexpected keyword arguments: {kwargs.keys()!r}")

path, registry, stack = (), {}, [(None, root)]
new_items_stack = []
@@ -551,7 +551,7 @@ def remap(
# TODO: handle False?
raise TypeError(
"enter should return a tuple of (new_parent,"
" items_iterator), not: %r" % res
f" items_iterator), not: {res!r}"
)
if new_items is not False:
# traverse unless False is explicitly passed
@@ -583,7 +583,7 @@ def remap(
try:
new_items_stack[-1][1].append(visited_item)
except IndexError:
raise TypeError("expected remappable root, not: %r" % root)
raise TypeError(f"expected remappable root, not: {root!r}")
return value


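
Two details in this file: printf's %r maps to the f-string conversion flag !r, which renders repr() of the value, and in the multi-line TypeError message only the fragment containing a placeholder gains the f prefix, with implicit string concatenation joining it to the untouched literal before it. A sketch, independent of the remap code:

# Sketch: %r and !r both render repr(); with implicitly concatenated
# literals, only the fragment holding a placeholder needs the f prefix.
res = "oops"  # hypothetical bad return value
assert "not: %r" % res == f"not: {res!r}"  # both: not: 'oops'
msg = ("enter should return a tuple of (new_parent,"
       f" items_iterator), not: {res!r}")
assert msg == "enter should return a tuple of (new_parent, items_iterator), not: 'oops'"
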
20 changes: 10 additions & 10 deletions tasks/release.py
@@ -19,7 +19,7 @@


def log(msg):
print("[release] %s" % msg)
print(f"[release] {msg}")


def get_version_file(ctx):
@@ -126,11 +126,11 @@ def build_dists(ctx):
executable = ctx.run(
"python -c 'import sys; print(sys.executable)'", hide=True
).stdout.strip()
log("Building sdist using %s ...." % executable)
log(f"Building sdist using {executable} ....")
os.environ["PIPENV_PYTHON"] = py_version
ctx.run("pipenv install --dev", env=env)
ctx.run("pipenv run pip install -e . --upgrade --upgrade-strategy=eager", env=env)
log("Building wheel using python %s ...." % py_version)
log(f"Building wheel using python {py_version} ....")
ctx.run("pipenv run python -m build", env=env)


@@ -224,8 +224,8 @@ def clean_mdchangelog(ctx, filename=None, content=None):
def tag_version(ctx, push=False):
version = find_version(ctx)
version = semver.VersionInfo.parse(version)
log("Tagging revision: v%s" % version)
ctx.run("git tag v%s" % version)
log(f"Tagging revision: v{version}")
ctx.run(f"git tag v{version}")
if push:
log("Pushing tags...")
ctx.run("git push origin master")
@@ -283,17 +283,17 @@ def bump_version(ctx, dry_run=False, pre=False, dev=False):
new_version = new_version.bump_prerelease(current_version, "dev")

# Update the version file
log("Updating version to %s" % new_version)
log(f"Updating version to {new_version}")
version = find_version(ctx)
log("Found current version: %s" % version)
log(f"Found current version: {version}")
if dry_run:
log("Would update to: %s" % new_version)
log(f"Would update to: {new_version}")
else:
log("Updating to: %s" % new_version)
log(f"Updating to: {new_version}")
version_file = get_version_file(ctx)
file_contents = version_file.read_text()
version_file.write_text(file_contents.replace(version, str(new_version)))
ctx.run(f"git add {version_file.as_posix()}")
log("Committing...")
ctx.run('git commit -s -m "Bumped version to %s."' % new_version)
ctx.run(f'git commit -s -m "Bumped version to {new_version}."')
return str(new_version)
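
One subtlety in these rewrites: %s calls str() on its argument, while an f-string placeholder calls format() with an empty spec, which falls back to str() for classes that define no custom __format__, so the tag and commit strings come out unchanged even though version is a semver.VersionInfo rather than a str. A sketch with a hypothetical Version class standing in for semver.VersionInfo:

# Sketch: f-string interpolation calls format(obj, ""), which for a class
# without a custom __format__ falls back to str(obj), matching %s.
class Version:  # hypothetical stand-in for semver.VersionInfo
    def __init__(self, major, minor, patch):
        self.parts = (major, minor, patch)
    def __str__(self):
        return ".".join(map(str, self.parts))

v = Version(2024, 0, 1)
assert "git tag v%s" % v == f"git tag v{v}"  # both: git tag v2024.0.1
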
44 changes: 22 additions & 22 deletions tasks/vendoring/__init__.py
@@ -147,14 +147,14 @@ def _get_patched_dir(ctx):
def clean_vendor(ctx, vendor_dir):
# Old _vendor cleanup
remove_all(vendor_dir.glob("*.pyc"))
log("Cleaning %s" % vendor_dir)
log(f"Cleaning {vendor_dir}")
for item in vendor_dir.iterdir():
if item.is_dir():
shutil.rmtree(str(item))
elif item.name not in FILE_WHITE_LIST:
item.unlink()
else:
log("Skipping %s" % item)
log(f"Skipping {item}")


def detect_all_vendored_libs(ctx):
@@ -216,13 +216,13 @@ def rewrite_file_imports(item, vendored_libs):

for lib, to_lib in vendored_libs.items():
text = re.sub(
r"(?m)^(\s*)import %s((?:\.\S*)?\s+as)" % lib,
r"\1import %s\2" % to_lib,
rf"(?m)^(\s*)import {lib}((?:\.\S*)?\s+as)",
rf"\1import {to_lib}\2",
text,
)
text = re.sub(r"(?m)^(\s*)from %s([\s\.]+)" % lib, r"\1from %s\2" % to_lib, text)
text = re.sub(rf"(?m)^(\s*)from {lib}([\s\.]+)", rf"\1from {to_lib}\2", text)
text = re.sub(
r"(?m)^(\s*)import %s(\s*[,\n#])" % lib,
rf"(?m)^(\s*)import {lib}(\s*[,\n#])",
rf"\1import {to_lib} as {lib}\2",
text,
)
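
The regex rewrites above become raw f-strings. The rf prefix combines raw-string semantics (backslash escapes such as \s and \1 pass through to the regex engine) with interpolation; a literal { or } in such a pattern would have to be doubled, which these patterns happen not to contain. A sketch with a hypothetical library name:

import re

# Sketch: an rf-string interpolates {lib} while leaving \s, \S and the
# backreferences \1, \2 for the regex engine, as the % version did.
lib, to_lib = "requests", "pipenv.vendor.requests"  # hypothetical names
text = "import requests.adapters as adapters\n"
out = re.sub(rf"(?m)^(\s*)import {lib}((?:\.\S*)?\s+as)",
             rf"\1import {to_lib}\2", text)
assert out == "import pipenv.vendor.requests.adapters as adapters\n"
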
@@ -232,8 +232,8 @@ def rewrite_file_imports(item, vendored_libs):


def apply_patch(ctx, patch_file_path):
log("Applying patch %s" % patch_file_path.name)
ctx.run("git apply --ignore-whitespace --verbose %s" % patch_file_path)
log(f"Applying patch {patch_file_path.name}")
ctx.run(f"git apply --ignore-whitespace --verbose {patch_file_path}")


def _recursive_write_to_zip(zf, path, root=None):
@@ -267,7 +267,7 @@ def rename_if_needed(ctx, vendor_dir, item):

def _ensure_package_in_requirements(ctx, requirements_file, package):
requirement = None
log("using requirements file: %s" % requirements_file)
log(f"using requirements file: {requirements_file}")
req_file_lines = list(requirements_file.read_text().splitlines())
if package:
match = [r for r in req_file_lines if r.strip().lower().startswith(package)]
@@ -280,10 +280,10 @@ def _ensure_package_in_requirements(ctx, requirements_file, package):
):
matched_req = f"{m}"
requirement = matched_req
log("Matched req: %r" % matched_req)
log(f"Matched req: {matched_req!r}")
if not matched_req:
req_file_lines.append(f"{package}")
log("Writing requirements file: %s" % requirements_file)
log(f"Writing requirements file: {requirements_file}")
requirements_file.write_text("\n".join(req_file_lines))
requirement = f"{package}"
return requirement
@@ -292,7 +292,7 @@ def _ensure_package_in_requirements(ctx, requirements_file, package):
def install(ctx, vendor_dir, package=None):
requirements_file = vendor_dir / f"{vendor_dir.name}.txt"
requirement = f"-r {requirements_file.as_posix()}"
log("Using requirements file: %s" % requirement)
log(f"Using requirements file: {requirement}")
if package:
requirement = _ensure_package_in_requirements(ctx, requirements_file, package)
# We use --no-deps because we want to ensure that all of our dependencies
@@ -378,7 +378,7 @@ def vendor(ctx, vendor_dir, package=None, rewrite=True):
post_install_cleanup(ctx, vendor_dir)
# Detect the vendored packages/modules
vendored_libs = detect_all_vendored_libs(ctx)
log("Detected vendored libraries: %s" % ", ".join(vendored_libs))
log("Detected vendored libraries: {}".format(", ".join(vendored_libs)))

# Apply pre-patches
log("Applying pre-patches...")
@@ -395,7 +395,7 @@ def vendor(ctx, vendor_dir, package=None, rewrite=True):
for item in vendor_dir.iterdir():
if item.is_dir():
if rewrite and not package or (package and item.name.lower() in package):
log("Rewriting imports for %s..." % item)
log(f"Rewriting imports for {item}...")
rewrite_imports(item, vendored_libs)
rename_if_needed(ctx, vendor_dir, item)
elif item.name not in FILE_WHITE_LIST and (
@@ -419,12 +419,12 @@ def redo_imports(ctx, library, vendor_dir=None):
vendor_dir = _get_vendor_dir(ctx)
else:
vendor_dir = Path(vendor_dir).absolute()
log("Using vendor dir: %s" % vendor_dir)
log(f"Using vendor dir: {vendor_dir}")
vendored_libs = detect_all_vendored_libs(ctx)
item = vendor_dir / library
library_name = vendor_dir / f"{library}.py"
log("Detected vendored libraries: %s" % ", ".join(vendored_libs))
log("Rewriting imports for %s..." % item)
log("Detected vendored libraries: {}".format(", ".join(vendored_libs)))
log(f"Rewriting imports for {item}...")
if item.is_dir():
rewrite_imports(item, vendored_libs)
else:
@@ -435,9 +435,9 @@ def rewrite_all_imports(ctx):
def rewrite_all_imports(ctx):
vendor_dir = _get_vendor_dir(ctx)
patched_dir = _get_patched_dir(ctx)
log("Using vendor dir: %s" % vendor_dir)
log(f"Using vendor dir: {vendor_dir}")
vendored_libs = detect_all_vendored_libs(ctx)
log("Detected vendored libraries: %s" % ", ".join(vendored_libs))
log("Detected vendored libraries: {}".format(", ".join(vendored_libs)))
log("Rewriting all imports related to vendored libs")
for item in itertools.chain(patched_dir.iterdir(), vendor_dir.iterdir()):
if item.is_dir():
@@ -761,14 +761,14 @@ def main(ctx, package=None, type=None):
target_dirs = [vendor_dir, patched_dir]
if package:
if type is None or type == "vendor":
log("Using vendor dir: %s" % vendor_dir)
log(f"Using vendor dir: {vendor_dir}")
vendor(ctx, vendor_dir, package=package)
download_licenses(ctx, vendor_dir, package=package)
elif type == "patched":
log("Using patched dir: %s" % patched_dir)
log(f"Using patched dir: {patched_dir}")
vendor(ctx, patched_dir, package=package)
download_licenses(ctx, patched_dir, package=package)
log("Vendored %s" % package)
log(f"Vendored {package}")
return
for package_dir in target_dirs:
clean_vendor(ctx, package_dir)
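
Note that the "Detected vendored libraries" lines in this file are rewritten to str.format() rather than an f-string: the argument ", ".join(...) contains double quotes, and before Python 3.12 an f-string could not reuse its own quote character inside a replacement field, so .format() is the version-safe rewrite. A sketch:

# Sketch: the joined argument contains double quotes, which a
# double-quoted f-string could not nest before Python 3.12.
vendored_libs = ["click", "tomlkit"]  # hypothetical contents
line = "Detected vendored libraries: {}".format(", ".join(vendored_libs))
assert line == "Detected vendored libraries: click, tomlkit"
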
