Commit

Merge pull request #2821 from cphyc/code-cleaning-flynting-2

[f-string 2/2] apply flynt

munkm committed Aug 11, 2020
2 parents 36f76dc + 554f85d commit 5fb1bea

Showing 197 changed files with 995 additions and 1,104 deletions.
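For context: flynt mechanically rewrites printf-style % interpolation and str.format() calls into f-strings, which is the single transformation repeated across all 197 files below. A minimal sketch of the pattern, on an invented snippet rather than yt code:

    field, count = "density", 42
    # Before: the two older interpolation styles flynt targets
    print("field %s has %s entries" % (field, count))
    print("field {} has {} entries".format(field, count))
    # After: the equivalent f-string flynt emits (requires Python 3.6+)
    print(f"field {field} has {count} entries")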
3 changes: 2 additions & 1 deletion .git-blame-ignore-revs
@@ -11,4 +11,5 @@ ebadee629414aed2c7b6526e22a419205329ec38
 # converting to f-strings
 ad898e8e3954bc348daaa449d5ed73db778785e9
 ef51ad5199692afcf1a8ab491aa115c00c423113
-323ac4ddd4e99d6b951666736d4e9b03b6cfa21e
+323ac4ddd4e99d6b951666736d4e9b03b6cfa21e
+f7445f02022293f1b089cd8907000301516354bf
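(For context: the hashes in .git-blame-ignore-revs mark sweeping mechanical commits like this one so that git blame can skip past them to the real authors. Assuming git 2.23 or newer, the file takes effect via "git blame --ignore-revs-file .git-blame-ignore-revs <path>", or once per clone with "git config blame.ignoreRevsFile .git-blame-ignore-revs".)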
2 changes: 1 addition & 1 deletion yt/config.py
@@ -91,7 +91,7 @@
             # We changed them all to lowercase
             if option.lower() in ytcfg_defaults:
                 new_cp.set("yt", option, cp.get(section, option))
-                print("Setting %s to %s" % (option, cp.get(section, option)))
+                print(f"Setting {option} to {cp.get(section, option)}")
     open(_OLD_CONFIG_FILE + ".old", "w").write(f)
     new_cp.write(open(_OLD_CONFIG_FILE, "w"))
4 changes: 2 additions & 2 deletions yt/data_objects/analyzer_objects.py
@@ -19,7 +19,7 @@ def __init__(self, *args, **kwargs):

     def __repr__(self):
         # Stolen from YTDataContainer.__repr__
-        s = "%s: " % (self.__class__.__name__)
+        s = f"{self.__class__.__name__}: "
         s += ", ".join(["%s=%s" % (i, getattr(self, i)) for i in self._params])
         return s

@@ -66,7 +66,7 @@ class QuantityProxy(AnalysisTask):

     def __repr__(self):
         # Stolen from YTDataContainer.__repr__
-        s = "%s: " % (self.__class__.__name__)
+        s = f"{self.__class__.__name__}: "
         s += ", ".join(["%s" % [arg for arg in self.args]])
         s += ", ".join(["%s=%s" % (k, v) for k, v in self.kwargs.items()])
         return s
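Note that the "%s=%s" % (i, getattr(self, i)) expressions inside the join() calls survive the conversion: flynt is conservative and leaves strings it cannot rewrite safely, which appears to be why these nested cases were skipped. A hand-written f-string equivalent of the first join, as an illustrative sketch that is not part of this commit:

    s += ", ".join(f"{i}={getattr(self, i)}" for i in self._params)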
38 changes: 18 additions & 20 deletions yt/data_objects/construction_data_containers.py
@@ -343,7 +343,7 @@ def _initialize_projected_units(self, fields, chunk):
             path_length_unit = Unit(registry=self.ds.unit_registry)
         else:
             ax_name = self.ds.coordinates.axis_name[self.axis]
-            path_element_name = ("index", "path_element_%s" % (ax_name))
+            path_element_name = ("index", f"path_element_{ax_name}")
             path_length_unit = self.ds.field_info[path_element_name].units
             path_length_unit = Unit(
                 path_length_unit, registry=self.ds.unit_registry
@@ -552,7 +552,7 @@ def _handle_chunk(self, chunk, fields, tree):
             dl = self.ds.quan(1.0, "")
         else:
             ax_name = self.ds.coordinates.axis_name[self.axis]
-            dl = chunk["index", "path_element_%s" % (ax_name)]
+            dl = chunk["index", f"path_element_{ax_name}"]
         # This is done for cases where our path element does not have a CGS
         # equivalent. Once "preferred units" have been implemented, this
         # will not be necessary at all, as the final conversion will occur
@@ -876,12 +876,12 @@ def _fill_sph_particles(self, fields):
             fi = self.ds._get_field_info(field)
             ptype = fi.name[0]
             if ptype not in self.ds._sph_ptypes:
-                raise KeyError("%s is not a SPH particle type!" % ptype)
+                raise KeyError(f"{ptype} is not a SPH particle type!")
             buff = np.zeros(size, dtype="float64")
             if normalize:
                 buff_den = np.zeros(size, dtype="float64")

-            pbar = tqdm(desc="Interpolating SPH field {}".format(field))
+            pbar = tqdm(desc=f"Interpolating SPH field {field}")
             for chunk in self._data_source.chunks([field], "io"):
                 px = chunk[(ptype, "particle_position_x")].in_base("code").d
                 py = chunk[(ptype, "particle_position_y")].in_base("code").d
@@ -991,11 +991,11 @@ def _fill_fields(self, fields):
     def _generate_container_field(self, field):
         rv = self.ds.arr(np.ones(self.ActiveDimensions, dtype="float64"), "")
         axis_name = self.ds.coordinates.axis_name
-        if field == ("index", "d%s" % axis_name[0]):
+        if field == ("index", f"d{axis_name[0]}"):
             np.multiply(rv, self.dds[0], rv)
-        elif field == ("index", "d%s" % axis_name[1]):
+        elif field == ("index", f"d{axis_name[1]}"):
             np.multiply(rv, self.dds[1], rv)
-        elif field == ("index", "d%s" % axis_name[2]):
+        elif field == ("index", f"d{axis_name[2]}"):
             np.multiply(rv, self.dds[2], rv)
         elif field == ("index", axis_name[0]):
             x = np.mgrid[
@@ -1031,7 +1031,7 @@ def RightEdge(self):
         return self.right_edge

     def deposit(self, positions, fields=None, method=None, kernel_name="cubic"):
-        cls = getattr(particle_deposit, "deposit_%s" % method, None)
+        cls = getattr(particle_deposit, f"deposit_{method}", None)
         if cls is None:
             raise YTParticleDepositionNotImplemented(method)
         # We allocate number of zones, not number of octs. Everything
@@ -2035,22 +2035,20 @@ def _export_obj(
                 fmtl.write(
                     "newmtl " + omname + "\n"
                 )  # the specific material (color) for this face
-                fmtl.write(
-                    "Ka %.6f %.6f %.6f\n" % (0.0, 0.0, 0.0)
-                )  # ambient color, keep off
+                fmtl.write(f"Ka {0.0:.6f} {0.0:.6f} {0.0:.6f}\n")  # ambient color, keep off
                 fmtl.write(
-                    "Kd %.6f %.6f %.6f\n" % (lut[0][i], lut[1][i], lut[2][i])
+                    f"Kd {lut[0][i]:.6f} {lut[1][i]:.6f} {lut[2][i]:.6f}\n"
                 )  # color of face
                 fmtl.write(
-                    "Ks %.6f %.6f %.6f\n" % (0.0, 0.0, 0.0)
+                    f"Ks {0.0:.6f} {0.0:.6f} {0.0:.6f}\n"
                 )  # specular color, keep off
-                fmtl.write("d %.6f\n" % (transparency))  # transparency
-                fmtl.write("em %.6f\n" % (emiss[i]))  # emissivity per color
+                fmtl.write(f"d {transparency:.6f}\n")  # transparency
+                fmtl.write(f"em {emiss[i]:.6f}\n")  # emissivity per color
                 fmtl.write("illum 2\n")  # not relevant, 2 means highlights on?
                 fmtl.write("Ns %.6f\n\n" % (0.0))  # keep off, some other specular thing
             # (2) write vertices
             for i in range(0, self.vertices.shape[1]):
-                fobj.write("v %.6f %.6f %.6f\n" % (v["x"][i], v["y"][i], v["z"][i]))
+                fobj.write(f"v {v['x'][i]:.6f} {v['y'][i]:.6f} {v['z'][i]:.6f}\n")
             fobj.write("#done defining vertices\n\n")
             # (3) define faces and materials for each face
             for i in range(0, self.triangles.shape[0]):
@@ -2538,8 +2536,8 @@ def _upload_to_sketchfab(self, data, files):
         import requests

         SKETCHFAB_DOMAIN = "sketchfab.com"
-        SKETCHFAB_API_URL = "https://api.{}/v2/models".format(SKETCHFAB_DOMAIN)
-        SKETCHFAB_MODEL_URL = "https://{}/models/".format(SKETCHFAB_DOMAIN)
+        SKETCHFAB_API_URL = f"https://api.{SKETCHFAB_DOMAIN}/v2/models"
+        SKETCHFAB_MODEL_URL = f"https://{SKETCHFAB_DOMAIN}/models/"

         try:
             r = requests.post(SKETCHFAB_API_URL, data=data, files=files, verify=False)
@@ -2722,7 +2720,7 @@ def _sanitize_ptypes(self, ptypes):
         self.ds.index
         for ptype in ptypes:
             if ptype not in self.ds.particle_types:
-                mess = "{} not found. Particle type must ".format(ptype)
+                mess = f"{ptype} not found. Particle type must "
                 mess += "be in the dataset!"
                 raise TypeError(mess)

@@ -2814,7 +2812,7 @@ def scatter_smooth(self, fields, units, normalize):
            buff_den = np.empty(0)

        ptype = fields[0]
-        pbar = tqdm(desc="Interpolating (scatter) SPH field {}".format(fields[0]))
+        pbar = tqdm(desc=f"Interpolating (scatter) SPH field {fields[0]}")
        for chunk in self._data_source.chunks([fields], "io"):
            px = chunk[(ptype, "particle_position_x")].in_base("code").d
            py = chunk[(ptype, "particle_position_y")].in_base("code").d
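The _export_obj hunk above shows the less obvious half of the conversion: printf precision specifiers carry over as f-string format specs, so "%.6f" % x becomes f"{x:.6f}". The vertex-writing line also shows a quoting wrinkle: before Python 3.12, an f-string delimited by double quotes cannot contain double quotes inside its expressions, so v["x"][i] is written as v['x'][i]. Both points in a standalone sketch with invented values:

    x, y, z = 1.0, 2.5, -0.125
    v = {"x": [x], "y": [y], "z": [z]}
    line = "v %.6f %.6f %.6f\n" % (x, y, z)  # printf style
    line2 = f"v {x:.6f} {y:.6f} {z:.6f}\n"  # f-string format spec
    line3 = f"v {v['x'][0]:.6f} {v['y'][0]:.6f} {v['z'][0]:.6f}\n"  # inner quotes differ
    assert line == line2 == line3 == "v 1.000000 2.500000 -0.125000\n"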
36 changes: 18 additions & 18 deletions yt/data_objects/data_containers.py
@@ -389,7 +389,7 @@ def _generate_particle_field(self, field):
         ind = 0
         for _io_chunk in self.chunks([], "io", cache=False):
             for _chunk in self.chunks(field, "spatial"):
-                x, y, z = (self[ftype, "particle_position_%s" % ax] for ax in "xyz")
+                x, y, z = (self[ftype, f"particle_position_{ax}"] for ax in "xyz")
                 if x.size == 0:
                     continue
                 mask = self._current_chunk.objs[0].select_particles(
@@ -413,7 +413,7 @@ def _count_particles(self, ftype):
         size = 0
         for _io_chunk in self.chunks([], "io", cache=False):
             for _chunk in self.chunks([], "spatial"):
-                x, y, z = (self[ftype, "particle_position_%s" % ax] for ax in "xyz")
+                x, y, z = (self[ftype, f"particle_position_{ax}"] for ax in "xyz")
                 if x.size == 0:
                     continue
                 size += self._current_chunk.objs[0].count_particles(
@@ -607,7 +607,7 @@ def save_as_dataset(self, filename=None, fields=None):
         """

-        keyword = "%s_%s" % (str(self.ds), self._type_name)
+        keyword = f"{str(self.ds)}_{self._type_name}"
         filename = get_output_filename(filename, keyword, ".h5")

         data = {}
@@ -645,7 +645,7 @@ def save_as_dataset(self, filename=None, fields=None):
         if need_particle_positions:
             for ax in self.ds.coordinates.axis_order:
                 for ptype in ptypes:
-                    p_field = (ptype, "particle_position_%s" % ax)
+                    p_field = (ptype, f"particle_position_{ax}")
                     if p_field in self.ds.field_info and p_field not in data:
                         data_fields.append(field)
                         ftypes[p_field] = p_field[0]
@@ -862,7 +862,7 @@ def create_firefly_object(
                 ## the UI name
                 if log_flag:
                     units = units[len("log(") : -1]
-                    field = "log{}".format(field)
+                    field = f"log{field}"

                 ## perform the unit conversion and take the log if
                 ## necessary.
@@ -1033,7 +1033,7 @@ def max(self, field, axis=None):
             r = self.ds.proj(field, axis, data_source=self, method="mip")
             return r
         else:
-            raise NotImplementedError("Unknown axis %s" % axis)
+            raise NotImplementedError(f"Unknown axis {axis}")

     def min(self, field, axis=None):
         r"""Compute the minimum of a field.
@@ -1073,7 +1073,7 @@ def min(self, field, axis=None):
                 "Minimum intensity projection not" " implemented."
             )
         else:
-            raise NotImplementedError("Unknown axis %s" % axis)
+            raise NotImplementedError(f"Unknown axis {axis}")

     def std(self, field, weight=None):
         """Compute the variance of a field.
@@ -1245,7 +1245,7 @@ def mean(self, field, axis=None, weight=None):
         elif axis is None:
             r = self.quantities.weighted_average_quantity(field, weight_field)
         else:
-            raise NotImplementedError("Unknown axis %s" % axis)
+            raise NotImplementedError(f"Unknown axis {axis}")
         return r

     def sum(self, field, axis=None):
@@ -1282,7 +1282,7 @@ def sum(self, field, axis=None):
         elif axis is None:
             r = self.quantities.total_quantity(field)
         else:
-            raise NotImplementedError("Unknown axis %s" % axis)
+            raise NotImplementedError(f"Unknown axis {axis}")
         return r

     def integrate(self, field, weight=None, axis=None):
@@ -1315,12 +1315,12 @@ def integrate(self, field, weight=None, axis=None):
         if axis in self.ds.coordinates.axis_name:
             r = self.ds.proj(field, axis, data_source=self, weight_field=weight_field)
         else:
-            raise NotImplementedError("Unknown axis %s" % axis)
+            raise NotImplementedError(f"Unknown axis {axis}")
         return r

     @property
     def _hash(self):
-        s = "%s" % self
+        s = f"{self}"
         try:
             import hashlib

@@ -1367,15 +1367,15 @@ def clone(self):

     def __repr__(self):
         # We'll do this the slow way to be clear what's going on
-        s = "%s (%s): " % (self.__class__.__name__, self.ds)
+        s = f"{self.__class__.__name__} ({self.ds}): "
         for i in self._con_args:
             try:
                 s += ", %s=%s" % (
                     i,
                     getattr(self, i).in_base(unit_system=self.ds.unit_system),
                 )
             except AttributeError:
-                s += ", %s=%s" % (i, getattr(self, i))
+                s += f", {i}={getattr(self, i)}"
         return s

     @contextmanager
@@ -1553,7 +1553,7 @@ def selector(self):
         if self._selector is not None:
             return self._selector
         s_module = getattr(self, "_selector_module", yt.geometry.selection_routines)
-        sclass = getattr(s_module, "%s_selector" % self._type_name, None)
+        sclass = getattr(s_module, f"{self._type_name}_selector", None)
         if sclass is None:
             raise YTDataSelectorNotImplemented(self._type_name)

@@ -2701,7 +2701,7 @@ def extract_isocontours(
             for v1 in verts:
                 f.write("v %0.16e %0.16e %0.16e\n" % (v1[0], v1[1], v1[2]))
             for i in range(len(verts) // 3):
-                f.write("f %s %s %s\n" % (i * 3 + 1, i * 3 + 2, i * 3 + 3))
+                f.write(f"f {i * 3 + 1} {i * 3 + 2} {i * 3 + 3}\n")
             if not hasattr(filename, "write"):
                 f.close()
         if sample_values is not None:
@@ -2861,8 +2861,8 @@ def extract_connected_sets(
                 if cid == -1:
                     continue
                 contours[level][cid] = base_object.cut_region(
-                    ["obj['contours_%s'] == %s" % (contour_key, cid)],
-                    {"contour_slices_%s" % contour_key: cids},
+                    [f"obj['contours_{contour_key}'] == {cid}"],
+                    {f"contour_slices_{contour_key}": cids},
                 )
         return cons, contours

@@ -2938,7 +2938,7 @@ def __init__(
         self.op = op.upper()
         self.dobj1 = dobj1
         self.dobj2 = dobj2
-        name = "Boolean%sSelector" % (self.op,)
+        name = f"Boolean{self.op}Selector"
         sel_cls = getattr(yt.geometry.selection_routines, name)
         self._selector = sel_cls(self)
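One artifact of the mechanical rewrite is visible in save_as_dataset above: "%s_%s" % (str(self.ds), self._type_name) became f"{str(self.ds)}_{self._type_name}", keeping a now-redundant str() call, since f-string interpolation already formats the value through format(), which falls back to str() for ordinary objects. A quick illustration with an invented class, not yt code:

    class DS:
        def __str__(self):
            return "galaxy0030"

    ds = DS()
    assert f"{str(ds)}_region" == f"{ds}_region" == "galaxy0030_region"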
12 changes: 6 additions & 6 deletions yt/data_objects/derived_quantities.py
@@ -21,7 +21,7 @@ def get_position_fields(field, data):
            ftype = finfo.alias_name[0]
        else:
            ftype = finfo.name[0]
-        position_fields = [(ftype, "particle_position_%s" % d) for d in axis_names]
+        position_fields = [(ftype, f"particle_position_{d}") for d in axis_names]
    else:
        position_fields = axis_names

@@ -264,7 +264,7 @@ def process_chunk(
        if self.use_particles:
            vals += [
                (
-                    data[particle_type, "particle_position_%s" % ax]
+                    data[particle_type, f"particle_position_{ax}"]
                    * data[particle_type, "particle_mass"]
                ).sum(dtype=np.float64)
                for ax in "xyz"
@@ -336,7 +336,7 @@ def process_chunk(
        vals = []
        if use_gas:
            vals += [
-                (data["gas", "velocity_%s" % ax] * data["gas", "mass"]).sum(
+                (data["gas", f"velocity_{ax}"] * data["gas", "mass"]).sum(
                    dtype=np.float64
                )
                for ax in "xyz"
@@ -345,7 +345,7 @@
        if use_particles and "nbody" in data.ds.particle_types:
            vals += [
                (
-                    data[particle_type, "particle_velocity_%s" % ax]
+                    data[particle_type, f"particle_velocity_{ax}"]
                    * data[particle_type, "particle_mass"]
                ).sum(dtype=np.float64)
                for ax in "xyz"
@@ -517,7 +517,7 @@ def process_chunk(
            rvals.extend(
                [
                    (
-                        data["gas", "specific_angular_momentum_%s" % axis]
+                        data["gas", f"specific_angular_momentum_{axis}"]
                        * data["gas", "mass"]
                    ).sum(dtype=np.float64)
                    for axis in "xyz"
@@ -530,7 +530,7 @@
                    (
                        data[
                            self.particle_type,
-                            "particle_specific_angular_momentum_%s" % axis,
+                            f"particle_specific_angular_momentum_{axis}",
                        ]
                        * data[self.particle_type, "particle_mass"]
                    ).sum(dtype=np.float64)
2 changes: 1 addition & 1 deletion yt/data_objects/grid_patch.py
@@ -363,7 +363,7 @@ def particle_operation(self, *args, **kwargs):

    def deposit(self, positions, fields=None, method=None, kernel_name="cubic"):
        # Here we perform our particle deposition.
-        cls = getattr(particle_deposit, "deposit_%s" % method, None)
+        cls = getattr(particle_deposit, f"deposit_{method}", None)
        if cls is None:
            raise YTParticleDepositionNotImplemented(method)
        # We allocate number of zones, not number of octs. Everything
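The deposit methods here and in construction_data_containers.py both rely on the same name-based dispatch that these f-strings now spell out: build "deposit_{method}", look it up with getattr and a None default, and raise a specific exception on a miss. A self-contained sketch of the idiom with invented names, not the actual yt API:

    class DepositionNotImplemented(Exception):
        """Raised when no deposit_<method> implementation exists."""

    class deposit_sum:
        """Stand-in for a particle_deposit class; sums the values."""
        def process(self, values):
            return sum(values)

    def deposit(values, method):
        # Mirrors: cls = getattr(particle_deposit, f"deposit_{method}", None)
        cls = globals().get(f"deposit_{method}")
        if cls is None:
            raise DepositionNotImplemented(method)
        return cls().process(values)

    print(deposit([1.0, 2.0, 3.5], "sum"))  # 6.5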
