[pre-commit.ci] pre-commit autoupdate #5111

Merged (2 commits) on Feb 4, 2025
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -36,7 +36,7 @@ repos:
 additional_dependencies: [black==24.3.0]

 - repo: https://github.com/astral-sh/ruff-pre-commit
-rev: v0.8.6
+rev: v0.9.4
 hooks:
 - id: ruff-format
 - id: ruff
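For context: this autoupdate bumps ruff-pre-commit from v0.8.6 to v0.9.4, and the file-by-file changes below all look like mechanical rewrites produced by the newer ruff formatter (whitespace and quote normalization inside f-string replacement fields, and joining of implicit string concatenations), with no intended behavior change. A minimal sketch of reproducing such an update locally, assuming the pre-commit package is installed:

# Sketch only: bump hook revisions, then let the hooks rewrite the tree
# under the new formatter style.
import subprocess

subprocess.run(["pre-commit", "autoupdate"], check=True)
# Formatting hooks exit non-zero when they modify files, so don't raise here.
subprocess.run(["pre-commit", "run", "--all-files"], check=False)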
3 changes: 1 addition & 2 deletions conftest.py
@@ -156,8 +156,7 @@ def pytest_configure(config):
 # https://github.com/h5py/h5py/pull/2242
 config.addinivalue_line(
 "filterwarnings",
-"ignore:`product` is deprecated as of NumPy 1.25.0"
-":DeprecationWarning",
+"ignore:`product` is deprecated as of NumPy 1.25.0:DeprecationWarning",
 )

 if PANDAS_VERSION is not None and PANDAS_VERSION >= Version("2.2.0"):
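This hunk only joins an implicitly concatenated string literal; adjacent string literals are concatenated at parse time, so the warning filter passed to pytest is byte-for-byte the same. A quick check:

old = (
    "ignore:`product` is deprecated as of NumPy 1.25.0"
    ":DeprecationWarning"
)
new = "ignore:`product` is deprecated as of NumPy 1.25.0:DeprecationWarning"
assert old == new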
4 changes: 2 additions & 2 deletions yt/data_objects/construction_data_containers.py
@@ -2233,7 +2233,7 @@ def _export_obj(
 fobj.write(
 f"usemtl {omname}\n"
 ) # which material to use for this face (color)
-fobj.write(f"f {cc} {cc+1} {cc+2}\n\n") # vertices to color
+fobj.write(f"f {cc} {cc + 1} {cc + 2}\n\n") # vertices to color
 cc = cc + 3
 fmtl.close()
 fobj.close()
@@ -2612,7 +2612,7 @@ def _export_ply(
 )
 else:
 v = np.empty(self.vertices.shape[1], dtype=vs[:3])
-line = f"element face {int(nv/3)}\n"
+line = f"element face {int(nv / 3)}\n"
 f.write(line.encode("latin-1"))
 f.write(b"property list uchar int vertex_indices\n")
 if color_field is not None and sample_type == "face":
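As in most of this diff, the edits above only add spaces around binary operators inside f-string replacement fields; the rendered strings are unchanged. A small sanity check, using throwaway values for cc and nv:

cc, nv = 0, 12
assert f"f {cc} {cc+1} {cc+2}\n\n" == f"f {cc} {cc + 1} {cc + 2}\n\n"
assert f"element face {int(nv/3)}\n" == f"element face {int(nv / 3)}\n"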
6 changes: 3 additions & 3 deletions yt/data_objects/tests/test_particle_trajectories_pytest.py
@@ -129,9 +129,9 @@ def test_default_field_tuple(particle_trajectories_test_dataset, ptype):
 ptype = ptype if ptype else "all" # ptype defaults to "all"
 for k in trajs.field_data.keys():
 assert isinstance(k, tuple), f"Expected key to be tuple, received {type(k)}"
-assert (
-k[0] == ptype
-), f"Default field type ({k[0]}) does not match expected ({ptype})"
+assert k[0] == ptype, (
+f"Default field type ({k[0]}) does not match expected ({ptype})"
+)
 assert ("all", k[1]) in pfields, f"Unexpected field: {k[1]}"


2 changes: 1 addition & 1 deletion yt/frontends/amrex/fields.py
@@ -545,7 +545,7 @@ def _to_tex_isotope(self) -> str:

 def _to_tex_molecule(self) -> str:
 return "".join(
-rf"{element}_{{{count if count>1 else ''}}}"
+rf"{element}_{{{count if count > 1 else ''}}}"
 for element, count in self._spec
 )

4 changes: 2 additions & 2 deletions yt/frontends/art/data_structures.py
@@ -236,7 +236,7 @@ def _set_code_unit_attributes(self):
 mass = aM0 * 1.98892e33

 self.cosmological_simulation = True
-setdefaultattr(self, "mass_unit", self.quan(mass, f"g*{ng ** 3}"))
+setdefaultattr(self, "mass_unit", self.quan(mass, f"g*{ng**3}"))
 setdefaultattr(self, "length_unit", self.quan(box_proper, "Mpc"))
 setdefaultattr(self, "velocity_unit", self.quan(velocity, "cm/s"))
 setdefaultattr(self, "time_unit", self.length_unit / self.velocity_unit)
@@ -519,7 +519,7 @@ def _set_code_unit_attributes(self):
 mass = aM0 * 1.98892e33

 self.cosmological_simulation = True
-self.mass_unit = self.quan(mass, f"g*{ng ** 3}")
+self.mass_unit = self.quan(mass, f"g*{ng**3}")
 self.length_unit = self.quan(box_proper, "Mpc")
 self.velocity_unit = self.quan(velocity, "cm/s")
 self.time_unit = self.length_unit / self.velocity_unit
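Conversely, the formatter drops the spaces around ** when both operands are simple, so the unit string handed to self.quan is identical. For example, with a placeholder grid size:

ng = 128  # placeholder value
assert f"g*{ng ** 3}" == f"g*{ng**3}" == "g*2097152"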
4 changes: 2 additions & 2 deletions yt/frontends/athena_pp/fields.py
@@ -31,8 +31,8 @@ def setup_fluid_fields(self):
 # Add velocity fields
 vel_prefix = "velocity"
 for i, comp in enumerate(self.ds.coordinates.axis_order):
-vel_field = ("athena_pp", f"vel{i+1}")
-mom_field = ("athena_pp", f"mom{i+1}")
+vel_field = ("athena_pp", f"vel{i + 1}")
+mom_field = ("athena_pp", f"mom{i + 1}")
 if vel_field in self.field_list:
 self.add_output_field(
 vel_field, sampling_type="cell", units="code_length/code_time"
6 changes: 3 additions & 3 deletions yt/frontends/chimera/data_structures.py
@@ -165,9 +165,9 @@ def _detect_output_fields(self): # Reads in the available data fields
 for i in f["abundance"]
 if np.shape(f["abundance"][i]) == np.shape(f["fluid"]["rho_c"])
 ]
-e_rms = [("chimera", f"e_rms_{i+1}") for i in range(4)]
-lumin = [("chimera", f"lumin_{i+1}") for i in range(4)]
-num_lumin = [("chimera", f"num_lumin_{i+1}") for i in range(4)]
+e_rms = [("chimera", f"e_rms_{i + 1}") for i in range(4)]
+lumin = [("chimera", f"lumin_{i + 1}") for i in range(4)]
+num_lumin = [("chimera", f"num_lumin_{i + 1}") for i in range(4)]
 a_name = [
 ("chimera", i.decode("utf-8").strip()) for i in f["abundance"]["a_name"]
 ]
2 changes: 1 addition & 1 deletion yt/frontends/exodus_ii/io.py
@@ -70,7 +70,7 @@ def _read_fluid_selection(self, chunks, selector, fields, size):
 if fname in self.elem_fields:
 field_ind = self.elem_fields.index(fname)
 for g, mesh_id in zip(objs, mesh_ids, strict=True):
-varname = f"vals_elem_var{field_ind+1}eb{mesh_id}"
+varname = f"vals_elem_var{field_ind + 1}eb{mesh_id}"
 fdata = ds.variables[varname][:]
 data = fdata[self.ds.step, :]
 ind += g.select(selector, data, rv[field], ind) # caches
11 changes: 6 additions & 5 deletions yt/frontends/fits/data_structures.py
@@ -100,7 +100,7 @@ def _ensure_same_dims(self, hdu):
 ds = self.dataset
 conditions = [hdu.header["naxis"] != ds.primary_header["naxis"]]
 for i in range(ds.naxis):
-nax = f"naxis{i+1}"
+nax = f"naxis{i + 1}"
 conditions.append(hdu.header[nax] != ds.primary_header[nax])
 if np.any(conditions):
 return False
@@ -162,7 +162,7 @@ def _detect_output_fields(self):
 fname += f"_{dup_field_index[fname]}"
 for k in range(naxis4):
 if naxis4 > 1:
-fname += f"_{hdu.header['CTYPE4']}_{k+1}"
+fname += f"_{hdu.header['CTYPE4']}_{k + 1}"
 self._axis_map[fname] = k
 self._file_map[fname] = fits_file
 self._ext_map[fname] = j
@@ -300,7 +300,7 @@ def check_sky_coords(filename, ndim):
 if header["naxis"] < ndim:
 return False
 axis_names = [
-header.get(f"ctype{i+1}", "") for i in range(header["naxis"])
+header.get(f"ctype{i + 1}", "") for i in range(header["naxis"])
 ]
 if len(axis_names) == 3 and axis_names.count("LINEAR") == 2:
 return any(a[0] in spec_prefixes for a in axis_names)
@@ -500,9 +500,10 @@ def _determine_structure(self):
 self.primary_header, self.first_image = find_primary_header(self._handle)
 self.naxis = self.primary_header["naxis"]
 self.axis_names = [
-self.primary_header.get(f"ctype{i+1}", "LINEAR") for i in range(self.naxis)
+self.primary_header.get(f"ctype{i + 1}", "LINEAR")
+for i in range(self.naxis)
 ]
-self.dims = [self.primary_header[f"naxis{i+1}"] for i in range(self.naxis)]
+self.dims = [self.primary_header[f"naxis{i + 1}"] for i in range(self.naxis)]

 def _determine_wcs(self):
 wcs = _astropy.pywcs.WCS(header=self.primary_header)
3 changes: 1 addition & 2 deletions yt/frontends/gamer/data_structures.py
@@ -151,8 +151,7 @@ def _validate_parent_children_relationship(self):
 # children->parent == itself
 for c in grid.Children:
 assert c.Parent is grid, (
-f"Grid {grid.id}, Children {c.id}, "
-f"Children->Parent {c.Parent.id}"
+f"Grid {grid.id}, Children {c.id}, Children->Parent {c.Parent.id}"
 )

 # all refinement grids should have parent
3 changes: 1 addition & 2 deletions yt/frontends/open_pmd/fields.py
@@ -214,8 +214,7 @@ def __init__(self, ds, field_list):
 except KeyError:
 if recname != "particlePatches":
 mylog.info(
-"open_pmd - %s_%s does not seem to have "
-"unitDimension",
+"open_pmd - %s_%s does not seem to have unitDimension",
 pname,
 recname,
 )
2 changes: 1 addition & 1 deletion yt/frontends/parthenon/fields.py
@@ -84,7 +84,7 @@ def setup_fluid_fields(self):
 for i, comp in enumerate(self.ds.coordinates.axis_order):
 # Support both current and legacy scheme
 for mom_field_name in ["MomentumDensity", "cons_momentum_density_"]:
-mom_field = ("parthenon", f"{mom_field_name}{i+1}")
+mom_field = ("parthenon", f"{mom_field_name}{i + 1}")
 if mom_field in self.field_list:
 self.add_field(
 ("gas", f"velocity_{comp}"),
2 changes: 1 addition & 1 deletion yt/frontends/ramses/tests/test_file_sanitizer.py
@@ -96,7 +96,7 @@ def test_invalid_sanitizing(valid_path_tuples, invalid_path_tuples):

 for path in chain(*(pt.paths_to_try for pt in valid_path_tuples)):
 expected_error_message = re.escape(
-f"No such file or directory '{str(path/'does_not_exist.txt')}'"
+f"No such file or directory '{str(path / 'does_not_exist.txt')}'"
 )
 sanitizer = RAMSESFileSanitizer(path / "does_not_exist.txt")
 with pytest.raises(FileNotFoundError, match=expected_error_message):
2 changes: 1 addition & 1 deletion yt/funcs.py
@@ -1156,7 +1156,7 @@ def validate_float(obj):
 def validate_sequence(obj):
 if obj is not None and not is_sequence(obj):
 raise TypeError(
-"Expected an iterable object, " f"received {_full_type_name(obj)!r}"
+f"Expected an iterable object, received {_full_type_name(obj)!r}"
 )
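Here a plain string literal that was implicitly concatenated with an f-string is folded into a single f-string; the resulting message is unchanged. A minimal check with a hypothetical stand-in for _full_type_name(obj)!r:

type_name = "'float'"  # stand-in for _full_type_name(obj)!r
old = "Expected an iterable object, " f"received {type_name}"
new = f"Expected an iterable object, received {type_name}"
assert old == new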


3 changes: 1 addition & 2 deletions yt/loaders.py
@@ -191,8 +191,7 @@ def _sanitize_axis_order_args(
 geometry_str: str
 if isinstance(geometry, tuple):
 issue_deprecation_warning(
-f"Received a tuple as {geometry=}\n"
-"Use the `axis_order` argument instead.",
+f"Received a tuple as {geometry=}\nUse the `axis_order` argument instead.",
 since="4.2",
 stacklevel=4,
 )
2 changes: 1 addition & 1 deletion yt/tests/test_load_sample.py
@@ -89,7 +89,7 @@ def test_load_sample_small_dataset(

 text = textwrap.dedent(
 f"""
-'{fn.replace('/', os.path.sep)}' is not available locally. Looking up online.
+'{fn.replace("/", os.path.sep)}' is not available locally. Looking up online.
 Downloading from https://yt-project.org/data/{archive}
 Untaring downloaded file to '{str(tmp_data_dir)}'
 """
3 changes: 1 addition & 2 deletions yt/visualization/_handlers.py
@@ -240,8 +240,7 @@ def get_dynamic_range(
 return dvmax / self.dynamic_range, dvmax
 else:
 raise TypeError(
-"Cannot set dynamic range with both "
-"vmin and vmax already constrained."
+"Cannot set dynamic range with both vmin and vmax already constrained."
 )

 @property
3 changes: 1 addition & 2 deletions yt/visualization/plot_container.py
@@ -351,8 +351,7 @@ def _switch_ds(self, new_ds, data_source=None):
 if data_source is not None:
 if name != "proj":
 raise RuntimeError(
-"The data_source keyword argument "
-"is only defined for projections."
+"The data_source keyword argument is only defined for projections."
 )
 kwargs["data_source"] = data_source

4 changes: 1 addition & 3 deletions yt/visualization/volume_rendering/lens.py
@@ -138,9 +138,7 @@ def project_to_plane(self, camera, pos, res=None):

 def __repr__(self):
 return (
-"<Lens Object>:\n"
-"\tlens_type:plane-parallel\n"
-f"\tviewpoint:{self.viewpoint}"
+f"<Lens Object>:\n\tlens_type:plane-parallel\n\tviewpoint:{self.viewpoint}"
 )

