diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7322ceb912b..584aaefe014 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -36,7 +36,7 @@ repos:
         additional_dependencies: [black==24.3.0]
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.8.6
+    rev: v0.9.4
     hooks:
       - id: ruff-format
      - id: ruff
diff --git a/conftest.py b/conftest.py
index bf8e2910305..303990a562b 100644
--- a/conftest.py
+++ b/conftest.py
@@ -156,8 +156,7 @@ def pytest_configure(config):
         # https://github.com/h5py/h5py/pull/2242
         config.addinivalue_line(
             "filterwarnings",
-            "ignore:`product` is deprecated as of NumPy 1.25.0"
-            ":DeprecationWarning",
+            "ignore:`product` is deprecated as of NumPy 1.25.0:DeprecationWarning",
         )
 
     if PANDAS_VERSION is not None and PANDAS_VERSION >= Version("2.2.0"):
diff --git a/yt/data_objects/construction_data_containers.py b/yt/data_objects/construction_data_containers.py
index e80308047d2..6c20bdf310c 100644
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -2233,7 +2233,7 @@ def _export_obj(
                 fobj.write(
                     f"usemtl {omname}\n"
                 )  # which material to use for this face (color)
-                fobj.write(f"f {cc} {cc+1} {cc+2}\n\n")  # vertices to color
+                fobj.write(f"f {cc} {cc + 1} {cc + 2}\n\n")  # vertices to color
                 cc = cc + 3
         fmtl.close()
         fobj.close()
@@ -2612,7 +2612,7 @@ def _export_ply(
             )
         else:
             v = np.empty(self.vertices.shape[1], dtype=vs[:3])
-        line = f"element face {int(nv/3)}\n"
+        line = f"element face {int(nv / 3)}\n"
         f.write(line.encode("latin-1"))
         f.write(b"property list uchar int vertex_indices\n")
         if color_field is not None and sample_type == "face":
diff --git a/yt/data_objects/tests/test_particle_trajectories_pytest.py b/yt/data_objects/tests/test_particle_trajectories_pytest.py
index 9fa30049820..87128b8881f 100644
--- a/yt/data_objects/tests/test_particle_trajectories_pytest.py
+++ b/yt/data_objects/tests/test_particle_trajectories_pytest.py
@@ -129,9 +129,9 @@ def test_default_field_tuple(particle_trajectories_test_dataset, ptype):
     ptype = ptype if ptype else "all"  # ptype defaults to "all"
     for k in trajs.field_data.keys():
         assert isinstance(k, tuple), f"Expected key to be tuple, received {type(k)}"
-        assert (
-            k[0] == ptype
-        ), f"Default field type ({k[0]}) does not match expected ({ptype})"
+        assert k[0] == ptype, (
+            f"Default field type ({k[0]}) does not match expected ({ptype})"
+        )
         assert ("all", k[1]) in pfields, f"Unexpected field: {k[1]}"
 
 
diff --git a/yt/frontends/amrex/fields.py b/yt/frontends/amrex/fields.py
index 6354b7d65e0..c85ab603787 100644
--- a/yt/frontends/amrex/fields.py
+++ b/yt/frontends/amrex/fields.py
@@ -545,7 +545,7 @@ def _to_tex_isotope(self) -> str:
 
     def _to_tex_molecule(self) -> str:
         return "".join(
-            rf"{element}_{{{count if count>1 else ''}}}"
+            rf"{element}_{{{count if count > 1 else ''}}}"
             for element, count in self._spec
         )
 
diff --git a/yt/frontends/art/data_structures.py b/yt/frontends/art/data_structures.py
index b17c9c67821..f0038ec33cb 100644
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -236,7 +236,7 @@ def _set_code_unit_attributes(self):
             mass = aM0 * 1.98892e33
 
         self.cosmological_simulation = True
-        setdefaultattr(self, "mass_unit", self.quan(mass, f"g*{ng ** 3}"))
+        setdefaultattr(self, "mass_unit", self.quan(mass, f"g*{ng**3}"))
         setdefaultattr(self, "length_unit", self.quan(box_proper, "Mpc"))
         setdefaultattr(self, "velocity_unit", self.quan(velocity, "cm/s"))
         setdefaultattr(self, "time_unit", self.length_unit / self.velocity_unit)
@@ -519,7 +519,7 @@ def _set_code_unit_attributes(self):
             mass = aM0 * 1.98892e33
 
         self.cosmological_simulation = True
-        self.mass_unit = self.quan(mass, f"g*{ng ** 3}")
+        self.mass_unit = self.quan(mass, f"g*{ng**3}")
         self.length_unit = self.quan(box_proper, "Mpc")
         self.velocity_unit = self.quan(velocity, "cm/s")
         self.time_unit = self.length_unit / self.velocity_unit
diff --git a/yt/frontends/athena_pp/fields.py b/yt/frontends/athena_pp/fields.py
index 5b24a60b107..5f6a7fdb4ee 100644
--- a/yt/frontends/athena_pp/fields.py
+++ b/yt/frontends/athena_pp/fields.py
@@ -31,8 +31,8 @@ def setup_fluid_fields(self):
         # Add velocity fields
         vel_prefix = "velocity"
         for i, comp in enumerate(self.ds.coordinates.axis_order):
-            vel_field = ("athena_pp", f"vel{i+1}")
-            mom_field = ("athena_pp", f"mom{i+1}")
+            vel_field = ("athena_pp", f"vel{i + 1}")
+            mom_field = ("athena_pp", f"mom{i + 1}")
             if vel_field in self.field_list:
                 self.add_output_field(
                     vel_field, sampling_type="cell", units="code_length/code_time"
diff --git a/yt/frontends/chimera/data_structures.py b/yt/frontends/chimera/data_structures.py
index c2806f40a78..b5ab93debda 100644
--- a/yt/frontends/chimera/data_structures.py
+++ b/yt/frontends/chimera/data_structures.py
@@ -165,9 +165,9 @@ def _detect_output_fields(self):  # Reads in the available data fields
             for i in f["abundance"]
             if np.shape(f["abundance"][i]) == np.shape(f["fluid"]["rho_c"])
         ]
-        e_rms = [("chimera", f"e_rms_{i+1}") for i in range(4)]
-        lumin = [("chimera", f"lumin_{i+1}") for i in range(4)]
-        num_lumin = [("chimera", f"num_lumin_{i+1}") for i in range(4)]
+        e_rms = [("chimera", f"e_rms_{i + 1}") for i in range(4)]
+        lumin = [("chimera", f"lumin_{i + 1}") for i in range(4)]
+        num_lumin = [("chimera", f"num_lumin_{i + 1}") for i in range(4)]
         a_name = [
             ("chimera", i.decode("utf-8").strip()) for i in f["abundance"]["a_name"]
         ]
diff --git a/yt/frontends/exodus_ii/io.py b/yt/frontends/exodus_ii/io.py
index 609a881466e..c70b0278e2a 100644
--- a/yt/frontends/exodus_ii/io.py
+++ b/yt/frontends/exodus_ii/io.py
@@ -70,7 +70,7 @@ def _read_fluid_selection(self, chunks, selector, fields, size):
                 if fname in self.elem_fields:
                     field_ind = self.elem_fields.index(fname)
                     for g, mesh_id in zip(objs, mesh_ids, strict=True):
-                        varname = f"vals_elem_var{field_ind+1}eb{mesh_id}"
+                        varname = f"vals_elem_var{field_ind + 1}eb{mesh_id}"
                         fdata = ds.variables[varname][:]
                         data = fdata[self.ds.step, :]
                         ind += g.select(selector, data, rv[field], ind)  # caches
diff --git a/yt/frontends/fits/data_structures.py b/yt/frontends/fits/data_structures.py
index dbe3b5df6bf..b178f27822a 100644
--- a/yt/frontends/fits/data_structures.py
+++ b/yt/frontends/fits/data_structures.py
@@ -100,7 +100,7 @@ def _ensure_same_dims(self, hdu):
         ds = self.dataset
         conditions = [hdu.header["naxis"] != ds.primary_header["naxis"]]
         for i in range(ds.naxis):
-            nax = f"naxis{i+1}"
+            nax = f"naxis{i + 1}"
             conditions.append(hdu.header[nax] != ds.primary_header[nax])
         if np.any(conditions):
             return False
@@ -162,7 +162,7 @@ def _detect_output_fields(self):
                     fname += f"_{dup_field_index[fname]}"
                 for k in range(naxis4):
                     if naxis4 > 1:
-                        fname += f"_{hdu.header['CTYPE4']}_{k+1}"
+                        fname += f"_{hdu.header['CTYPE4']}_{k + 1}"
                     self._axis_map[fname] = k
                     self._file_map[fname] = fits_file
                     self._ext_map[fname] = j
@@ -300,7 +300,7 @@ def check_sky_coords(filename, ndim):
         if header["naxis"] < ndim:
             return False
         axis_names = [
-            header.get(f"ctype{i+1}", "") for i in range(header["naxis"])
+            header.get(f"ctype{i + 1}", "") for i in range(header["naxis"])
         ]
         if len(axis_names) == 3 and axis_names.count("LINEAR") == 2:
             return any(a[0] in spec_prefixes for a in axis_names)
@@ -500,9 +500,10 @@ def _determine_structure(self):
         self.primary_header, self.first_image = find_primary_header(self._handle)
         self.naxis = self.primary_header["naxis"]
         self.axis_names = [
-            self.primary_header.get(f"ctype{i+1}", "LINEAR") for i in range(self.naxis)
+            self.primary_header.get(f"ctype{i + 1}", "LINEAR")
+            for i in range(self.naxis)
         ]
-        self.dims = [self.primary_header[f"naxis{i+1}"] for i in range(self.naxis)]
+        self.dims = [self.primary_header[f"naxis{i + 1}"] for i in range(self.naxis)]
 
     def _determine_wcs(self):
         wcs = _astropy.pywcs.WCS(header=self.primary_header)
diff --git a/yt/frontends/gamer/data_structures.py b/yt/frontends/gamer/data_structures.py
index 729cb11e52f..745dca03ae1 100644
--- a/yt/frontends/gamer/data_structures.py
+++ b/yt/frontends/gamer/data_structures.py
@@ -151,8 +151,7 @@ def _validate_parent_children_relationship(self):
             # children->parent == itself
             for c in grid.Children:
                 assert c.Parent is grid, (
-                    f"Grid {grid.id}, Children {c.id}, "
-                    f"Children->Parent {c.Parent.id}"
+                    f"Grid {grid.id}, Children {c.id}, Children->Parent {c.Parent.id}"
                 )
 
             # all refinement grids should have parent
diff --git a/yt/frontends/open_pmd/fields.py b/yt/frontends/open_pmd/fields.py
index a22cb217421..f9f13a3494c 100644
--- a/yt/frontends/open_pmd/fields.py
+++ b/yt/frontends/open_pmd/fields.py
@@ -214,8 +214,7 @@ def __init__(self, ds, field_list):
            except KeyError:
                if recname != "particlePatches":
                    mylog.info(
-                        "open_pmd - %s_%s does not seem to have "
-                        "unitDimension",
+                        "open_pmd - %s_%s does not seem to have unitDimension",
                        pname,
                        recname,
                    )
diff --git a/yt/frontends/parthenon/fields.py b/yt/frontends/parthenon/fields.py
index d8951d5ea1d..3baaa394b64 100644
--- a/yt/frontends/parthenon/fields.py
+++ b/yt/frontends/parthenon/fields.py
@@ -84,7 +84,7 @@ def setup_fluid_fields(self):
         for i, comp in enumerate(self.ds.coordinates.axis_order):
             # Support both current and legacy scheme
             for mom_field_name in ["MomentumDensity", "cons_momentum_density_"]:
-                mom_field = ("parthenon", f"{mom_field_name}{i+1}")
+                mom_field = ("parthenon", f"{mom_field_name}{i + 1}")
                 if mom_field in self.field_list:
                     self.add_field(
                         ("gas", f"velocity_{comp}"),
diff --git a/yt/frontends/ramses/tests/test_file_sanitizer.py b/yt/frontends/ramses/tests/test_file_sanitizer.py
index af27d042afe..742682db415 100644
--- a/yt/frontends/ramses/tests/test_file_sanitizer.py
+++ b/yt/frontends/ramses/tests/test_file_sanitizer.py
@@ -96,7 +96,7 @@ def test_invalid_sanitizing(valid_path_tuples, invalid_path_tuples):
 
     for path in chain(*(pt.paths_to_try for pt in valid_path_tuples)):
         expected_error_message = re.escape(
-            f"No such file or directory '{str(path/'does_not_exist.txt')}'"
+            f"No such file or directory '{str(path / 'does_not_exist.txt')}'"
         )
         sanitizer = RAMSESFileSanitizer(path / "does_not_exist.txt")
         with pytest.raises(FileNotFoundError, match=expected_error_message):
diff --git a/yt/funcs.py b/yt/funcs.py
index edb9e666f07..5954e042d91 100644
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -1156,7 +1156,7 @@ def validate_float(obj):
 def validate_sequence(obj):
     if obj is not None and not is_sequence(obj):
         raise TypeError(
-            "Expected an iterable object, " f"received {_full_type_name(obj)!r}"
+            f"Expected an iterable object, received {_full_type_name(obj)!r}"
         )
 
 
diff --git a/yt/loaders.py b/yt/loaders.py
index f057026f391..fa7093d8960 100644
--- a/yt/loaders.py
+++ b/yt/loaders.py
@@ -191,8 +191,7 @@ def _sanitize_axis_order_args(
     geometry_str: str
     if isinstance(geometry, tuple):
         issue_deprecation_warning(
-            f"Received a tuple as {geometry=}\n"
-            "Use the `axis_order` argument instead.",
+            f"Received a tuple as {geometry=}\nUse the `axis_order` argument instead.",
             since="4.2",
             stacklevel=4,
         )
diff --git a/yt/tests/test_load_sample.py b/yt/tests/test_load_sample.py
index f76ba4d7f87..70179521f32 100644
--- a/yt/tests/test_load_sample.py
+++ b/yt/tests/test_load_sample.py
@@ -89,7 +89,7 @@ def test_load_sample_small_dataset(
 
    text = textwrap.dedent(
        f"""
-        '{fn.replace('/', os.path.sep)}' is not available locally. Looking up online.
+        '{fn.replace("/", os.path.sep)}' is not available locally. Looking up online.
        Downloading from https://yt-project.org/data/{archive}
        Untaring downloaded file to '{str(tmp_data_dir)}'
        """
diff --git a/yt/visualization/_handlers.py b/yt/visualization/_handlers.py
index b1e72fef1d0..46ee88cdbbb 100644
--- a/yt/visualization/_handlers.py
+++ b/yt/visualization/_handlers.py
@@ -240,8 +240,7 @@ def get_dynamic_range(
             return dvmax / self.dynamic_range, dvmax
         else:
             raise TypeError(
-                "Cannot set dynamic range with both "
-                "vmin and vmax already constrained."
+                "Cannot set dynamic range with both vmin and vmax already constrained."
             )
 
     @property
diff --git a/yt/visualization/plot_container.py b/yt/visualization/plot_container.py
index bf833aa50ac..91f49d3234b 100644
--- a/yt/visualization/plot_container.py
+++ b/yt/visualization/plot_container.py
@@ -351,8 +351,7 @@ def _switch_ds(self, new_ds, data_source=None):
         if data_source is not None:
             if name != "proj":
                 raise RuntimeError(
-                    "The data_source keyword argument "
-                    "is only defined for projections."
+                    "The data_source keyword argument is only defined for projections."
                 )
             kwargs["data_source"] = data_source
 
diff --git a/yt/visualization/volume_rendering/lens.py b/yt/visualization/volume_rendering/lens.py
index 7335cebe348..206dd25e24b 100644
--- a/yt/visualization/volume_rendering/lens.py
+++ b/yt/visualization/volume_rendering/lens.py
@@ -138,9 +138,7 @@ def project_to_plane(self, camera, pos, res=None):
 
     def __repr__(self):
         return (
-            ":\n"
-            "\tlens_type:plane-parallel\n"
-            f"\tviewpoint:{self.viewpoint}"
+            f":\n\tlens_type:plane-parallel\n\tviewpoint:{self.viewpoint}"
         )