From 0988a0e2490d73a18e83248eeeb9b0ac53f54b2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Robert?= Date: Sat, 23 Nov 2024 12:28:28 +0100 Subject: [PATCH 1/3] STY: manual fixes for newly flagged violations of UP031 --- .../construction_data_containers.py | 8 ++-- .../index_subobjects/unstructured_mesh.py | 4 +- yt/data_objects/level_sets/clump_handling.py | 2 +- yt/data_objects/particle_trajectories.py | 4 +- yt/data_objects/region_expression.py | 2 +- .../data_selection_objects.py | 4 +- yt/fields/interpolated_fields.py | 6 +-- yt/fields/xray_emission_fields.py | 4 +- yt/frontends/adaptahop/data_structures.py | 2 +- yt/frontends/amrex/data_structures.py | 12 +++--- yt/frontends/amrvac/fields.py | 22 +++++----- yt/frontends/art/data_structures.py | 4 +- yt/frontends/artio/data_structures.py | 4 +- yt/frontends/athena/data_structures.py | 3 +- yt/frontends/athena_pp/fields.py | 6 +-- yt/frontends/chombo/data_structures.py | 4 +- yt/frontends/chombo/io.py | 4 +- yt/frontends/eagle/fields.py | 2 +- yt/frontends/enzo/data_structures.py | 4 +- yt/frontends/enzo/io.py | 14 +++---- yt/frontends/enzo/simulation_handling.py | 12 +++--- yt/frontends/enzo_e/data_structures.py | 2 +- yt/frontends/enzo_e/tests/test_misc.py | 8 ++-- yt/frontends/exodus_ii/data_structures.py | 12 +++--- yt/frontends/exodus_ii/io.py | 7 ++-- yt/frontends/fits/data_structures.py | 21 ++++------ yt/frontends/gadget/io.py | 4 +- yt/frontends/gadget/simulation_handling.py | 2 +- yt/frontends/gadget/testing.py | 2 +- yt/frontends/gadget_fof/data_structures.py | 6 +-- yt/frontends/gadget_fof/fields.py | 4 +- yt/frontends/gadget_fof/io.py | 4 +- yt/frontends/gamer/data_structures.py | 20 ++++----- yt/frontends/gdf/io.py | 2 +- yt/frontends/open_pmd/data_structures.py | 2 +- yt/frontends/owls/simulation_handling.py | 2 +- yt/frontends/owls_subfind/io.py | 4 +- yt/frontends/parthenon/fields.py | 6 +-- yt/frontends/ramses/data_structures.py | 32 +++++--------- yt/frontends/ramses/field_handlers.py | 5 +-- yt/frontends/ramses/fields.py | 7 +--- yt/frontends/stream/io.py | 2 +- yt/frontends/ytdata/data_structures.py | 2 +- yt/funcs.py | 10 ++--- yt/geometry/grid_geometry_handler.py | 16 ++++--- yt/geometry/tests/test_particle_octree.py | 12 +++--- yt/loaders.py | 2 +- yt/utilities/command_line.py | 6 +-- yt/utilities/exceptions.py | 5 +-- .../conversion/conversion_athena.py | 24 +++++------ yt/utilities/grid_data_format/writer.py | 6 +-- yt/utilities/io_handler.py | 4 +- yt/utilities/lib/cykdtree/plot.py | 2 +- yt/utilities/mesh_code_generation.py | 4 +- .../parallel_analysis_interface.py | 12 ++---- yt/utilities/performance_counters.py | 35 +++++----------- yt/utilities/sdf.py | 6 +-- yt/utilities/tests/test_cosmology.py | 2 +- yt/visualization/eps_writer.py | 4 +- yt/visualization/plot_modifications.py | 4 +- .../tests/test_image_comp_2D_plots.py | 6 +-- .../volume_rendering/camera_path.py | 42 +++++++++++-------- .../volume_rendering/old_camera.py | 4 +- yt/visualization/volume_rendering/scene.py | 2 +- .../volume_rendering/transfer_functions.py | 4 +- 65 files changed, 228 insertions(+), 267 deletions(-) diff --git a/yt/data_objects/construction_data_containers.py b/yt/data_objects/construction_data_containers.py index 45b1b22878d..0681b08e582 100644 --- a/yt/data_objects/construction_data_containers.py +++ b/yt/data_objects/construction_data_containers.py @@ -2591,7 +2591,7 @@ def _export_ply( ] f.write(b"ply\n") f.write(b"format binary_little_endian 1.0\n") - line = "element vertex %i\n" % (nv) + line = f"element 
vertex {nv}\n" f.write(line.encode("latin-1")) f.write(b"property float x\n") f.write(b"property float y\n") f.write(b"property float z\n") @@ -2612,7 +2612,7 @@ def _export_ply( ) else: v = np.empty(self.vertices.shape[1], dtype=vs[:3]) - line = "element face %i\n" % (nv / 3) + line = f"element face {nv // 3}\n" f.write(line.encode("latin-1")) f.write(b"property list uchar int vertex_indices\n") if color_field is not None and sample_type == "face": @@ -2751,9 +2751,9 @@ def export_sketchfab( # to a file. if self.vertices.shape[1] > 1e7: tfi = 0 - fn = "temp_model_%03i.ply" % tfi + fn = f"temp_model_{tfi:03}.ply" while os.path.exists(fn): - fn = "temp_model_%03i.ply" % tfi + fn = f"temp_model_{tfi:03}.ply" tfi += 1 open(fn, "wb").write(ply_file.read()) raise YTTooManyVertices(self.vertices.shape[1], fn) diff --git a/yt/data_objects/index_subobjects/unstructured_mesh.py b/yt/data_objects/index_subobjects/unstructured_mesh.py index 375a2ae1942..fd943ca38ad 100644 --- a/yt/data_objects/index_subobjects/unstructured_mesh.py +++ b/yt/data_objects/index_subobjects/unstructured_mesh.py @@ -48,7 +48,7 @@ def _check_consistency(self): mylog.debug("Connectivity is consistent.") def __repr__(self): - return "UnstructuredMesh_%04i" % (self.mesh_id) + return f"UnstructuredMesh_{self.mesh_id:04}" def get_global_startindex(self): """ @@ -151,7 +151,7 @@ class SemiStructuredMesh(UnstructuredMesh): _container_fields = ("dx", "dy", "dz") def __repr__(self): - return "SemiStructuredMesh_%04i" % (self.mesh_id) + return f"SemiStructuredMesh_{self.mesh_id:04}" def _generate_container_field(self, field): if self._current_chunk is None: diff --git a/yt/data_objects/level_sets/clump_handling.py b/yt/data_objects/level_sets/clump_handling.py index c463e0a5066..cbf2367035c 100644 --- a/yt/data_objects/level_sets/clump_handling.py +++ b/yt/data_objects/level_sets/clump_handling.py @@ -253,7 +253,7 @@ def save_as_dataset(self, filename=None, fields=None): """ ds = self.data.ds - keyword = "%s_clump_%d" % (str(ds), self.clump_id) + keyword = f"{ds}_clump_{self.clump_id}" filename = get_output_filename(filename, keyword, ".h5") # collect clump info fields diff --git a/yt/data_objects/particle_trajectories.py b/yt/data_objects/particle_trajectories.py index 495a29b445a..5c7bb7a2d4b 100644 --- a/yt/data_objects/particle_trajectories.py +++ b/yt/data_objects/particle_trajectories.py @@ -335,7 +335,7 @@ def trajectory_from_index(self, index): """ mask = np.isin(self.indices, (index,), assume_unique=True) if not np.any(mask): - print("The particle index %d is not in the list!"
% (index)) + print(f"The particle index {index} is not in the list!") raise IndexError fields = sorted(self.field_data.keys()) traj = {} @@ -375,7 +375,7 @@ def write_out(self, filename_base): [self.times[it]] + [self[field][ix, it] for field in fields] ) ) - fid = open(filename_base + "_%d.dat" % self.indices[ix], "w") + fid = open(f"{filename_base}_{self.indices[ix]}.dat", "w") fid.writelines(outlines) fid.close() del fid diff --git a/yt/data_objects/region_expression.py b/yt/data_objects/region_expression.py index 7187c5606e8..be65a5a9263 100644 --- a/yt/data_objects/region_expression.py +++ b/yt/data_objects/region_expression.py @@ -115,7 +115,7 @@ def _create_slice(self, slice_tuple): dim = self.ds.dimensionality if dim < 2: raise ValueError( - "Can not create a slice from data with dimensionality '%d'" % dim + f"Can not create a slice from data with dimensionality '{dim}'" ) if dim == 2: coord = self.ds.domain_center[2] diff --git a/yt/data_objects/selection_objects/data_selection_objects.py b/yt/data_objects/selection_objects/data_selection_objects.py index c00de824242..fc47b693dc0 100644 --- a/yt/data_objects/selection_objects/data_selection_objects.py +++ b/yt/data_objects/selection_objects/data_selection_objects.py @@ -66,8 +66,8 @@ def __init__(self, ds, field_parameters, data_source=None): if data_source._dimensionality < self._dimensionality: raise RuntimeError( "Attempted to construct a DataContainer with a data_source " - "of lower dimensionality (%u vs %u)" - % (data_source._dimensionality, self._dimensionality) + "of lower dimensionality " + f"({data_source._dimensionality} vs {self._dimensionality})" ) self.field_parameters.update(data_source.field_parameters) self.quantities = DerivedQuantityCollection(self) diff --git a/yt/fields/interpolated_fields.py b/yt/fields/interpolated_fields.py index 6c9c745581a..8941eed4129 100644 --- a/yt/fields/interpolated_fields.py +++ b/yt/fields/interpolated_fields.py @@ -30,9 +30,9 @@ def add_interpolated_field( if len(axes_fields) != len(axes_data) or len(axes_fields) != len(table_data.shape): raise RuntimeError( - "Data dimension mismatch: data is %d, " - "%d axes data provided, and %d axes fields provided." - % (len(table_data.shape), len(axes_data), len(axes_fields)) + f"Data dimension mismatch: data is {len(table_data.shape)}, " + f"{len(axes_data)} axes data provided, " + f"and {len(axes_fields)} axes fields provided." ) int_class = _int_class[len(table_data.shape)] diff --git a/yt/fields/xray_emission_fields.py b/yt/fields/xray_emission_fields.py index 42abff987be..21e05a44ff7 100644 --- a/yt/fields/xray_emission_fields.py +++ b/yt/fields/xray_emission_fields.py @@ -20,7 +20,7 @@ def _get_data_file(table_type, data_dir=None): - data_file = "%s_emissivity_v%d.h5" % (table_type, data_version[table_type]) + data_file = f"{table_type}_emissivity_v{data_version[table_type]}.h5" if data_dir is None: supp_data_dir = ytcfg.get("yt", "supp_data_dir") data_dir = supp_data_dir if os.path.exists(supp_data_dir) else "." @@ -43,7 +43,7 @@ def __str__(self): class ObsoleteDataException(YTException): def __init__(self, table_type): - data_file = "%s_emissivity_v%d.h5" % (table_type, data_version[table_type]) + data_file = f"{table_type}_emissivity_v{data_version[table_type]}.h5" self.msg = "X-ray emissivity data is out of date.\n" self.msg += f"Download the latest data from {data_url}/{data_file}." 
diff --git a/yt/frontends/adaptahop/data_structures.py b/yt/frontends/adaptahop/data_structures.py index 8407f0bcefc..26f4f5e9be3 100644 --- a/yt/frontends/adaptahop/data_structures.py +++ b/yt/frontends/adaptahop/data_structures.py @@ -294,7 +294,7 @@ def __init__(self, ptype, particle_identifier, parent_ds, halo_ds): super().__init__(parent_ds, {}) def __repr__(self): - return "%s_%s_%09d" % (self.ds, self.ptype, self.particle_identifier) + return f"{self.ds}_{self.ptype}_{self.particle_identifier:09}" def __getitem__(self, key): return self.region[key] diff --git a/yt/frontends/amrex/data_structures.py b/yt/frontends/amrex/data_structures.py index de3cdfe325a..af04ec05201 100644 --- a/yt/frontends/amrex/data_structures.py +++ b/yt/frontends/amrex/data_structures.py @@ -196,9 +196,7 @@ def _generate_particle_fields(self, extra_field_names): self.known_int_fields = self.known_int_fields[0 : self.num_int_base] # these are extra integer fields - extra_int_fields = [ - "particle_int_comp%d" % i for i in range(self.num_int_extra) - ] + extra_int_fields = [f"particle_int_comp{i}" for i in range(self.num_int_extra)] self.known_int_fields.extend( [(self.particle_type, field) for field in extra_int_fields] ) @@ -216,7 +214,7 @@ def _generate_particle_fields(self, extra_field_names): assert len(extra_field_names) == self.num_real_extra else: extra_field_names = [ - "particle_real_comp%d" % i for i in range(self.num_real_extra) + f"particle_real_comp{i}" for i in range(self.num_real_extra) ] self.known_real_fields.extend( @@ -1478,7 +1476,7 @@ def __init__(self, header_fn): if len(line) == 1: line = f.readline() continue - self.data["species_%d" % i] = [float(val) for val in line] + self.data[f"species_{i}"] = [float(val) for val in line] i = i + 1 line = f.readline() @@ -1497,8 +1495,8 @@ def __init__(self, ds, dataset_type="boxlib_native"): for key, val in self.warpx_header.data.items(): if key.startswith("species_"): i = int(key.split("_")[-1]) - charge_name = "particle%.1d_charge" % i - mass_name = "particle%.1d_mass" % i + charge_name = f"particle{i}_charge" + mass_name = f"particle{i}_mass" self.parameters[charge_name] = val[0] self.parameters[mass_name] = val[1] diff --git a/yt/frontends/amrvac/fields.py b/yt/frontends/amrvac/fields.py index a7c965d51a1..2d147b06a38 100644 --- a/yt/frontends/amrvac/fields.py +++ b/yt/frontends/amrvac/fields.py @@ -33,7 +33,7 @@ def _velocity(field, data, idir, prefix=None): # used to generalize to dust fields if prefix is None: prefix = "" - moment = data["gas", "%smoment_%d" % (prefix, idir)] + moment = data["gas", f"{prefix}moment_{idir}"] rho = data["gas", f"{prefix}density"] mask1 = rho == 0 @@ -59,12 +59,12 @@ class AMRVACFieldInfo(FieldInfoContainer): # for now, define a finite family of dust fields (up to 100 species) MAXN_DUST_SPECIES = 100 known_dust_fields = [ - ("rhod%d" % idust, (code_density, ["dust%d_density" % idust], None)) + (f"rhod{idust}", (code_density, [f"dust{idust}_density"], None)) for idust in range(1, MAXN_DUST_SPECIES + 1) ] + [ ( - "m%dd%d" % (idir, idust), - (code_moment, ["dust%d_moment_%d" % (idust, idir)], None), + f"m{idir}d{idust}", + (code_moment, [f"dust{idust}_moment_{idir}"], None), ) for idust in range(1, MAXN_DUST_SPECIES + 1) for idir in (1, 2, 3) @@ -91,12 +91,12 @@ def _setup_velocity_fields(self, idust=None): if idust is None: dust_flag = dust_label = "" else: - dust_flag = "d%d" % idust - dust_label = "dust%d_" % idust + dust_flag = f"d{idust}" + dust_label = f"dust{idust}_" us = self.ds.unit_system for idir, 
alias in enumerate(direction_aliases[self.ds.geometry], start=1): - if ("amrvac", "m%d%s" % (idir, dust_flag)) not in self.field_list: + if ("amrvac", f"m{idir}{dust_flag}") not in self.field_list: break velocity_fn = functools.partial(_velocity, idir=idir, prefix=dust_label) self.add_field( @@ -107,20 +107,20 @@ def _setup_velocity_fields(self, idust=None): sampling_type="cell", ) self.alias( - ("gas", "%svelocity_%d" % (dust_label, idir)), + ("gas", f"{dust_label}velocity_{idir}"), ("gas", f"{dust_label}velocity_{alias}"), units=us["velocity"], ) self.alias( ("gas", f"{dust_label}moment_{alias}"), - ("gas", "%smoment_%d" % (dust_label, idir)), + ("gas", f"{dust_label}moment_{idir}"), units=us["density"] * us["velocity"], ) def _setup_dust_fields(self): idust = 1 imax = self.__class__.MAXN_DUST_SPECIES - while ("amrvac", "rhod%d" % idust) in self.field_list: + while ("amrvac", f"rhod{idust}") in self.field_list: if idust > imax: mylog.error( "Only the first %d dust species are currently read by yt. " @@ -138,7 +138,7 @@ def _setup_dust_fields(self): def _total_dust_density(field, data): tot = np.zeros_like(data["gas", "density"]) for idust in range(1, n_dust_found + 1): - tot += data["dust%d_density" % idust] + tot += data[f"dust{idust}_density"] return tot self.add_field( diff --git a/yt/frontends/art/data_structures.py b/yt/frontends/art/data_structures.py index c47f447dc03..b17c9c67821 100644 --- a/yt/frontends/art/data_structures.py +++ b/yt/frontends/art/data_structures.py @@ -320,7 +320,7 @@ def _parse_parameter_file(self): self.parameters["wspecies"] = wspecies[:n] self.parameters["lspecies"] = lspecies[:n] for specie in range(n): - self.particle_types.append("specie%i" % specie) + self.particle_types.append(f"specie{specie}") self.particle_types_raw = tuple(self.particle_types) ls_nonzero = np.diff(lspecies)[: n - 1] ls_nonzero = np.append(lspecies[0], ls_nonzero) @@ -611,7 +611,7 @@ def _parse_parameter_file(self): else: particle_header_vals[a1] = arr[:a2] for specie in range(n): - self.particle_types.append("specie%i" % specie) + self.particle_types.append(f"specie{specie}") self.particle_types_raw = tuple(self.particle_types) ls_nonzero = np.diff(lspecies)[: n - 1] ls_nonzero = np.append(lspecies[0], ls_nonzero) diff --git a/yt/frontends/artio/data_structures.py b/yt/frontends/artio/data_structures.py index 9ade430fd8b..2338d40e002 100644 --- a/yt/frontends/artio/data_structures.py +++ b/yt/frontends/artio/data_structures.py @@ -443,13 +443,13 @@ def _parse_parameter_file(self): if self.artio_parameters["num_primary_variables"][species] > 0: self.particle_variables[species].extend( self.artio_parameters[ - "species_%02d_primary_variable_labels" % (species,) + f"species_{species:02}_primary_variable_labels" ] ) if self.artio_parameters["num_secondary_variables"][species] > 0: self.particle_variables[species].extend( self.artio_parameters[ - "species_%02d_secondary_variable_labels" % (species,) + f"species_{species:02}_secondary_variable_labels" ] ) diff --git a/yt/frontends/athena/data_structures.py b/yt/frontends/athena/data_structures.py index f72b897277e..79683cd9f9c 100644 --- a/yt/frontends/athena/data_structures.py +++ b/yt/frontends/athena/data_structures.py @@ -297,7 +297,6 @@ def _parse_index(self): gridread["dimensions"][gridread["dimensions"] == 0] = 1 if np.prod(gridread["dimensions"]) != gridread["ncells"]: mylog.error( - "product of dimensions %i not equal to number of cells %i", - np.prod(gridread["dimensions"]), - gridread["ncells"], + f"product of dimensions {np.prod(gridread['dimensions'])} " + f"not equal to number of cells {gridread['ncells']}" ) diff --git a/yt/frontends/athena_pp/fields.py b/yt/frontends/athena_pp/fields.py index 65bb7d5535a..5b24a60b107 100644 --- a/yt/frontends/athena_pp/fields.py +++ b/yt/frontends/athena_pp/fields.py @@ -31,8 +31,8 @@ def setup_fluid_fields(self): # Add velocity fields vel_prefix = "velocity" for i, comp in enumerate(self.ds.coordinates.axis_order): - vel_field = ("athena_pp", "vel%d" % (i + 1)) - mom_field = ("athena_pp", "mom%d" % (i + 1)) + vel_field = ("athena_pp", f"vel{i+1}") + mom_field = ("athena_pp", f"mom{i+1}") if vel_field in self.field_list: self.add_output_field( vel_field, sampling_type="cell", units="code_length/code_time" ) @@ -113,5 +113,5 @@ def _temperature(field, data): ) setup_magnetic_field_aliases( - self, "athena_pp", ["Bcc%d" % ax for ax in (1, 2, 3)] + self, "athena_pp", [f"Bcc{ax}" for ax in (1, 2, 3)] ) diff --git a/yt/frontends/chombo/data_structures.py b/yt/frontends/chombo/data_structures.py index df93c64cda4..4c8020cbe26 100644 --- a/yt/frontends/chombo/data_structures.py +++ b/yt/frontends/chombo/data_structures.py @@ -330,9 +330,7 @@ def _determine_periodic(self): is_periodic = np.array([True, True, True]) for dir in range(self.dimensionality): try: - is_periodic[dir] = self._handle["/level_0"].attrs[ - "is_periodic_%d" % dir - ] + is_periodic[dir] = self._handle["/level_0"].attrs[f"is_periodic_{dir}"] except KeyError: is_periodic[dir] = True self._periodicity = tuple(is_periodic) diff --git a/yt/frontends/chombo/io.py b/yt/frontends/chombo/io.py index 7cadb72250a..5253528308a 100644 --- a/yt/frontends/chombo/io.py +++ b/yt/frontends/chombo/io.py @@ -33,7 +33,7 @@ def box_size(corners): num_comp = self._handle.attrs["num_components"] level = 0 while True: - lname = "level_%i" % level + lname = f"level_{level}" if lname not in self._handle: break boxes = self._handle["level_0"]["boxes"][()] @@ -87,7 +87,7 @@ def particle_field_index(self): return self._particle_field_index def _read_data(self, grid, field): - lstring = "level_%i" % grid.Level + lstring = f"level_{grid.Level}" lev = self._handle[lstring] dims = grid.ActiveDimensions shape = dims + 2 * self.ghost diff --git a/yt/frontends/eagle/fields.py b/yt/frontends/eagle/fields.py index 02ae3efd05f..f4aed98edd2 100644 --- a/yt/frontends/eagle/fields.py +++ b/yt/frontends/eagle/fields.py @@ -137,7 +137,7 @@ def _ion_density(field, data): index = eaglenetwork_ion_lookup[ion] # Ion to hydrogen number density ratio - ion_chem = data[ftype, "Chemistry_%03i" % index] + ion_chem = data[ftype, f"Chemistry_{index:03}"] # Mass of a single ion if ion[0:2].isalpha(): diff --git a/yt/frontends/enzo/data_structures.py b/yt/frontends/enzo/data_structures.py index acb7ea0c9f6..6f2d6cd0e07 100644 --- a/yt/frontends/enzo/data_structures.py +++ b/yt/frontends/enzo/data_structures.py @@ -574,8 +574,8 @@ def _parse_index(self): self.grids = np.empty(len(grids), dtype="object") for i, grid in enumerate(grids): if (i % 1e4) == 0: - mylog.debug("Prepared % 7i / % 7i grids", i, self.num_grids) - grid.filename = "Inline_processor_%07i" % (self.grid_procs[i, 0]) + mylog.debug(f"Prepared {i:>7} / {self.num_grids:>7} grids") + grid.filename = f"Inline_processor_{self.grid_procs[i, 0]:07}" grid._prepare_grid() grid._setup_dx() grid.proc_num = self.grid_procs[i, 0] diff --git a/yt/frontends/enzo/io.py b/yt/frontends/enzo/io.py index 781dcfea928..cfcd74cdbb9 100644 --- a/yt/frontends/enzo/io.py +++ b/yt/frontends/enzo/io.py @@ -20,7 +20,7 @@ def
_read_field_names(self, grid): return [] f = h5py.File(grid.filename, mode="r") try: - group = f["/Grid%08i" % grid.id] + group = f[f"/Grid{grid.id:08}"] except KeyError: group = f fields = [] @@ -78,7 +78,7 @@ def _read_particle_fields(self, chunks, ptf, selector): nap = sum(g.NumberOfActiveParticles.values()) if g.NumberOfParticles == 0 and nap == 0: continue - ds = f.get("/Grid%08i" % g.id) + ds = f.get(f"/Grid{g.id:08}") for ptype, field_list in sorted(ptf.items()): if ptype == "io": if g.NumberOfParticles == 0: @@ -159,7 +159,7 @@ def _read_obj_field(self, obj, field, fid_data): data = np.empty(obj.ActiveDimensions[::-1], dtype=self._field_dtype) ftype, fname = field try: - node = "/Grid%08i/%s" % (obj.id, fname) + node = f"/Grid{obj.id:08}/{fname}" dg = h5py.h5d.open(fid, node.encode("latin-1")) except KeyError: if fname == "Dark_Matter_Density": @@ -308,7 +308,7 @@ class IOHandlerPacked2D(IOHandlerPackedHDF5): def _read_data_set(self, grid, field): f = h5py.File(grid.filename, mode="r") - ds = f["/Grid%08i/%s" % (grid.id, field)][:] + ds = f[f"/Grid{grid.id:08}/{field}"][:] f.close() return ds.transpose()[:, :, None] @@ -321,7 +321,7 @@ def _read_fluid_selection(self, chunks, selector, fields, size): raise RuntimeError g = chunks[0].objs[0] f = h5py.File(g.filename, mode="r") - gds = f.get("/Grid%08i" % g.id) + gds = f.get(f"/Grid{g.id:08}") for ftype, fname in fields: rv[ftype, fname] = np.atleast_3d(gds.get(fname)[()].transpose()) f.close() @@ -346,7 +346,7 @@ def _read_fluid_selection(self, chunks, selector, fields, size): if f is None: # print("Opening (count) %s" % g.filename) f = h5py.File(g.filename, mode="r") - gds = f.get("/Grid%08i" % g.id) + gds = f.get(f"/Grid{g.id:08}") if gds is None: gds = f for field in fields: @@ -364,6 +364,6 @@ class IOHandlerPacked1D(IOHandlerPackedHDF5): def _read_data_set(self, grid, field): f = h5py.File(grid.filename, mode="r") - ds = f["/Grid%08i/%s" % (grid.id, field)][:] + ds = f[f"/Grid{grid.id:08}/{field}"][:] f.close() return ds.transpose()[:, None, None] diff --git a/yt/frontends/enzo/simulation_handling.py b/yt/frontends/enzo/simulation_handling.py index 5fb432650a6..3a4fefc24c6 100644 --- a/yt/frontends/enzo/simulation_handling.py +++ b/yt/frontends/enzo/simulation_handling.py @@ -424,8 +424,8 @@ def _parse_parameter_file(self): for output in redshift_outputs: output["filename"] = os.path.join( self.parameters["GlobalDir"], - "%s%04d" % (self.parameters["RedshiftDumpDir"], output["index"]), - "%s%04d" % (self.parameters["RedshiftDumpName"], output["index"]), + f"{self.parameters['RedshiftDumpDir']}{output['index']:04}", + f"{self.parameters['RedshiftDumpName']}{output['index']:04}", ) del output["index"] self.all_redshift_outputs = redshift_outputs @@ -449,8 +449,8 @@ def _calculate_time_outputs(self): while current_time <= self.final_time + dt_datadump: filename = os.path.join( self.parameters["GlobalDir"], - "%s%04d" % (self.parameters["DataDumpDir"], index), - "%s%04d" % (self.parameters["DataDumpName"], index), + f"{self.parameters['DataDumpDir']}{index:04}", + f"{self.parameters['DataDumpName']}{index:04}", ) output = {"index": index, "filename": filename, "time": current_time.copy()} @@ -485,8 +485,8 @@ def _calculate_cycle_outputs(self): ): filename = os.path.join( self.parameters["GlobalDir"], - "%s%04d" % (self.parameters["DataDumpDir"], index), - "%s%04d" % (self.parameters["DataDumpName"], index), + f"{self.parameters['DataDumpDir']}{index:04}", + f"{self.parameters['DataDumpName']}{index:04}", ) output = {"index": index, 
"filename": filename, "cycle": cycle} diff --git a/yt/frontends/enzo_e/data_structures.py b/yt/frontends/enzo_e/data_structures.py index b218f181638..175a1042936 100644 --- a/yt/frontends/enzo_e/data_structures.py +++ b/yt/frontends/enzo_e/data_structures.py @@ -45,7 +45,7 @@ def __init__(self, id, index, block_name, filename=None): self.Level = -1 def __repr__(self): - return "EnzoEGrid_%04d" % self.id + return f"EnzoEGrid_{self.id:04}" def _prepare_grid(self): """Copies all the appropriate attributes from the index.""" diff --git a/yt/frontends/enzo_e/tests/test_misc.py b/yt/frontends/enzo_e/tests/test_misc.py index 3f2e95c37de..268e7000548 100644 --- a/yt/frontends/enzo_e/tests/test_misc.py +++ b/yt/frontends/enzo_e/tests/test_misc.py @@ -61,10 +61,10 @@ def flip_random_block_bit(block, rs): # flip a bit in modify_part, and return the new block name with this change flip_index = rs.randint(0, high=len(modify_part)) - parts[part_index] = "%s%d%s" % ( - modify_part[:flip_index], - (int(modify_part[flip_index]) + 1) % 2, - modify_part[flip_index + 1 :], + parts[part_index] = ( + f"{modify_part[:flip_index]}" + f"{(int(modify_part[flip_index]) + 1) % 2}" + f"{modify_part[flip_index + 1 :]}" ) descriptors[descr_index] = ":".join(parts) return "B" + "_".join(descriptors) diff --git a/yt/frontends/exodus_ii/data_structures.py b/yt/frontends/exodus_ii/data_structures.py index 98b28b60d03..3d9106f41f6 100644 --- a/yt/frontends/exodus_ii/data_structures.py +++ b/yt/frontends/exodus_ii/data_structures.py @@ -41,7 +41,7 @@ def _detect_output_fields(self): fnames = elem_names + node_names self.field_list = [] for i in range(1, len(self.meshes) + 1): - self.field_list += [("connect%d" % i, fname) for fname in fnames] + self.field_list += [(f"connect{i}", fname) for fname in fnames] self.field_list += [("all", fname) for fname in fnames] @@ -201,7 +201,7 @@ def _get_fluid_types(self): fluid_types = () i = 1 while True: - ftype = "connect%d" % i + ftype = f"connect{i}" if ftype in ds.variables: fluid_types += (ftype,) i += 1 @@ -240,7 +240,7 @@ def _get_current_time(self): return ds.variables["time_whole"][self.step] except IndexError as e: raise RuntimeError( - "Invalid step number, max is %d" % (self.num_steps - 1) + f"Invalid step number, max is {self.num_steps - 1}" ) from e except (KeyError, TypeError): return 0.0 @@ -317,7 +317,7 @@ def _read_coordinates(self): return coords def _apply_displacement(self, coords, mesh_id): - mesh_name = "connect%d" % (mesh_id + 1) + mesh_name = f"connect{mesh_id + 1}" new_coords = coords.copy() if mesh_name not in self.displacements: return new_coords @@ -329,7 +329,7 @@ def _apply_displacement(self, coords, mesh_id): for i, ax in enumerate(coord_axes): if f"disp_{ax}" in self.parameters["nod_names"]: ind = self.parameters["nod_names"].index(f"disp_{ax}") - disp = ds.variables["vals_nod_var%d" % (ind + 1)][self.step] + disp = ds.variables[f"vals_nod_var{ind + 1}"][self.step] new_coords[:, i] = coords[:, i] + fac * disp + offset[i] return new_coords @@ -342,7 +342,7 @@ def _read_connectivity(self): connectivity = [] with self._handle.open_ds() as ds: for i in range(self.parameters["num_meshes"]): - var = ds.variables["connect%d" % (i + 1)][:].astype("i8") + var = ds.variables[f"connect{i + 1}"][:].astype("i8") try: elem_type = var.elem_type.lower() if elem_type == "nfaced": diff --git a/yt/frontends/exodus_ii/io.py b/yt/frontends/exodus_ii/io.py index b4a560bbd67..609a881466e 100644 --- a/yt/frontends/exodus_ii/io.py +++ b/yt/frontends/exodus_ii/io.py @@ -62,7 
+62,7 @@ def _read_fluid_selection(self, chunks, selector, fields, size): objs = chunk.objs if fname in self.node_fields: field_ind = self.node_fields.index(fname) - fdata = ds.variables["vals_nod_var%d" % (field_ind + 1)] + fdata = ds.variables[f"vals_nod_var{field_ind + 1}"] for g in objs: ci = g.connectivity_indices - self._INDEX_OFFSET data = fdata[self.ds.step][ci] @@ -70,9 +70,8 @@ def _read_fluid_selection(self, chunks, selector, fields, size): if fname in self.elem_fields: field_ind = self.elem_fields.index(fname) for g, mesh_id in zip(objs, mesh_ids, strict=True): - fdata = ds.variables[ - "vals_elem_var%deb%s" % (field_ind + 1, mesh_id) - ][:] + varname = f"vals_elem_var{field_ind+1}eb{mesh_id}" + fdata = ds.variables[varname][:] data = fdata[self.ds.step, :] ind += g.select(selector, data, rv[field], ind) # caches rv[field] = rv[field][:ind] diff --git a/yt/frontends/fits/data_structures.py b/yt/frontends/fits/data_structures.py index e718636248f..dbe3b5df6bf 100644 --- a/yt/frontends/fits/data_structures.py +++ b/yt/frontends/fits/data_structures.py @@ -100,7 +100,7 @@ def _ensure_same_dims(self, hdu): ds = self.dataset conditions = [hdu.header["naxis"] != ds.primary_header["naxis"]] for i in range(ds.naxis): - nax = "naxis%d" % (i + 1) + nax = f"naxis{i+1}" conditions.append(hdu.header[nax] != ds.primary_header[nax]) if np.any(conditions): return False @@ -143,9 +143,9 @@ def _detect_output_fields(self): fname = self._guess_name_from_units(units) # When all else fails if fname is None: - fname = "image_%d" % (j) + fname = f"image_{j}" if self.ds.num_files > 1 and fname.startswith("image"): - fname += "_file_%d" % (i) + fname += f"_file_{i}" if ("fits", fname) in self.field_list: if fname in dup_field_index: dup_field_index[fname] += 1 @@ -159,10 +159,10 @@ def _detect_output_fields(self): fname, dup_field_index[fname], ) - fname += "_%d" % (dup_field_index[fname]) + fname += f"_{dup_field_index[fname]}" for k in range(naxis4): if naxis4 > 1: - fname += "_%s_%d" % (hdu.header["CTYPE4"], k + 1) + fname += f"_{hdu.header['CTYPE4']}_{k+1}" self._axis_map[fname] = k self._file_map[fname] = fits_file self._ext_map[fname] = j @@ -300,7 +300,7 @@ def check_sky_coords(filename, ndim): if header["naxis"] < ndim: return False axis_names = [ - header.get("ctype%d" % (i + 1), "") for i in range(header["naxis"]) + header.get(f"ctype{i+1}", "") for i in range(header["naxis"]) ] if len(axis_names) == 3 and axis_names.count("LINEAR") == 2: return any(a[0] in spec_prefixes for a in axis_names) @@ -500,12 +500,9 @@ def _determine_structure(self): self.primary_header, self.first_image = find_primary_header(self._handle) self.naxis = self.primary_header["naxis"] self.axis_names = [ - self.primary_header.get("ctype%d" % (i + 1), "LINEAR") - for i in range(self.naxis) - ] - self.dims = [ - self.primary_header["naxis%d" % (i + 1)] for i in range(self.naxis) + self.primary_header.get(f"ctype{i+1}", "LINEAR") for i in range(self.naxis) ] + self.dims = [self.primary_header[f"naxis{i+1}"] for i in range(self.naxis)] def _determine_wcs(self): wcs = _astropy.pywcs.WCS(header=self.primary_header) @@ -673,7 +670,7 @@ def _determine_wcs(self): super()._determine_wcs() end = min(self.dimensionality + 1, 4) self.ctypes = np.array( - [self.primary_header["CTYPE%d" % (i)] for i in range(1, end)] + [self.primary_header[f"CTYPE{i}"] for i in range(1, end)] ) self.wcs_2d = self.wcs diff --git a/yt/frontends/gadget/io.py b/yt/frontends/gadget/io.py index 38c367e035c..789c6637ed5 100644 --- a/yt/frontends/gadget/io.py 
+++ b/yt/frontends/gadget/io.py @@ -292,10 +292,10 @@ def _identify_fields(self, data_file): # Vector of metallicity or passive scalar for i in range(g[k].shape[1]): key = "MetalMasses" if k == "Mass of Metals" else k - fields.append((ptype, "%s_%02i" % (key, i))) + fields.append((ptype, f"{key}_{i:02}")) elif k == "ChemistryAbundances" and len(g[k].shape) > 1: for i in range(g[k].shape[1]): - fields.append((ptype, "Chemistry_%03i" % i)) + fields.append((ptype, f"Chemistry_{i:03}")) else: kk = k if not hasattr(g[kk], "shape"): diff --git a/yt/frontends/gadget/simulation_handling.py b/yt/frontends/gadget/simulation_handling.py index b53c08ec89c..f6f6481493c 100644 --- a/yt/frontends/gadget/simulation_handling.py +++ b/yt/frontends/gadget/simulation_handling.py @@ -403,7 +403,7 @@ def _snapshot_format(self, index=None): if index is None: count = "*" else: - count = "%03d" % index + count = f"{index:03}" filename = f"{self.parameters['SnapshotFileBase']}_{count}{suffix}" return os.path.join(self.data_dir, filename) diff --git a/yt/frontends/gadget/testing.py b/yt/frontends/gadget/testing.py index d2b1f4858ee..1e983eb82fb 100644 --- a/yt/frontends/gadget/testing.py +++ b/yt/frontends/gadget/testing.py @@ -27,7 +27,7 @@ def write_record(fp, data, endian): def write_block(fp, data, endian, fmt, block_id): assert fmt in [1, 2] - block_id = "%-4s" % block_id + block_id = f"{block_id:<4}" if fmt == 2: block_id_dtype = np.dtype([("id", "S", 4), ("offset", endian + "i4")]) block_id_data = np.zeros(1, dtype=block_id_dtype) diff --git a/yt/frontends/gadget_fof/data_structures.py b/yt/frontends/gadget_fof/data_structures.py index 953790bb8d3..badf1560a82 100644 --- a/yt/frontends/gadget_fof/data_structures.py +++ b/yt/frontends/gadget_fof/data_structures.py @@ -540,8 +540,8 @@ def __init__(self, ptype, particle_identifier, ds=None): if self.particle_identifier >= self.index.particle_count[ptype]: raise RuntimeError( - "%s %d requested, but only %d %s objects exist." - % (ptype, particle_identifier, self.index.particle_count[ptype], ptype) + f"{ptype} {particle_identifier} requested, " + f"but only {self.index.particle_count[ptype]} {ptype} objects exist." ) # Find the file that has the scalar values for this halo. 
@@ -651,4 +651,4 @@ def __init__(self, ptype, particle_identifier, ds=None): setattr(self, attr, self[self.ptype, f"particle_{attr}"][0]) def __repr__(self): - return "%s_%s_%09d" % (self.ds, self.ptype, self.particle_identifier) + return f"{self.ds}_{self.ptype}_{self.particle_identifier:09}" diff --git a/yt/frontends/gadget_fof/fields.py b/yt/frontends/gadget_fof/fields.py index a05c1f9abb0..ad36bcbc116 100644 --- a/yt/frontends/gadget_fof/fields.py +++ b/yt/frontends/gadget_fof/fields.py @@ -7,13 +7,13 @@ _pnums = 6 _type_fields: KnownFieldsT = tuple( - ("%s%sType_%d" % (ptype, field, pnum), (units, [], None)) + (f"{ptype}{field}Type_{pnum}", (units, [], None)) for pnum in range(_pnums) for field, units in (("Mass", m_units), ("Len", p_units)) for ptype in ("Group", "Subhalo") ) _sub_type_fields: KnownFieldsT = tuple( - ("Subhalo%sType_%d" % (field, pnum), (units, [], None)) + (f"Subhalo{field}Type_{pnum}", (units, [], None)) for pnum in range(_pnums) for field, units in ( ("HalfmassRad", p_units), diff --git a/yt/frontends/gadget_fof/io.py b/yt/frontends/gadget_fof/io.py index f98837ad9d2..28dfd0a7efb 100644 --- a/yt/frontends/gadget_fof/io.py +++ b/yt/frontends/gadget_fof/io.py @@ -303,7 +303,7 @@ def subfind_field_list(fh, ptype, pcount): fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1 :] if my_div > 1: for i in range(int(my_div)): - fields.append((ptype, "%s_%d" % (fname, i))) + fields.append((ptype, f"{fname}_{i}")) else: fields.append((ptype, fname)) elif ( @@ -317,7 +317,7 @@ def subfind_field_list(fh, ptype, pcount): fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1 :] if my_div > 1: for i in range(int(my_div)): - fields.append(("Group", "%s_%d" % (fname, i))) + fields.append(("Group", f"{fname}_{i}")) else: fields.append(("Group", fname)) offset_fields.append(fname) diff --git a/yt/frontends/gamer/data_structures.py b/yt/frontends/gamer/data_structures.py index beede0cb639..9ce4c0cf169 100644 --- a/yt/frontends/gamer/data_structures.py +++ b/yt/frontends/gamer/data_structures.py @@ -173,19 +173,19 @@ def _validate_parent_children_relationship(self): # edges between children and parent for c in grid.Children: for d in range(0, 3): - msgL = ( - "Grid %d, Child %d, Grid->EdgeL %14.7e, Children->EdgeL %14.7e" - % (grid.id, c.id, grid.LeftEdge[d], c.LeftEdge[d]) - ) - msgR = ( - "Grid %d, Child %d, Grid->EdgeR %14.7e, Children->EdgeR %14.7e" - % (grid.id, c.id, grid.RightEdge[d], c.RightEdge[d]) - ) if not grid.LeftEdge[d] <= c.LeftEdge[d]: - raise ValueError(msgL) + raise ValueError( + f"Grid {grid.id}, Child {c.id}, " + f"Grid->EdgeL {grid.LeftEdge[d]:14.7e}, " + f"Children->EdgeL {c.LeftEdge[d]:14.7e}" + ) if not grid.RightEdge[d] >= c.RightEdge[d]: - raise ValueError(msgR) + raise ValueError( + f"Grid {grid.id}, Child {c.id}, " + f"Grid->EdgeR {grid.RightEdge[d]:14.7e}, " + f"Children->EdgeR {c.RightEdge[d]:14.7e}" + ) mylog.info("Check passed") diff --git a/yt/frontends/gdf/io.py b/yt/frontends/gdf/io.py index 36b0fc581f2..bf33f429082 100644 --- a/yt/frontends/gdf/io.py +++ b/yt/frontends/gdf/io.py @@ -7,7 +7,7 @@ def _grid_dname(grid_id): - return "/data/grid_%010i" % grid_id + return f"/data/grid_{grid_id:010}" def _field_dname(grid_id, field_name): diff --git a/yt/frontends/open_pmd/data_structures.py b/yt/frontends/open_pmd/data_structures.py index 7b215a866ba..4293d5c67f7 100644 --- a/yt/frontends/open_pmd/data_structures.py +++ b/yt/frontends/open_pmd/data_structures.py @@ -57,7 +57,7 @@ def __init__(self, gid, index, level=-1, fi=0, fo=0,
pi=0, po=0, ft=None, pt=Non self.Level = level def __str__(self): - return "OpenPMDGrid_%04i (%s)" % (self.id, self.ActiveDimensions) + return f"OpenPMDGrid_{self.id:04} ({self.ActiveDimensions})" class OpenPMDHierarchy(GridIndex): diff --git a/yt/frontends/owls/simulation_handling.py b/yt/frontends/owls/simulation_handling.py index 78d3dc40ee2..ba323662ff2 100644 --- a/yt/frontends/owls/simulation_handling.py +++ b/yt/frontends/owls/simulation_handling.py @@ -55,7 +55,7 @@ def _snapshot_format(self, index=None): if index is None: count = "*" else: - count = "%03d" % index + count = f"{index:03}" keyword = f"{self.parameters['SnapshotFileBase']}_{count}" filename = os.path.join(keyword, f"{keyword}{suffix}") return os.path.join(data_dir, filename) diff --git a/yt/frontends/owls_subfind/io.py b/yt/frontends/owls_subfind/io.py index a145cd59ea8..bf37d4697ec 100644 --- a/yt/frontends/owls_subfind/io.py +++ b/yt/frontends/owls_subfind/io.py @@ -179,7 +179,7 @@ def subfind_field_list(fh, ptype, pcount): fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1 :] if my_div > 1: for i in range(int(my_div)): - fields.append((ptype, "%s_%d" % (fname, i))) + fields.append((ptype, f"{fname}_{i}")) else: fields.append((ptype, fname)) elif ( @@ -193,7 +193,7 @@ def subfind_field_list(fh, ptype, pcount): fname = fh[field].name[fh[field].name.find(ptype) + len(ptype) + 1 :] if my_div > 1: for i in range(int(my_div)): - fields.append(("FOF", "%s_%d" % (fname, i))) + fields.append(("FOF", f"{fname}_{i}")) else: fields.append(("FOF", fname)) offset_fields.append(fname) diff --git a/yt/frontends/parthenon/fields.py b/yt/frontends/parthenon/fields.py index 90c342d29d9..d8951d5ea1d 100644 --- a/yt/frontends/parthenon/fields.py +++ b/yt/frontends/parthenon/fields.py @@ -161,11 +161,11 @@ def _temperature(field, data): # We can simply all all variants as only fields present will be added setup_magnetic_field_aliases( - self, "parthenon", ["MagneticField%d" % ax for ax in (1, 2, 3)] + self, "parthenon", [f"MagneticField{ax}" for ax in (1, 2, 3)] ) setup_magnetic_field_aliases( - self, "parthenon", ["prim_magnetic_field_%d" % ax for ax in (1, 2, 3)] + self, "parthenon", [f"prim_magnetic_field_{ax}" for ax in (1, 2, 3)] ) setup_magnetic_field_aliases( - self, "parthenon", ["cons_magnetic_field_%d" % ax for ax in (1, 2, 3)] + self, "parthenon", [f"cons_magnetic_field_{ax}" for ax in (1, 2, 3)] ) diff --git a/yt/frontends/ramses/data_structures.py b/yt/frontends/ramses/data_structures.py index 34cd6607c30..067d4c5a2cf 100644 --- a/yt/frontends/ramses/data_structures.py +++ b/yt/frontends/ramses/data_structures.py @@ -184,21 +184,15 @@ def __init__(self, ds, domain_id): self.ds = ds self.domain_id = domain_id - num = os.path.basename(ds.parameter_filename).split(".")[0].split("_")[1] - rootdir = ds.root_folder - basedir = os.path.abspath(os.path.dirname(ds.parameter_filename)) - basename = "%s/%%s_%s.out%05i" % (basedir, num, domain_id) - part_file_descriptor = f"{basedir}/part_file_descriptor.txt" + num = ds.basename.split(".")[0].split("_")[1] + basename = os.path.join(ds.directory, f"%s_{num}.out{domain_id:05}") + part_file_descriptor = os.path.join(ds.directory, "part_file_descriptor.txt") if ds.num_groups > 0: igroup = ((domain_id - 1) // ds.group_size) + 1 - basename = "%s/group_%05i/%%s_%s.out%05i" % ( - rootdir, - igroup, - num, - domain_id, + basename = os.path.join( + ds.root_folder, f"group_{igroup:05}", os.path.basename(basename) ) - else: - basename = "%s/%%s_%s.out%05i" % (basedir, num, 
domain_id) + for t in ["grav", "amr"]: setattr(self, f"{t}_fn", basename % t) self._part_file_descriptor = part_file_descriptor @@ -219,7 +213,7 @@ def __init__(self, ds, domain_id): self.particle_handlers = particle_handlers def __repr__(self): - return "RAMSESDomainFile: %i" % self.domain_id + return f"RAMSESDomainFile: {self.domain_id}" @property def level_count(self): @@ -756,16 +750,10 @@ def print_stats(self): print(header) print(f"{len(header.expandtabs()) * '-'}") for level in range(self.dataset.min_level + self.dataset.max_level + 2): - print( - "% 3i\t% 14i\t% 14i" - % ( - level, - self.level_stats["numcells"][level], - np.ceil(self.level_stats["numcells"][level] ** (1.0 / 3)), - ) - ) + ncells = self.level_stats["numcells"][level] + print(f"{level:>3}\t{ncells:>14}\t{int(np.ceil(ncells ** (1.0 / 3))):>14}") print("-" * 46) - print(" \t% 14i" % (self.level_stats["numcells"].sum())) + print(f" \t{self.level_stats['numcells'].sum():>14}") print("\n") dx = self.get_smallest_dx() diff --git a/yt/frontends/ramses/field_handlers.py b/yt/frontends/ramses/field_handlers.py index 288b1580d6a..bca0f86643f 100644 --- a/yt/frontends/ramses/field_handlers.py +++ b/yt/frontends/ramses/field_handlers.py @@ -307,10 +307,9 @@ def detect_fields(cls, ds): num = os.path.basename(ds.parameter_filename).split(".")[0].split("_")[1] testdomain = 1 # Just pick the first domain file to read - basepath = os.path.abspath(os.path.dirname(ds.parameter_filename)) - basename = "%s/%%s_%s.out%05i" % (basepath, num, testdomain) + basename = os.path.join(ds.directory, f"%s_{num}.out{testdomain:05}") fname = basename % "hydro" - fname_desc = os.path.join(basepath, cls.file_descriptor) + fname_desc = os.path.join(ds.directory, cls.file_descriptor) attrs = cls.attrs with FortranFile(fname) as fd: diff --git a/yt/frontends/ramses/fields.py b/yt/frontends/ramses/fields.py index e1da99d73ee..13a1ec538be 100644 --- a/yt/frontends/ramses/fields.py +++ b/yt/frontends/ramses/fields.py @@ -422,11 +422,8 @@ def _photon_flux(field, data): def create_cooling_fields(self) -> bool: "Create cooling fields from the cooling files. Return True if successful."
- num = os.path.basename(self.ds.parameter_filename).split(".")[0].split("_")[1] - filename = "%s/cooling_%05i.out" % ( - os.path.dirname(self.ds.parameter_filename), - int(num), - ) + num = int(self.ds.basename.split(".")[0].split("_")[1]) + filename = os.path.join(self.ds.directory, f"cooling_{num:05}.out") if not os.path.exists(filename): mylog.warning("This output has no cooling fields") diff --git a/yt/frontends/stream/io.py b/yt/frontends/stream/io.py index 3d422992e53..1a73da4c2f0 100644 --- a/yt/frontends/stream/io.py +++ b/yt/frontends/stream/io.py @@ -300,7 +300,7 @@ def _read_fluid_selection(self, chunks, selector, fields, size): for g in objs: ds = self.fields[g.mesh_id].get(field, None) if ds is None: - f = ("connect%d" % (g.mesh_id + 1), fname) + f = (f"connect{g.mesh_id + 1}", fname) ds = self.fields[g.mesh_id][f] ind += g.select(selector, ds, rv[field], ind) # caches rv[field] = rv[field][:ind] diff --git a/yt/frontends/ytdata/data_structures.py b/yt/frontends/ytdata/data_structures.py index 17c12614662..2e2a27da569 100644 --- a/yt/frontends/ytdata/data_structures.py +++ b/yt/frontends/ytdata/data_structures.py @@ -942,7 +942,7 @@ def add_child(self, child): child.parent = self def __repr__(self): - return "Clump[%d]" % self.clump_id + return f"Clump[{self.clump_id}]" def __getitem__(self, field): g = self.ds.data diff --git a/yt/funcs.py b/yt/funcs.py index 41a67a55f7e..4351a400955 100644 --- a/yt/funcs.py +++ b/yt/funcs.py @@ -137,7 +137,7 @@ def humanize_time(secs): """ mins, secs = divmod(secs, 60) hours, mins = divmod(mins, 60) - return "%02d:%02d:%02d" % (hours, mins, secs) + return ":".join(f"{int(t):02}" for t in (hours, mins, secs)) # @@ -695,11 +695,9 @@ def parallel_profile(prefix): """ import cProfile - fn = "%s_%04i_%04i.cprof" % ( - prefix, - ytcfg.get("yt", "internals", "topcomm_parallel_size"), - ytcfg.get("yt", "internals", "topcomm_parallel_rank"), - ) + topcomm_parallel_size = ytcfg.get("yt", "internals", "topcomm_parallel_size") + topcomm_parallel_rank = ytcfg.get("yt", "internals", "topcomm_parallel_rank") + fn = f"{prefix}_{topcomm_parallel_size:04}_{topcomm_parallel_rank:04}.cprof" p = cProfile.Profile() p.enable() yield fn diff --git a/yt/geometry/grid_geometry_handler.py b/yt/geometry/grid_geometry_handler.py index da69dcab24a..a90bcf3131b 100644 --- a/yt/geometry/grid_geometry_handler.py +++ b/yt/geometry/grid_geometry_handler.py @@ -208,19 +208,17 @@ def print_stats(self): if (self.level_stats["numgrids"][level]) == 0: continue print( - "% 3i\t% 6i\t% 14i\t% 14i" - % ( - level, - self.level_stats["numgrids"][level], - self.level_stats["numcells"][level], - np.ceil(self.level_stats["numcells"][level] ** (1.0 / 3)), - ) + f"{level:>3}\t" + f"{self.level_stats['numgrids'][level]:>6}\t" + f"{self.level_stats['numcells'][level]:>14}\t" + f"{int(np.ceil(self.level_stats['numcells'][level] ** (1.0 / 3))):>14}" ) dx = self.select_grids(level)[0].dds[0] print("-" * 46) print( - " \t% 6i\t% 14i" - % (self.level_stats["numgrids"].sum(), self.level_stats["numcells"].sum()) + " \t" + f"{self.level_stats['numgrids'].sum():>6}\t" + f"{self.level_stats['numcells'].sum():>14}" ) print("\n") try: diff --git a/yt/geometry/tests/test_particle_octree.py b/yt/geometry/tests/test_particle_octree.py index b6724902707..34a0bf33b2b 100644 --- a/yt/geometry/tests/test_particle_octree.py +++ b/yt/geometry/tests/test_particle_octree.py @@ -202,8 +202,8 @@ def test_bitmap_no_collisions(): mask = reg.masks.sum(axis=1).astype("uint8") ncoll = np.sum(mask > 1) nc, nm =
reg.find_collisions_coarse() - assert_equal(nc, 0, "%d coarse collisions" % nc) - assert_equal(ncoll, nc, "%d in mask, %d in bitmap" % (ncoll, nc)) + assert_equal(nc, 0, f"{nc} coarse collisions") + assert_equal(ncoll, nc, f"{ncoll} in mask, {nc} in bitmap") # Refined index sub_mi1 = np.zeros(max_npart, "uint64") sub_mi2 = np.zeros(max_npart, "uint64") @@ -225,7 +225,7 @@ def test_bitmap_no_collisions(): reg.bitmasks.append(i, coll) assert_equal(reg.count_refined(i), 0) nr, nm = reg.find_collisions_refined() - assert_equal(nr, 0, "%d collisions" % nr) + assert_equal(nr, 0, f"{nr} collisions") def test_bitmap_collisions(): @@ -253,8 +253,8 @@ def test_bitmap_collisions(): mask = reg.masks.sum(axis=1).astype("uint8") ncoll = np.sum(mask > 1) nc, nm = reg.find_collisions_coarse() - assert_equal(ncoll, nc, "%d in mask, %d in bitmap" % (ncoll, nc)) - assert_equal(nc, 2 ** (3 * order1), "%d coarse collisions" % nc) + assert_equal(ncoll, nc, f"{ncoll} in mask, {nc} in bitmap") + assert_equal(nc, 2 ** (3 * order1), f"{nc} coarse collisions") # Refined index sub_mi1 = np.zeros(max_npart, "uint64") sub_mi2 = np.zeros(max_npart, "uint64") @@ -274,7 +274,7 @@ def test_bitmap_collisions(): reg.bitmasks.append(i, coll) assert_equal(reg.count_refined(i), ncoll) nr, nm = reg.find_collisions_refined() - assert_equal(nr, 2 ** (3 * (order1 + order2)), "%d collisions" % nr) + assert_equal(nr, 2 ** (3 * (order1 + order2)), f"{nr} collisions") @requires_module("h5py") diff --git a/yt/loaders.py b/yt/loaders.py index f13e3180fc4..f057026f391 100644 --- a/yt/loaders.py +++ b/yt/loaders.py @@ -1467,7 +1467,7 @@ def load_unstructured_mesh( fluid_types = ["all"] for i in range(1, num_meshes + 1): - fluid_types += ["connect%d" % i] + fluid_types += [f"connect{i}"] sds.fluid_types = tuple(fluid_types) def flatten(l): diff --git a/yt/utilities/command_line.py b/yt/utilities/command_line.py index 459406d4d0a..1e52d2c2db3 100644 --- a/yt/utilities/command_line.py +++ b/yt/utilities/command_line.py @@ -231,7 +231,7 @@ def __call__(self, parser, namespace, values, option_string=None): datasets = values elif len(values) == 2 and namespace.basename is not None: datasets = [ - "%s%04i" % (namespace.basename, r) + f"{namespace.basename}{r:04}" for r in range(int(values[0]), int(values[1]), namespace.skip) ] else: @@ -1398,7 +1398,7 @@ def __call__(self, args): candidates = [] for base, dirs, files in os.walk(".", followlinks=True): - print("(% 10i candidates) Examining %s" % (len(candidates), base)) + print(f"({len(candidates):>10} candidates) Examining {base}") recurse = [] if args.check_all: candidates.extend([os.path.join(base, _) for _ in files]) @@ -1412,7 +1412,7 @@ def __call__(self, args): # and try to load each one. records = [] for i, c in enumerate(sorted(candidates)): - print("(% 10i/% 10i) Evaluating %s" % (i, len(candidates), c)) + print(f"({i:>10}/{len(candidates):>10}) Evaluating {c}") try: record = get_metadata(c, args.full_output) except YTUnidentifiedDataType: diff --git a/yt/utilities/exceptions.py b/yt/utilities/exceptions.py index fdee68afa6d..0f7fb840cb9 100644 --- a/yt/utilities/exceptions.py +++ b/yt/utilities/exceptions.py @@ -632,9 +632,8 @@ def __init__(self, geometry): def __str__(self): return ( - """Unknown geometry %i. Please refer to GDF standard - for more information""" - % self.geometry + f"Unknown geometry {self.geometry}. " + "Please refer to GDF standard for more information" ) diff --git a/yt/utilities/grid_data_format/conversion/conversion_athena.py b/yt/utilities/grid_data_format/conversion/conversion_athena.py index 17516218ba7..ebb88b4f6f4 100644 --- a/yt/utilities/grid_data_format/conversion/conversion_athena.py +++ b/yt/utilities/grid_data_format/conversion/conversion_athena.py @@ -28,7 +28,7 @@ def __init__(self, basename, outname=None, source_dir=None, field_conversions=No self.source_dir = source_dir self.basename = name[0] if outname is None: - outname = self.basename + ".%04i" % self.ddn + ".gdf" + outname = f"{self.basename}.{self.ddn:04}.gdf" self.outname = outname if field_conversions is None: field_conversions = {} @@ -68,10 +68,10 @@ def parse_line(self, line, grid): def write_gdf_field(self, fn, grid_number, field, data): f = self.handle ## --------- Store Grid Data --------- ## - if "grid_%010i" % grid_number not in f["data"].keys(): - g = f["data"].create_group("grid_%010i" % grid_number) + if (group_name := f"grid_{grid_number:010}") not in f["data"].keys(): + g = f["data"].create_group(group_name) else: - g = f["data"]["grid_%010i" % grid_number] + g = f["data"][group_name] name = field try: name = translation_dict[name] @@ -130,8 +130,8 @@ def read_and_write_index(self, basename, ddn, gdf_name): grid["dimensions"][grid["dimensions"] == 0] = 1 if np.prod(grid["dimensions"]) != grid["ncells"]: print( - "product of dimensions %i not equal to number of cells %i" - % (np.prod(grid["dimensions"]), grid["ncells"]) + f"product of dimensions {np.prod(grid['dimensions'])} " + f"not equal to number of cells {grid['ncells']}" ) raise TypeError @@ -235,8 +235,8 @@ def read_and_write_data(self, basename, ddn, gdf_name): grid_dims[grid_dims == 0] = 1 if np.prod(grid_dims) != grid_ncells: print( - "product of dimensions %i not equal to number of cells %i" - % (np.prod(grid_dims), grid_ncells) + f"product of dimensions {np.prod(grid_dims)} " + f"not equal to number of cells {grid_ncells}" ) raise TypeError break @@ -312,7 +312,7 @@ def __init__(self, basename, outname=None, field_conversions=None): self.fields = [] self.basename = basename name = basename.split(".") - fn = "%s.%04i" % (name[0], int(name[1])) + fn = f"{name[0]}.{int(name[1]):04}" self.ddn = int(name[1]) self.basename = fn if outname is None: @@ -373,8 +373,8 @@ def read_grid(self, filename): grid["dimensions"] -= 1 if np.prod(grid["dimensions"]) != grid["ncells"]: print( - "product of dimensions %i not equal to number of cells %i" - % (np.prod(grid["dimensions"]), grid["ncells"]) + f"product of dimensions {np.prod(grid['dimensions'])} " + f"not equal to number of cells {grid['ncells']}" ) raise TypeError @@ -450,7 +450,7 @@ def write_to_gdf(self, fn, grid): ## --------- Store Grid Data --------- ## - g0 = data_g.create_group("grid_%010i" % 0) + g0 = data_g.create_group(f"grid_{0:010}") for field in self.fields: name = field if field in translation_dict.keys(): diff --git a/yt/utilities/grid_data_format/writer.py b/yt/utilities/grid_data_format/writer.py index 5391dff592b..c9fac41c0d9 100644 --- a/yt/utilities/grid_data_format/writer.py +++ b/yt/utilities/grid_data_format/writer.py @@ -163,7 +163,7 @@ def _write_fields_to_gdf( fi = ds._get_field_info(field) ftype, fname = fi.name - grid_group = g["grid_%010i" % (grid.id - grid._id_offset)] + grid_group = g[f"grid_{grid.id - grid._id_offset:010}"] particles_group = grid_group["particles"] pt_group = particles_group[particle_type_name] @@ -190,7 +190,7 @@ def _write_fields_to_gdf( for k,
v in field_parameters.items(): grid.set_field_parameter(k, v) - grid_group = g["grid_%010i" % (grid.id - grid._id_offset)] + grid_group = g[f"grid_{grid.id - grid._id_offset:010}"] particles_group = grid_group["particles"] pt_group = particles_group[particle_type_name] # add the field data to the grid group @@ -345,7 +345,7 @@ def _create_new_gdf( g = f.create_group("data") for grid in ds.index.grids: # add group for this grid - grid_group = g.create_group("grid_%010i" % (grid.id - grid._id_offset)) + grid_group = g.create_group(f"grid_{grid.id - grid._id_offset:010}") # add group for the particles on this grid particles_group = grid_group.create_group("particles") particles_group.create_group(particle_type_name) diff --git a/yt/utilities/io_handler.py b/yt/utilities/io_handler.py index e58dbc93450..9a5bd9c123c 100644 --- a/yt/utilities/io_handler.py +++ b/yt/utilities/io_handler.py @@ -64,7 +64,7 @@ def _field_in_backup(self, grid, backup_file, field_name): if os.path.exists(backup_file): fhandle = h5py.File(backup_file, mode="r") g = fhandle["data"] - grid_group = g["grid_%010i" % (grid.id - grid._id_offset)] + grid_group = g[f"grid_{grid.id - grid._id_offset:010}"] if field_name in grid_group: return_val = True else: @@ -83,7 +83,7 @@ def _read_data_set(self, grid, field): elif self._field_in_backup(grid, backup_filename, field): fhandle = h5py.File(backup_filename, mode="r") g = fhandle["data"] - grid_group = g["grid_%010i" % (grid.id - grid._id_offset)] + grid_group = g[f"grid_{grid.id - grid._id_offset:010}"] data = grid_group[field][:] fhandle.close() return data diff --git a/yt/utilities/lib/cykdtree/plot.py b/yt/utilities/lib/cykdtree/plot.py index 414b2a8f388..ba5a60a362d 100644 --- a/yt/utilities/lib/cykdtree/plot.py +++ b/yt/utilities/lib/cykdtree/plot.py @@ -162,7 +162,7 @@ def plot2D_serial(tree, pts=None, label_boxes=False, **kwargs): if label_boxes: txt = [] for leaf in tree.leaves: - txt.append((leaf.left_edge[0], leaf.left_edge[1], "%d" % leaf.id)) + txt.append((leaf.left_edge[0], leaf.left_edge[1], str(leaf.id))) # Return axes return _plot2D_root(seg, pts=pts, txt=txt, **kwargs) diff --git a/yt/utilities/mesh_code_generation.py b/yt/utilities/mesh_code_generation.py index 3127f781c9c..9452ad28d5b 100644 --- a/yt/utilities/mesh_code_generation.py +++ b/yt/utilities/mesh_code_generation.py @@ -98,11 +98,11 @@ def _compute_jacobian(self): self.scol = MatrixSymbol("scol", self.num_dim, 1) self.tcol = MatrixSymbol("tcol", self.num_dim, 1) - self.function_name = "%sFunction%dD" % (self.mesh_type, self.num_dim) + self.function_name = f"{self.mesh_type}Function{self.num_dim}D" self.function_header = fun_def_template % self.function_name self.function_declaration = fun_dec_template % self.function_name - self.jacobian_name = "%sJacobian%dD" % (self.mesh_type, self.num_dim) + self.jacobian_name = f"{self.mesh_type}Jacobian{self.num_dim}D" if self.num_dim == 3: self.jacobian_header = jac_def_template_3D % self.jacobian_name diff --git a/yt/utilities/parallel_tools/parallel_analysis_interface.py b/yt/utilities/parallel_tools/parallel_analysis_interface.py index 05288cbf43d..4cd553c601e 100644 --- a/yt/utilities/parallel_tools/parallel_analysis_interface.py +++ b/yt/utilities/parallel_tools/parallel_analysis_interface.py @@ -127,14 +127,12 @@ def enable_parallelism(suppress_logging: bool = False, communicator=None) -> boo yt.utilities.logger.uncolorize_logging() # Even though the uncolorize function already resets the format string, # we reset it again so that it includes the 
processor. - f = logging.Formatter( - "P%03i %s" % (communicator.rank, yt.utilities.logger.ufstring) - ) + f = logging.Formatter(f"P{communicator.rank:03} {yt.utilities.logger.ufstring}") if len(yt.utilities.logger.ytLogger.handlers) > 0: yt.utilities.logger.ytLogger.handlers[0].setFormatter(f) if ytcfg.get("yt", "parallel_traceback"): - sys.excepthook = traceback_writer_hook("_%03i" % communicator.rank) + sys.excepthook = traceback_writer_hook(f"_{communicator.rank:03}") else: sys.excepthook = default_mpi_excepthook @@ -448,7 +446,7 @@ def from_sizes(cls, sizes): if is_sequence(size): size, name = size else: - name = "workgroup_%02i" % i + name = f"workgroup_{i:02}" pool.add_workgroup(size, name=name) for wg in pool.workgroups: if rank in wg.ranks: @@ -1015,10 +1013,9 @@ def write_on_root(self, fn): def get_filename(self, prefix, rank=None): if not self._distributed: return prefix - if rank is None: - return "%s_%04i" % (prefix, self.comm.rank) - else: - return "%s_%04i" % (prefix, rank) + if rank is None: + rank = self.comm.rank + return f"{prefix}_{rank:04}" def is_mine(self, obj): if not obj._distributed: diff --git a/yt/utilities/performance_counters.py b/yt/utilities/performance_counters.py index 5dcfc5ac579..2e7f864f843 100644 --- a/yt/utilities/performance_counters.py +++ b/yt/utilities/performance_counters.py @@ -70,24 +70,13 @@ def print_stats(self): if i[2] == 0: shift -= 1 endtimes[i[1]] = self.counters[i[1]] - line = "" + line_fragments: list[str] = [] for i in order: - if self.counting[i]: - line = "%s%s%i : %s : still running\n" % ( - line, - " " * shifts[i] * multi, - shifts[i], - i, - ) - else: - line = "%s%s%i : %s : %0.3e\n" % ( - line, - " " * shifts[i] * multi, - shifts[i], - i, - self.counters[i], - ) - mylog.info("\n%s", line) + line_fragments.append( + f"{' ' * shifts[i] * multi}{shifts[i]} : {i} : " + f"{'still running' if self.counting[i] else f'{self.counters[i]:0.3e}'}\n" + ) + mylog.info("\n%s", "".join(line_fragments)) def exit(self): if self._on: @@ -122,14 +111,12 @@ def run_in_profiler(*args, **kwargs): return wrapper def write_out(self, filename_prefix): + pfn = str(filename_prefix) if ytcfg.get("yt", "internals", "parallel"): - pfn = "%s_%03i_%03i" % ( - filename_prefix, - ytcfg.get("yt", "internals", "global_parallel_rank"), - ytcfg.get("yt", "internals", "global_parallel_size"), - ) - else: - pfn = f"{filename_prefix}" + global_parallel_rank = ytcfg.get("yt", "internals", "global_parallel_rank") + global_parallel_size = ytcfg.get("yt", "internals", "global_parallel_size") + pfn += f"_{global_parallel_rank:03}_{global_parallel_size:03}" + for n, p in sorted(self.profilers.items()): fn = f"{pfn}_{n}.cprof" mylog.info("Dumping %s into %s", n, fn) diff --git a/yt/utilities/sdf.py b/yt/utilities/sdf.py index acf7e1a0ba6..d4f25bfb7c4 100644 --- a/yt/utilities/sdf.py +++ b/yt/utilities/sdf.py @@ -340,7 +340,7 @@ def write(self, filename): k, v = var[0], _rev_types[var[1]] to_write.append(k) f.write(f"\t{v} {k};\n") - f.write("}[%i];\n" % s.size) + f.write(f"}}[{s.size}];\n") struct_order.append(to_write) f.write("#\x0c\n") f.write("# SDF-EOH\n") @@ -1141,7 +1141,7 @@ def get_contiguous_chunk(self, left_key, right_key, fields): lbase = 0 if left_key > self._max_key: raise RuntimeError( - "Left key is too large. Key: %i Max Key: %i" % (left_key, self._max_key) + f"Left key is too large.
Key: {left_key} Max Key: {self._max_key}" ) right_key = min(right_key, self._max_key) @@ -1163,7 +1163,7 @@ def get_contiguous_chunk(self, left_key, right_key, fields): def get_key_data(self, key, fields): if key > self._max_key: raise RuntimeError( - "Left key is too large. Key: %i Max Key: %i" % (key, self._max_key) + f"Left key is too large. Key: {key} Max Key: {self._max_key}" ) base = self.indexdata["base"][key] length = self.indexdata["len"][key] - base diff --git a/yt/utilities/tests/test_cosmology.py b/yt/utilities/tests/test_cosmology.py index e44842045ae..69bcffa4597 100644 --- a/yt/utilities/tests/test_cosmology.py +++ b/yt/utilities/tests/test_cosmology.py @@ -62,7 +62,7 @@ def z_from_t_analytic(my_time, hubble_constant=0.7, omega_matter=0.3, omega_lamb if not mask.any(): break if i == max_iter - 1: - raise RuntimeError("No convergence after %d iterations." % i) + raise RuntimeError(f"No convergence after {i} iterations.") # Now use eta to compute the expansion factor (eq. 13-10, part 2). diff --git a/yt/visualization/eps_writer.py b/yt/visualization/eps_writer.py index 94420410236..bf1bafd9e59 100644 --- a/yt/visualization/eps_writer.py +++ b/yt/visualization/eps_writer.py @@ -1280,8 +1280,8 @@ def multiplot( if images is not None: if len(images) != npanels: raise RuntimeError( - "Number of images (%d) doesn't match nrow(%d)" - " x ncol(%d)." % (len(images), nrow, ncol) + f"Number of images ({len(images)}) doesn't match " + f"nrow({nrow}) x ncol({ncol})." ) if yt_plots is None and images is None: raise RuntimeError("Must supply either yt_plots or image filenames.") diff --git a/yt/visualization/plot_modifications.py b/yt/visualization/plot_modifications.py index 0e658776a39..3062ab8a196 100644 --- a/yt/visualization/plot_modifications.py +++ b/yt/visualization/plot_modifications.py @@ -1248,7 +1248,7 @@ def __call__(self, plot): "'upper left', and 'upper right'." 
) xi, yi = self._sanitize_xy_order(plot, x[i], y[i]) - plot._axes.text(xi, yi, "%d" % block_ids[n], clip_on=True) + plot._axes.text(xi, yi, str(block_ids[n]), clip_on=True) # when type-checking with MPL >= 3.8, use @@ -3261,7 +3261,7 @@ def __call__(self, plot): elif self.texture.shape != (nx, ny): raise ValueError( "'texture' must have the same shape " - "with that of output image (%d, %d)" % (nx, ny) + f"with that of output image ({nx}, {ny})" ) kernel = np.sin( diff --git a/yt/visualization/tests/test_image_comp_2D_plots.py b/yt/visualization/tests/test_image_comp_2D_plots.py index b7f99100fce..67fa005bac0 100644 --- a/yt/visualization/tests/test_image_comp_2D_plots.py +++ b/yt/visualization/tests/test_image_comp_2D_plots.py @@ -419,7 +419,7 @@ class TestCylindricalZSlicePlot: def setup_class(cls): cls.ds = fake_amr_ds(geometry="cylindrical") add_noise_fields(cls.ds) - fields = ["noise%d" % i for i in range(4)] + fields = [f"noise{i}" for i in range(4)] cls.plot = SlicePlot(cls.ds, "z", fields) @pytest.mark.parametrize("field", ["noise0", "noise1", "noise2", "noise3"]) @@ -467,7 +467,7 @@ class TestSphericalPhiSlicePlot: def setup_class(cls): cls.ds = fake_amr_ds(geometry="spherical") add_noise_fields(cls.ds) - fields = ["noise%d" % i for i in range(4)] + fields = [f"noise{i}" for i in range(4)] cls.plot = SlicePlot(cls.ds, "phi", fields) @pytest.mark.parametrize("field", ["noise0", "noise1", "noise2", "noise3"]) @@ -481,7 +481,7 @@ class TestSphericalThetaSlicePlot: def setup_class(cls): cls.ds = fake_amr_ds(geometry="spherical") add_noise_fields(cls.ds) - fields = ["noise%d" % i for i in range(4)] + fields = [f"noise{i}" for i in range(4)] cls.plot = SlicePlot(cls.ds, "theta", fields) @pytest.mark.parametrize("field", ["noise0", "noise1", "noise2", "noise3"]) diff --git a/yt/visualization/volume_rendering/camera_path.py b/yt/visualization/volume_rendering/camera_path.py index 03ef19b5649..9ad14c75be6 100644 --- a/yt/visualization/volume_rendering/camera_path.py +++ b/yt/visualization/volume_rendering/camera_path.py @@ -78,7 +78,7 @@ def __init__( Nz = 1 ndims = 2 if Nx * Ny * Nz != Nx**ndims: - print("Need Nx (%d) == Ny (%d) == Nz (%d)" % (Nx, Ny, Nz)) + print(f"Need Nx ({Nx}) == Ny ({Ny}) == Nz ({Nz})") raise RuntimeError self.nframes = Nx self.pos = np.zeros((Nx, 3)) @@ -315,22 +315,28 @@ def write_path(self, filename="path.dat"): Filename containing the camera path. 
Default: path.dat
         """
         fp = open(filename, "w")
-        fp.write(
-            "#%11s %12s %12s %12s %12s %12s %12s %12s %12s\n"
-            % ("x", "y", "z", "north_x", "north_y", "north_z", "up_x", "up_y", "up_z")
-        )
+        fields = [
+            "y",
+            "z",
+            "north_x",
+            "north_y",
+            "north_z",
+            "up_x",
+            "up_y",
+            "up_z",
+        ]
+        fp.write(f"#{'x':>11} " + " ".join(f"{s:>12}" for s in fields) + "\n")
         for i in range(self.npoints):
-            fp.write(
-                "{:.12f} {:.12f} {:.12f} {:.12f} {:.12f} {:.12f} {:.12f} {:.12f} {:.12f}\n".format(
-                    self.path["position"][i, 0],
-                    self.path["position"][i, 1],
-                    self.path["position"][i, 2],
-                    self.path["north_vectors"][i, 0],
-                    self.path["north_vectors"][i, 1],
-                    self.path["north_vectors"][i, 2],
-                    self.path["up_vectors"][i, 0],
-                    self.path["up_vectors"][i, 1],
-                    self.path["up_vectors"][i, 2],
-                )
-            )
+            values = [
+                self.path["position"][i, 0],
+                self.path["position"][i, 1],
+                self.path["position"][i, 2],
+                self.path["north_vectors"][i, 0],
+                self.path["north_vectors"][i, 1],
+                self.path["north_vectors"][i, 2],
+                self.path["up_vectors"][i, 0],
+                self.path["up_vectors"][i, 1],
+                self.path["up_vectors"][i, 2],
+            ]
+            fp.write(" ".join(f"{v:.12f}" for v in values) + "\n")
         fp.close()
diff --git a/yt/visualization/volume_rendering/old_camera.py b/yt/visualization/volume_rendering/old_camera.py
index d1b50ea952d..00618bbc1f7 100644
--- a/yt/visualization/volume_rendering/old_camera.py
+++ b/yt/visualization/volume_rendering/old_camera.py
@@ -1208,7 +1208,7 @@ def save(self, fn):
 
     def save_frames(self, basename, clip_ratio=None):
         for i, frame in enumerate(self.frames):
-            fn = basename + "_%04i.png" % i
+            fn = f"{basename}_{i:04}.png"
             if clip_ratio is not None:
                 write_bitmap(frame, fn, clip_ratio * frame.std())
             else:
@@ -2050,7 +2050,7 @@ def __init__(
         # This is a temporary field, which we will remove at the end
         # it is given a unique name to avoid conflicting with other
         # class instances
-        self.weightfield = ("index", "temp_weightfield_%u" % (id(self),))
+        self.weightfield = ("index", f"temp_weightfield_{id(self)}")
 
         def _make_wf(f, w):
             def temp_weightfield(field, data):
diff --git a/yt/visualization/volume_rendering/scene.py b/yt/visualization/volume_rendering/scene.py
index 0e11fe53b18..22dd410c5ed 100644
--- a/yt/visualization/volume_rendering/scene.py
+++ b/yt/visualization/volume_rendering/scene.py
@@ -127,7 +127,7 @@ def add_source(self, render_source, keyname=None):
         dictionary.
         """
         if keyname is None:
-            keyname = "source_%02i" % len(self.sources)
+            keyname = f"source_{len(self.sources):02}"
 
         data_sources = (VolumeSource, MeshSource, GridSource)
 
diff --git a/yt/visualization/volume_rendering/transfer_functions.py b/yt/visualization/volume_rendering/transfer_functions.py
index ee0efa01e80..98991a814be 100644
--- a/yt/visualization/volume_rendering/transfer_functions.py
+++ b/yt/visualization/volume_rendering/transfer_functions.py
@@ -910,8 +910,8 @@ def clear(self):
     def __repr__(self):
         disp = (
             ":\n"
-            + "x_bounds:[%3.2g, %3.2g] nbins:%i features:\n"
-            % (self.x_bounds[0], self.x_bounds[1], self.nbins)
+            f"x_bounds:[{self.x_bounds[0]:3.2g}, {self.x_bounds[1]:3.2g}] "
+            f"nbins:{self.nbins} features:\n"
        )
         for f in self.features:
             disp += f"\t{str(f)}\n"

From 147548d3c92376cc84784a2744697ec7dbf45164 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment=20Robert?=
Date: Tue, 26 Nov 2024 09:44:02 +0100
Subject: [PATCH 2/3] fixup! 
STY: manual fixes for newly flagged violations of UP031 --- yt/data_objects/construction_data_containers.py | 2 +- yt/frontends/athena/data_structures.py | 3 +-- yt/frontends/enzo/data_structures.py | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/yt/data_objects/construction_data_containers.py b/yt/data_objects/construction_data_containers.py index 0681b08e582..e80308047d2 100644 --- a/yt/data_objects/construction_data_containers.py +++ b/yt/data_objects/construction_data_containers.py @@ -2612,7 +2612,7 @@ def _export_ply( ) else: v = np.empty(self.vertices.shape[1], dtype=vs[:3]) - line = f"element face {nv/3}\n" + line = f"element face {int(nv/3)}\n" f.write(line.encode("latin-1")) f.write(b"property list uchar int vertex_indices\n") if color_field is not None and sample_type == "face": diff --git a/yt/frontends/athena/data_structures.py b/yt/frontends/athena/data_structures.py index 79683cd9f9c..f72b897277e 100644 --- a/yt/frontends/athena/data_structures.py +++ b/yt/frontends/athena/data_structures.py @@ -297,8 +297,7 @@ def _parse_index(self): gridread["dimensions"][gridread["dimensions"] == 0] = 1 if np.prod(gridread["dimensions"]) != gridread["ncells"]: mylog.error( - f"product of dimensions {np.prod(grid['dimensions'])} " - f"not equal to number of cells {grid['ncells']}", + "product of dimensions %i not equal to number of cells %i", np.prod(gridread["dimensions"]), gridread["ncells"], ) diff --git a/yt/frontends/enzo/data_structures.py b/yt/frontends/enzo/data_structures.py index 6f2d6cd0e07..2c7dcfc27ea 100644 --- a/yt/frontends/enzo/data_structures.py +++ b/yt/frontends/enzo/data_structures.py @@ -574,7 +574,7 @@ def _parse_index(self): self.grids = np.empty(len(grids), dtype="object") for i, grid in enumerate(grids): if (i % 1e4) == 0: - mylog.debug(f"Prepared {i:>7} / {self.num_grids:>7} grids") + mylog.debug("Prepared % 7i / % 7i grids", i, self.num_grids) grid.filename = f"Inline_processor_{self.grid_procs[i, 0]:07}" grid._prepare_grid() grid._setup_dx() From 96fe344fc58b2663f87d2b184ff067e4441d02ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Robert?= Date: Wed, 27 Nov 2024 08:26:22 +0100 Subject: [PATCH 3/3] Update yt/geometry/grid_geometry_handler.py Co-authored-by: Chris Havlin --- yt/geometry/grid_geometry_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yt/geometry/grid_geometry_handler.py b/yt/geometry/grid_geometry_handler.py index a90bcf3131b..4d4c05d03a0 100644 --- a/yt/geometry/grid_geometry_handler.py +++ b/yt/geometry/grid_geometry_handler.py @@ -211,7 +211,7 @@ def print_stats(self): f"{level:>3}\t" f"{self.level_stats['numgrids'][level]:>6}\t" f"{self.level_stats['numcells'][level]:>14}\t" - f"{np.ceil(self.level_stats['numcells'][level] ** (1.0 / 3)):>14}" + f"{int(np.ceil(self.level_stats['numcells'][level] ** (1.0 / 3))):>14}" ) dx = self.select_grids(level)[0].dds[0] print("-" * 46)
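
The conversions in this series are mostly mechanical, but the two fixup commits show exactly where they are not. The following is a minimal, illustrative sketch of the recurring patterns and their pitfalls; it is not part of the patch itself, and the names (rank, nv, size) are stand-ins for the variables touched above:

    import logging

    mylog = logging.getLogger("yt")
    rank, nv, size = 7, 10.0, 4

    # Zero-padded integers: the "%03i" width/fill carries over as a format spec.
    assert "P%03i" % rank == f"P{rank:03}"

    # "%i" silently truncates floats, an f-string does not; the conversion
    # needs an explicit int(), as in the "element face" fixup above.
    assert "%i" % (nv / 3) == f"{int(nv / 3)}"

    # Literal braces must be doubled inside an f-string (cf. the sdf.py hunk).
    assert "}[%i];" % size == f"}}[{size}];"

    # Logging calls deliberately keep lazy %-style arguments: the message is
    # only interpolated when the record is emitted (cf. the athena/enzo fixups).
    mylog.debug("Prepared % 7i / % 7i grids", 0, 100)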