From 43f9714b0434c6c1c9d0dbe2692829ab8834aac8 Mon Sep 17 00:00:00 2001
From: Yann Leprince
Date: Tue, 2 Jul 2024 16:40:41 +0200
Subject: [PATCH] upgrade old idioms with pyupgrade

---
 docs/conf.py                                  |  1 -
 experimental/mesh_to_vtk.py                   | 10 +++----
 experimental/off_to_vtk.py                    | 12 ++++----
 pyproject.toml                                |  1 +
 script_tests/test_scripts.py                  | 14 ++++-----
 .../_compressed_segmentation.py               |  6 ++--
 src/neuroglancer_scripts/_jpeg.py             | 17 +++++------
 src/neuroglancer_scripts/accessor.py          |  4 +--
 src/neuroglancer_scripts/chunk_encoding.py    | 18 +++++------
 src/neuroglancer_scripts/data_types.py        |  2 +-
 src/neuroglancer_scripts/downscaling.py       |  4 +--
 src/neuroglancer_scripts/dyadic_pyramid.py    | 12 ++++----
 src/neuroglancer_scripts/file_accessor.py     | 30 +++++++++----------
 src/neuroglancer_scripts/http_accessor.py     |  7 ++---
 src/neuroglancer_scripts/mesh.py              | 25 ++++++++--------
 .../scripts/convert_chunks.py                 |  4 +--
 .../scripts/mesh_to_precomputed.py            |  6 ++--
 .../scripts/scale_stats.py                    | 13 ++++----
 .../scripts/slices_to_precomputed.py          | 10 +++----
 .../scripts/volume_to_precomputed_pyramid.py  |  2 +-
 src/neuroglancer_scripts/sharded_base.py      |  2 +-
 .../sharded_file_accessor.py                  |  2 +-
 src/neuroglancer_scripts/utils.py             |  2 +-
 src/neuroglancer_scripts/volume_reader.py     | 15 ++++------
 24 files changed, 105 insertions(+), 114 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index f5466aa..ef6c940 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 #
 # neuroglancer-scripts documentation build configuration file, created by
 # sphinx-quickstart on Fri Feb 2 15:05:24 2018.
diff --git a/experimental/mesh_to_vtk.py b/experimental/mesh_to_vtk.py
index 6c15455..3406b2b 100755
--- a/experimental/mesh_to_vtk.py
+++ b/experimental/mesh_to_vtk.py
@@ -15,7 +15,7 @@ def mesh_file_to_vtk(input_filename, output_filename, data_format="ascii",
                      coord_transform=None):
     """Convert a mesh file read by nibabel to VTK format"""
-    print("Reading {}".format(input_filename))
+    print(f"Reading {input_filename}")
     mesh = nibabel.load(input_filename)
     print()
     print("Summary")
@@ -44,7 +44,7 @@ def mesh_file_to_vtk(input_filename, output_filename, data_format="ascii",
 
     # Gifti uses millimetres, Neuroglancer expects nanometres
     points *= 1e6
-    with open(output_filename, "wt") as output_file:
+    with open(output_filename, "w") as output_file:
         neuroglancer_scripts.mesh.save_mesh_as_neuroglancer_vtk(
             output_file, points, triangles
         )
@@ -78,15 +78,15 @@ def parse_command_line(argv):
         try:
             matrix = np.fromstring(args.coord_transform, sep=",")
         except ValueError as exc:
-            parser.error("cannot parse --coord-transform: {}"
-                         .format(exc.args[0]))
+            parser.error(f"cannot parse --coord-transform: {exc.args[0]}"
+                         )
         if len(matrix) == 12:
             matrix = matrix.reshape(3, 4)
         elif len(matrix) == 16:
             matrix = matrix.reshape(4, 4)
         else:
             parser.error("--coord-transform must have 12 or 16 elements"
-                         " ({} passed)".format(len(matrix)))
+                         f" ({len(matrix)} passed)")
 
         args.coord_transform = matrix
diff --git a/experimental/off_to_vtk.py b/experimental/off_to_vtk.py
index 9402cb4..bb56528 100755
--- a/experimental/off_to_vtk.py
+++ b/experimental/off_to_vtk.py
@@ -19,7 +19,7 @@ def off_mesh_file_to_vtk(input_filename, output_filename, data_format="binary",
                          coord_transform=None):
     """Convert a mesh file from OFF format to VTK format"""
-    print("Reading {}".format(input_filename))
+    print(f"Reading {input_filename}")
    with gzip.open(input_filename, "rt") as f:
         header_keyword = f.readline().strip()
         match = re.match(r"(ST)?(C)?(N)?(4)?(n)?OFF", header_keyword)
@@ -48,8 +48,8 @@ def off_mesh_file_to_vtk(input_filename, output_filename, data_format="binary",
             triangles[i, 1] = float(components[2])
             triangles[i, 2] = float(components[3])
     print()
-    print("{0} vertices and {1} triangles read"
-          .format(num_vertices, num_triangles))
+    print(f"{num_vertices} vertices and {num_triangles} triangles read"
+          )
 
     points = vertices
@@ -108,15 +108,15 @@ def parse_command_line(argv):
         try:
             matrix = np.fromstring(args.coord_transform, sep=",")
         except ValueError as exc:
-            parser.error("cannot parse --coord-transform: {}"
-                         .format(exc.args[0]))
+            parser.error(f"cannot parse --coord-transform: {exc.args[0]}"
+                         )
         if len(matrix) == 12:
             matrix = matrix.reshape(3, 4)
         elif len(matrix) == 16:
             matrix = matrix.reshape(4, 4)
         else:
             parser.error("--coord-transform must have 12 or 16 elements"
-                         " ({} passed)".format(len(matrix)))
+                         f" ({len(matrix)} passed)")
 
         args.coord_transform = matrix
diff --git a/pyproject.toml b/pyproject.toml
index 1fdb591..3103c98 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -18,6 +18,7 @@ extend-select = [
     "I",
     "N",
     "NPY",
+    "UP",
 ]
 ignore = [
     "N802",  # Gives false positives when a name contains an uppercase acronym
diff --git a/script_tests/test_scripts.py b/script_tests/test_scripts.py
index cfbac7e..24230a1 100644
--- a/script_tests/test_scripts.py
+++ b/script_tests/test_scripts.py
@@ -34,8 +34,8 @@ def test_jubrain_example_MPM(examples_dir, tmpdir):
     try:
         gzip.open(str(input_nifti)).read(348)
     except OSError as exc:
-        pytest.skip("Cannot find a valid example file {0} for testing: {1}"
-                    .format(input_nifti, exc))
+        pytest.skip(f"Cannot find a valid example file {input_nifti} for "
+                    f"testing: {exc}")
 
     output_dir = tmpdir / "MPM"
     assert subprocess.call([
@@ -80,8 +80,8 @@ def test_all_in_one_conversion(examples_dir, tmpdir):
     try:
         gzip.open(str(input_nifti)).read(348)
     except OSError as exc:
-        pytest.skip("Cannot find a valid example file {0} for testing: {1}"
-                    .format(input_nifti, exc))
+        pytest.skip(f"Cannot find a valid example file {input_nifti} for "
+                    f"testing: {exc}")
 
     output_dir = tmpdir / "colin27T1_seg"
     assert subprocess.call([
@@ -104,8 +104,8 @@ def test_sharded_conversion(examples_dir, tmpdir):
     try:
         gzip.open(str(input_nifti)).read(348)
     except OSError as exc:
-        pytest.skip("Cannot find a valid example file {0} for testing: {1}"
-                    .format(input_nifti, exc))
+        pytest.skip(f"Cannot find a valid example file {input_nifti} for "
+                    f"testing: {exc}")
 
     output_dir = tmpdir / "colin27T1_seg_sharded"
     assert subprocess.call([
@@ -116,7 +116,7 @@ def test_sharded_conversion(examples_dir, tmpdir):
         str(output_dir)
     ], env=env) == 4  # datatype not supported by neuroglancer
 
-    with open(output_dir / "info_fullres.json", "r") as fp:
+    with open(output_dir / "info_fullres.json") as fp:
         fullres_info = json.load(fp=fp)
     with open(output_dir / "info_fullres.json", "w") as fp:
         fullres_info["data_type"] = "uint8"
diff --git a/src/neuroglancer_scripts/_compressed_segmentation.py b/src/neuroglancer_scripts/_compressed_segmentation.py
index 638ec3b..554228f 100644
--- a/src/neuroglancer_scripts/_compressed_segmentation.py
+++ b/src/neuroglancer_scripts/_compressed_segmentation.py
@@ -104,7 +104,7 @@ def _encode_channel(chunk_channel, block_size):
 
 def _pack_encoded_values(encoded_values, bits):
     if bits == 0:
-        return bytes()
+        return b""
     else:
         assert 32 % bits == 0
         assert np.array_equal(encoded_values,
@@ -162,8 +162,8 @@ def _decode_channel_into(chunk, channel, buf, block_size):
         bits = res[0] >> 24
         if bits not in (0, 1, 2, 4, 8, 16, 32):
             raise InvalidFormatError("Invalid number of encoding bits for "
-                                     "compressed_segmentation block ({0})"
-                                     .format(bits))
+                                     f"compressed_segmentation block ({bits})"
+                                     )
         encoded_values_offset = 4 * res[1]
         lookup_table_past_end = lookup_table_offset + chunk.itemsize * min(
             (2 ** bits),
diff --git a/src/neuroglancer_scripts/_jpeg.py b/src/neuroglancer_scripts/_jpeg.py
index c7440c1..cdef05f 100644
--- a/src/neuroglancer_scripts/_jpeg.py
+++ b/src/neuroglancer_scripts/_jpeg.py
@@ -46,17 +46,17 @@ def decode_chunk(buf, chunk_size, num_channels):
         img = PIL.Image.open(io_buf)
     except Exception as exc:
         raise InvalidFormatError(
-            "The JPEG-encoded chunk could not be decoded: {0}"
-            .format(exc)) from exc
+            f"The JPEG-encoded chunk could not be decoded: {exc}"
+        ) from exc
     if num_channels == 1 and img.mode != "L":
         raise InvalidFormatError(
-            "The JPEG chunk is encoded with mode={0} instead of L"
-            .format(img.mode))
+            f"The JPEG chunk is encoded with mode={img.mode} instead of L"
+        )
     if num_channels == 3 and img.mode != "RGB":
         raise InvalidFormatError(
-            "The JPEG chunk is encoded with mode={0} instead of RGB"
-            .format(img.mode))
+            f"The JPEG chunk is encoded with mode={img.mode} instead of RGB"
+        )
 
     flat_chunk = np.asarray(img)
     if num_channels == 3:
@@ -67,7 +67,6 @@ def decode_chunk(buf, chunk_size, num_channels):
                                    chunk_size[2], chunk_size[1], chunk_size[0])
     except Exception:
         raise InvalidFormatError("The JPEG-encoded chunk has an incompatible "
-                                 "shape ({0} elements, expecting {1})"
-                                 .format(flat_chunk.size // num_channels,
-                                         np.prod(chunk_size)))
+                                 f"shape ({flat_chunk.size // num_channels} "
+                                 f"elements, expecting {np.prod(chunk_size)})")
     return chunk
diff --git a/src/neuroglancer_scripts/accessor.py b/src/neuroglancer_scripts/accessor.py
index 3c33d17..b678a5f 100644
--- a/src/neuroglancer_scripts/accessor.py
+++ b/src/neuroglancer_scripts/accessor.py
@@ -88,8 +88,8 @@ def get_accessor_for_url(url, accessor_options={}):
             return sharded_http_accessor.ShardedHttpAccessor(url)
         return accessor
     else:
-        raise URLError("Unsupported URL scheme {0} (must be file, http, or "
-                       "https)".format(r.scheme))
+        raise URLError(f"Unsupported URL scheme {r.scheme} (must be file, "
+                       "http, or https)")
 
 
 def add_argparse_options(parser, write_chunks=True, write_files=True):
diff --git a/src/neuroglancer_scripts/chunk_encoding.py b/src/neuroglancer_scripts/chunk_encoding.py
index b6d35ca..164033e 100644
--- a/src/neuroglancer_scripts/chunk_encoding.py
+++ b/src/neuroglancer_scripts/chunk_encoding.py
@@ -48,14 +48,14 @@ def get_encoder(info, scale_info, encoder_options={}):
         num_channels = info["num_channels"]
         encoding = scale_info["encoding"]
     except KeyError as exc:
-        raise InvalidInfoError("The info dict is missing an essential key {0}"
-                               .format(exc)) from exc
+        raise InvalidInfoError("The info dict is missing an essential key "
+                               f"{exc}") from exc
     if not isinstance(num_channels, int) or not num_channels > 0:
-        raise InvalidInfoError("Invalid value {0} for num_channels (must be "
-                               "a positive integer)".format(num_channels))
+        raise InvalidInfoError(f"Invalid value {num_channels} for "
+                               "num_channels (must be a positive integer)")
     if data_type not in NEUROGLANCER_DATA_TYPES:
-        raise InvalidInfoError("Invalid data_type {0} (should be one of {1})"
-                               .format(data_type, NEUROGLANCER_DATA_TYPES))
+        raise InvalidInfoError(f"Invalid data_type {data_type} (should be one "
+                               f"of {NEUROGLANCER_DATA_TYPES})")
     try:
         if encoding == "raw":
             return RawChunkEncoder(data_type, num_channels)
@@ -75,7 +75,7 @@ def get_encoder(info, scale_info, encoder_options={}):
                                jpeg_plane=jpeg_plane,
                                jpeg_quality=jpeg_quality)
         else:
-            raise InvalidInfoError("Invalid encoding {0}".format(encoding))
+            raise InvalidInfoError(f"Invalid encoding {encoding}")
     except IncompatibleEncoderError as exc:
         raise InvalidInfoError(str(exc)) from exc
 
@@ -191,8 +191,8 @@ def decode(self, buf, chunk_size):
                                   (self.num_channels, chunk_size[2],
                                    chunk_size[1], chunk_size[0]))
         except Exception as exc:
-            raise InvalidFormatError("Cannot decode raw-encoded chunk: {0}"
-                                     .format(exc)) from exc
+            raise InvalidFormatError(f"Cannot decode raw-encoded chunk: {exc}"
+                                     ) from exc
 
 
 class CompressedSegmentationEncoder(ChunkEncoder):
diff --git a/src/neuroglancer_scripts/data_types.py b/src/neuroglancer_scripts/data_types.py
index 06ce6f3..6363ac7 100644
--- a/src/neuroglancer_scripts/data_types.py
+++ b/src/neuroglancer_scripts/data_types.py
@@ -87,7 +87,7 @@ def get_dtype(input_dtype):
     if input_dtype.names is None:
         return input_dtype, False
     if input_dtype.names not in NG_MULTICHANNEL_DATATYPES:
-        err = 'tuple datatype {} not yet supported'.format(input_dtype.names)
+        err = f'tuple datatype {input_dtype.names} not yet supported'
         raise NotImplementedError(err)
     for index, value in enumerate(input_dtype.names):
         err = 'Multichanneled datatype should have the same datatype'
diff --git a/src/neuroglancer_scripts/downscaling.py b/src/neuroglancer_scripts/downscaling.py
index 1c9e7f8..f801dd7 100644
--- a/src/neuroglancer_scripts/downscaling.py
+++ b/src/neuroglancer_scripts/downscaling.py
@@ -48,8 +48,8 @@ def get_downscaler(downscaling_method, info=None, options={}):
     elif downscaling_method == "stride":
         return StridingDownscaler()
     else:
-        raise NotImplementedError("invalid downscaling method {0}"
-                                  .format(downscaling_method))
+        raise NotImplementedError("invalid downscaling method "
+                                  + downscaling_method)
 
 
 def add_argparse_options(parser):
diff --git a/src/neuroglancer_scripts/dyadic_pyramid.py b/src/neuroglancer_scripts/dyadic_pyramid.py
index 68dce66..a1969a8 100644
--- a/src/neuroglancer_scripts/dyadic_pyramid.py
+++ b/src/neuroglancer_scripts/dyadic_pyramid.py
@@ -30,8 +30,8 @@ def choose_unit_for_key(resolution_nm):
                 and (format_length(resolution_nm, unit)
                      != format_length(resolution_nm * 2, unit))):
             return unit
-    raise NotImplementedError("cannot find a suitable unit for {} nm"
-                              .format(resolution_nm))
+    raise NotImplementedError("cannot find a suitable unit for "
+                              f"{resolution_nm} nm")
 
 
 def fill_scales_for_dyadic_pyramid(info, target_chunk_size=64,
@@ -175,15 +175,15 @@ def compute_dyadic_downscaling(info, source_scale_index, downscaler,
     if new_size != [ceil_div(os, ds)
                     for os, ds in zip(old_size, downscaling_factors)]:
         raise ValueError("Unsupported downscaling factor between scales "
-                         "{} and {} (only 1 and 2 are supported)"
-                         .format(old_key, new_key))
+                         f"{old_key} and {new_key} "
+                         "(only 1 and 2 are supported)")
 
     downscaler.check_factors(downscaling_factors)
 
     if chunk_reader.scale_is_lossy(old_key):
         logger.warning(
             "Using data stored in a lossy format (scale %s) as an input "
-            "for downscaling (to scale %s)" % (old_key, new_key)
+            "for downscaling (to scale %s)", old_key, new_key
         )
 
     half_chunk = [osz // f
@@ -210,7 +210,7 @@ def load_and_downscale_old_chunk(z_idx, y_idx, x_idx):
     # TODO how to do progress report correctly with logging?
     for x_idx, y_idx, z_idx in tqdm(
             np.ndindex(chunk_range), total=np.prod(chunk_range),
-            desc="computing scale {}".format(new_key),
+            desc=f"computing scale {new_key}",
             unit="chunks", leave=True):
         xmin = new_chunk_size[0] * x_idx
         xmax = min(new_chunk_size[0] * (x_idx + 1), new_size[0])
diff --git a/src/neuroglancer_scripts/file_accessor.py b/src/neuroglancer_scripts/file_accessor.py
index 1eb9571..034bff4 100644
--- a/src/neuroglancer_scripts/file_accessor.py
+++ b/src/neuroglancer_scripts/file_accessor.py
@@ -63,7 +63,7 @@ def file_exists(self, relative_path):
                 return True
         except OSError as exc:
             raise DataAccessError(
-                "Error fetching {0}: {1}".format(file_path, exc)) from exc
+                f"Error fetching {file_path}: {exc}") from exc
         return False
 
     def fetch_file(self, relative_path):
@@ -79,13 +79,13 @@ def fetch_file(self, relative_path):
                 f = gzip.open(str(file_path.with_name(file_path.name + ".gz")),
                               "rb")
             else:
-                raise DataAccessError("Cannot find {0} in {1}".format(
-                    relative_path, self.base_path))
+                raise DataAccessError(f"Cannot find {relative_path} in "
+                                      f"{self.base_path}")
             with f:
                 return f.read()
         except OSError as exc:
             raise DataAccessError(
-                "Error fetching {0}: {1}".format(file_path, exc)) from exc
+                f"Error fetching {file_path}: {exc}") from exc
 
     def store_file(self, relative_path, buf,
                    mime_type="application/octet-stream",
@@ -107,8 +107,8 @@ def store_file(self, relative_path, buf,
             with file_path.open(mode) as f:
                 f.write(buf)
         except OSError as exc:
-            raise DataAccessError("Error storing {0}: {1}"
-                                  .format(file_path, exc)) from exc
+            raise DataAccessError(f"Error storing {file_path}: {exc}"
+                                  ) from exc
 
     def fetch_chunk(self, key, chunk_coords):
         f = None
@@ -124,17 +124,17 @@ def fetch_chunk(self, key, chunk_coords):
             )
             if f is None:
                 raise DataAccessError(
-                    "Cannot find chunk {0} in {1}" .format(
-                        self._flat_chunk_basename(key, chunk_coords),
-                        self.base_path)
+                    "Cannot find chunk "
+                    f"{self._flat_chunk_basename(key, chunk_coords)} in "
+                    f"{self.base_path}"
                 )
             with f:
                 return f.read()
         except OSError as exc:
             raise DataAccessError(
-                "Error accessing chunk {0} in {1}: {2}" .format(
-                    self._flat_chunk_basename(key, chunk_coords),
-                    self.base_path, exc)) from exc
+                "Error accessing chunk "
+                f"{self._flat_chunk_basename(key, chunk_coords)} in "
+                f"{self.base_path}: {exc}" ) from exc
 
     def store_chunk(self, buf, key, chunk_coords,
                     mime_type="application/octet-stream",
@@ -153,9 +153,9 @@ def store_chunk(self, buf, key, chunk_coords,
                 f.write(buf)
         except OSError as exc:
             raise DataAccessError(
-                "Error storing chunk {0} in {1}: {2}" .format(
-                    self._flat_chunk_basename(key, chunk_coords),
-                    self.base_path, exc)) from exc
+                "Error storing chunk "
+                f"{self._flat_chunk_basename(key, chunk_coords)} in "
+                f"{self.base_path}: {exc}" ) from exc
 
     def _chunk_path(self, key, chunk_coords, pattern=None):
         if pattern is None:
diff --git a/src/neuroglancer_scripts/http_accessor.py b/src/neuroglancer_scripts/http_accessor.py
index 048e81e..da423a4 100644
--- a/src/neuroglancer_scripts/http_accessor.py
+++ b/src/neuroglancer_scripts/http_accessor.py
@@ -61,8 +61,8 @@ def file_exists(self, relative_path):
                 return False
             r.raise_for_status()
         except requests.exceptions.RequestException as exc:
-            raise DataAccessError("Error probing the existence of {0}: {1}"
-                                  .format(file_url, exc)) from exc
+            raise DataAccessError("Error probing the existence of "
+                                  f"{file_url}: {exc}") from exc
         return True
 
     def fetch_file(self, relative_path):
@@ -71,6 +71,5 @@ def fetch_file(self, relative_path):
             r = self._session.get(file_url)
             r.raise_for_status()
         except requests.exceptions.RequestException as exc:
-            raise DataAccessError("Error reading {0}: {1}"
-                                  .format(file_url, exc)) from exc
+            raise DataAccessError(f"Error reading {file_url}: {exc}") from exc
         return r.content
diff --git a/src/neuroglancer_scripts/mesh.py b/src/neuroglancer_scripts/mesh.py
index f253ce8..3f693b3 100644
--- a/src/neuroglancer_scripts/mesh.py
+++ b/src/neuroglancer_scripts/mesh.py
@@ -84,23 +84,22 @@ def save_mesh_as_neuroglancer_vtk(file, vertices, triangles,
     file.write("# vtk DataFile Version 3.0\n")
     if title:
         title += ". "
-    title += "Written by neuroglancer-scripts-{0}.".format(
-        neuroglancer_scripts.__version__
-    )
-    file.write("{0}\n".format(title[:255]))
+    title += ("Written by neuroglancer-scripts-"
+              f"{neuroglancer_scripts.__version__}.")
+    file.write(f"{title[:255]}\n")
     file.write("ASCII\n")
     file.write("DATASET POLYDATA\n")
-    file.write("POINTS {0:d} {1}\n".format(vertices.shape[0], "float"))
+    file.write("POINTS {:d} {}\n".format(vertices.shape[0], "float"))
     if not np.can_cast(vertices.dtype, np.float32):
         # As of a8ce681660864ab3ac7c1086c0b4262e40f24707 Neuroglancer reads
         # everything as float32 anyway
         logger.warning("Vertex coordinates will be converted to float32")
     np.savetxt(file, vertices.astype(np.float32), fmt="%.9g")
-    file.write("POLYGONS {0:d} {1:d}\n"
-               .format(triangles.shape[0], 4 * triangles.shape[0]))
+    file.write(f"POLYGONS {triangles.shape[0]:d} {4 * triangles.shape[0]:d}\n"
+               )
     np.savetxt(file, np.insert(triangles, 0, 3, axis=1), fmt="%d")
     if vertex_attributes:
-        file.write("POINT_DATA {0:d}\n".format(vertices.shape[0]))
+        file.write(f"POINT_DATA {vertices.shape[0]:d}\n")
         for vertex_attribute in vertex_attributes:
             name = vertex_attribute["name"]
             assert re.match("\\s", name) is None
@@ -117,12 +116,12 @@ def save_mesh_as_neuroglancer_vtk(file, vertices, triangles,
             if not np.can_cast(values.dtype, np.float32):
                 # As of a8ce681660864ab3ac7c1086c0b4262e40f24707 Neuroglancer
                 # reads everything as float32 anyway
-                logger.warning("Data for the '{0}' vertex attribute will be "
-                               "converted to float32".format(name))
-            file.write("SCALARS {0} {1}".format(name, "float"))
+                logger.warning(f"Data for the '{name}' vertex attribute will "
+                               "be converted to float32")
+            file.write("SCALARS {} {}".format(name, "float"))
             if num_components != 1:
-                file.write(" {0:d}".format(num_components))
-            file.write("\nLOOKUP_TABLE {0}\n".format("default"))
+                file.write(f" {num_components:d}")
+            file.write("\nLOOKUP_TABLE {}\n".format("default"))
             np.savetxt(file, values.astype(np.float32), fmt="%.9g")
diff --git a/src/neuroglancer_scripts/scripts/convert_chunks.py b/src/neuroglancer_scripts/scripts/convert_chunks.py
index a680d0f..c0f0947 100755
--- a/src/neuroglancer_scripts/scripts/convert_chunks.py
+++ b/src/neuroglancer_scripts/scripts/convert_chunks.py
@@ -29,7 +29,7 @@ def convert_chunks_for_scale(chunk_reader,
 
     if chunk_reader.scale_is_lossy(key):
         logger.warning("Using data stored in a lossy format as an input for "
-                       "conversion (for scale %s)" % key)
+                       "conversion (for scale %s)", key)
 
     for chunk_size in scale_info["chunk_sizes"]:
         chunk_range = ((size[0] - 1) // chunk_size[0] + 1,
@@ -38,7 +38,7 @@ def convert_chunks_for_scale(chunk_reader,
         for x_idx, y_idx, z_idx in tqdm(
                 np.ndindex(chunk_range), total=np.prod(chunk_range),
                 unit="chunk",
-                desc="converting scale {}".format(key)):
+                desc=f"converting scale {key}"):
             xmin = chunk_size[0] * x_idx
             xmax = min(chunk_size[0] * (x_idx + 1), size[0])
             ymin = chunk_size[1] * y_idx
diff --git a/src/neuroglancer_scripts/scripts/mesh_to_precomputed.py b/src/neuroglancer_scripts/scripts/mesh_to_precomputed.py
index f6dc4a8..2dcd6ea 100755
--- a/src/neuroglancer_scripts/scripts/mesh_to_precomputed.py
+++ b/src/neuroglancer_scripts/scripts/mesh_to_precomputed.py
@@ -124,15 +124,15 @@ def parse_command_line(argv):
         try:
             matrix = np.fromstring(args.coord_transform, sep=",")
         except ValueError as exc:
-            parser.error("cannot parse --coord-transform: {}"
-                         .format(exc.args[0]))
+            parser.error(f"cannot parse --coord-transform: {exc.args[0]}"
+                         )
         if len(matrix) == 12:
             matrix = matrix.reshape(3, 4)
         elif len(matrix) == 16:
             matrix = matrix.reshape(4, 4)
         else:
             parser.error("--coord-transform must have 12 or 16 elements"
-                         " ({} passed)".format(len(matrix)))
+                         f" ({len(matrix)} passed)")
 
         args.coord_transform = matrix
diff --git a/src/neuroglancer_scripts/scripts/scale_stats.py b/src/neuroglancer_scripts/scripts/scale_stats.py
index 87fa516..54d2974 100755
--- a/src/neuroglancer_scripts/scripts/scale_stats.py
+++ b/src/neuroglancer_scripts/scripts/scale_stats.py
@@ -42,18 +42,15 @@ def show_scales_info(info):
                            if sharding_num_directories is not None
                            else size_in_chunks[0] * (1 + size_in_chunks[1]))
         size_bytes = np.prod(size) * dtype.itemsize * num_channels
-        print("Scale {}, {}, chunk size {}:"
-              " {:,d} chunks, {:,d} directories, raw uncompressed size {}B"
-              .format(scale_name, shard_info, chunk_size,
-                      num_chunks, num_directories,
-                      readable_count(size_bytes)))
+        print(f"Scale {scale_name}, {shard_info}, chunk size {chunk_size}:"
+              f" {num_chunks:,d} chunks, {num_directories:,d} directories,"
+              f" raw uncompressed size {readable_count(size_bytes)}B")
         total_size += size_bytes
         total_chunks += num_chunks
         total_directories += num_directories
     print("---")
-    print("Total: {:,d} chunks, {:,d} directories, raw uncompressed size {}B"
-          .format(total_chunks, total_directories,
-                  readable_count(total_size)))
+    print(f"Total: {total_chunks:,d} chunks, {total_directories:,d} "
+          f"directories, raw uncompressed size {readable_count(total_size)}B")
 
 
 def show_scale_file_info(url, options={}):
diff --git a/src/neuroglancer_scripts/scripts/slices_to_precomputed.py b/src/neuroglancer_scripts/scripts/slices_to_precomputed.py
index e4e8446..6f3f6d6 100755
--- a/src/neuroglancer_scripts/scripts/slices_to_precomputed.py
+++ b/src/neuroglancer_scripts/scripts/slices_to_precomputed.py
@@ -106,8 +106,8 @@ def slices_to_raw_chunks(slice_filename_lists, dest_url, input_orientation,
 
     for filename_list in slice_filename_lists:
         if len(filename_list) != input_size[2]:
-            raise ValueError("{} slices found where {} were expected"
-                             .format(len(filename_list), input_size[2]))
+            raise ValueError(f"{len(filename_list)} slices found where "
+                             f"{input_size[2]} were expected")
 
     for slice_chunk_idx in trange((input_size[2] - 1)
                                   // input_chunk_size[2] + 1,
@@ -126,7 +126,7 @@ def slices_to_raw_chunks(slice_filename_lists, dest_url, input_orientation,
         slice_slicing = np.s_[first_slice
                               : last_slice
                               : input_axis_inversions[2]]
-        tqdm.write("Reading slices {0} to {1} ({2}B memory needed)... "
+        tqdm.write("Reading slices {} to {} ({}B memory needed)... "
                    .format(first_slice, last_slice - input_axis_inversions[2],
                            readable_count(input_size[0] * input_size[1]
@@ -152,8 +152,8 @@ def load_z_stack(slice_filenames):
         block = block[np.newaxis, :, :, :]
     else:
         raise ValueError(
-            "block has unexpected dimensionality (ndim={})"
-            .format(block.ndim)
+            f"block has unexpected dimensionality (ndim={block.ndim})"
+        )
     return block
diff --git a/src/neuroglancer_scripts/scripts/volume_to_precomputed_pyramid.py b/src/neuroglancer_scripts/scripts/volume_to_precomputed_pyramid.py
index c9dd579..0e17334 100644
--- a/src/neuroglancer_scripts/scripts/volume_to_precomputed_pyramid.py
+++ b/src/neuroglancer_scripts/scripts/volume_to_precomputed_pyramid.py
@@ -56,7 +56,7 @@ def volume_to_precomputed_pyramid(volume_filename,
             info, accessor
         )
     except neuroglancer_scripts.accessor.DataAccessError as exc:
-        logger.error("Cannot write info: {0}".format(exc))
+        logger.error(f"Cannot write info: {exc}")
         return 1
     volume_reader.nibabel_image_to_precomputed(
         img, precomputed_writer,
diff --git a/src/neuroglancer_scripts/sharded_base.py b/src/neuroglancer_scripts/sharded_base.py
index 7406c3e..19fde62 100644
--- a/src/neuroglancer_scripts/sharded_base.py
+++ b/src/neuroglancer_scripts/sharded_base.py
@@ -420,7 +420,7 @@ def info(self, val):
     def get_scale(self, key) -> Dict[str, Any]:
         scales = self.info.get("scales")
         try:
-            scale, = [scale for scale in scales if scale.get("key") == key]
+            scale, = (scale for scale in scales if scale.get("key") == key)
             return scale
         except ValueError as e:
             raise ValueError(f"key {key!r} not found in scales. Possible "
diff --git a/src/neuroglancer_scripts/sharded_file_accessor.py b/src/neuroglancer_scripts/sharded_file_accessor.py
index eccbcc4..f77c323 100644
--- a/src/neuroglancer_scripts/sharded_file_accessor.py
+++ b/src/neuroglancer_scripts/sharded_file_accessor.py
@@ -422,7 +422,7 @@ def fetch_file(self, relative_path):
 
     def store_file(self, relative_path, buf, overwrite=False, **kwargs):
         if not overwrite and self.file_exists(relative_path):
-            raise IOError(f"file at {relative_path} already exists")
+            raise OSError(f"file at {relative_path} already exists")
         with open(self.base_dir / relative_path, "wb") as fp:
             fp.write(buf)
diff --git a/src/neuroglancer_scripts/utils.py b/src/neuroglancer_scripts/utils.py
index ba3f033..bb77eca 100644
--- a/src/neuroglancer_scripts/utils.py
+++ b/src/neuroglancer_scripts/utils.py
@@ -92,7 +92,7 @@ def readable_count(count):
             return num_str + " " + prefix
     # Fallback: use the last prefix
     factor, prefix = _IEC_PREFIXES[-1]
-    return "{:,.0f} {}".format(count / factor, prefix)
+    return f"{count / factor:,.0f} {prefix}"
 
 
 LENGTH_UNITS = collections.OrderedDict([
diff --git a/src/neuroglancer_scripts/volume_reader.py b/src/neuroglancer_scripts/volume_reader.py
index d3dd969..9e9a9ae 100644
--- a/src/neuroglancer_scripts/volume_reader.py
+++ b/src/neuroglancer_scripts/volume_reader.py
@@ -111,23 +111,20 @@ def nibabel_image_to_info(img,
         guessed_dtype = input_dtype.name
     else:
         guessed_dtype = "float32"
-    formatted_info = """\
+    formatted_info = f"""\
 {{
   "type": "image",
-  "num_channels": {num_channels},
-  "data_type": "{data_type}",
+  "num_channels": {shape[3] if len(shape) >= 4 else 1},
+  "data_type": "{guessed_dtype}",
   "scales": [
     {{
       "encoding": "raw",
-      "size": {size},
-      "resolution": {resolution},
+      "size": {list(shape[:3])},
+      "resolution": {[float(vs * 1_000_000) for vs in voxel_sizes[:3]]},
       "voxel_offset": [0, 0, 0]
     }}
   ]
-}}""".format(num_channels=shape[3] if len(shape) >= 4 else 1,
-             data_type=guessed_dtype,
-             size=list(shape[:3]),
-             resolution=[float(vs * 1_000_000) for vs in voxel_sizes[:3]])
+}}"""
 
     info = json.loads(formatted_info)  # ensure well-formed JSON
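
Two notes for reviewers, since the rewrites above are mechanical and easy to
skim past. First, a standalone sketch of the main idiom classes this patch
converges on (illustrative file name, not code from the repository):

    path = "example.txt"                # hypothetical file, for illustration

    # str.format() -> f-string; explicit indices like {0} are dropped
    old = "Reading {0}".format(path)
    new = f"Reading {path}"
    assert old == new

    # redundant open() modes: "wt"/"rt" lose the default "t" (and bare "r")
    with open(path, "w") as f:          # equivalent of the old open(path, "wt")
        f.write(new)

    # bytes() constructor -> b"" literal; IOError -> its canonical name OSError
    assert bytes() == b""
    assert IOError is OSError           # IOError is an alias since Python 3.3

Second, the "UP" entry added to the ruff extend-select list in pyproject.toml
enables ruff's implementation of the same pyupgrade rules, so these idioms are
flagged by the linter from now on instead of being fixed once and regressing.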