Merge pull request #41 from HumanBrainProject/python312_and_modernize
Add tests for Python 3.12 and modernize code quality checks
ylep authored Jul 2, 2024
2 parents 9cc58a5 + 9622c54 commit d2bc7de
Showing 49 changed files with 261 additions and 281 deletions.
12 changes: 7 additions & 5 deletions .github/workflows/tox.yaml
@@ -43,9 +43,11 @@ jobs:
run: |
tox
# 2024-07-02: disable CodeCov (temporarily?), uploads do not work even with a token
# Code coverage is run on Python 3.10, see tox.ini
- if: ${{ matrix.python-version == '3.10' }}
uses: codecov/codecov-action@v3
with:
fail_ci_if_error: true # optional (default = false)
verbose: true # optional (default = false)
#- if: ${{ matrix.python-version == '3.10' }}
# uses: codecov/codecov-action@v4
# with:
# token: ${{ secrets.CODECOV_TOKEN }}
# fail_ci_if_error: true # optional (default = false)
# verbose: true # optional (default = false)
13 changes: 7 additions & 6 deletions .pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v4.6.0
hooks:
- id: check-added-large-files
- id: check-case-conflict
@@ -15,11 +15,12 @@ repos:
- id: name-tests-test
- id: trailing-whitespace

- repo: https://github.com/pycqa/flake8
rev: 6.0.0
hooks:
- id: flake8
additional_dependencies: [pep8-naming]
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.5.0
hooks:
# Run the linter.
- id: ruff

- repo: https://github.com/mgedmin/check-manifest
rev: "0.49"
3 changes: 1 addition & 2 deletions docs/conf.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# neuroglancer-scripts documentation build configuration file, created by
# sphinx-quickstart on Fri Feb 2 15:05:24 2018.
@@ -19,11 +18,11 @@
#
import os
import sys

sys.path.insert(0, os.path.abspath('../src/'))

import neuroglancer_scripts # noqa: E402


# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
13 changes: 6 additions & 7 deletions experimental/mesh_to_vtk.py
@@ -7,16 +7,15 @@

import sys

import neuroglancer_scripts.mesh
import nibabel
import numpy as np

import neuroglancer_scripts.mesh


def mesh_file_to_vtk(input_filename, output_filename, data_format="ascii",
coord_transform=None):
"""Convert a mesh file read by nibabel to VTK format"""
print("Reading {}".format(input_filename))
print(f"Reading {input_filename}")
mesh = nibabel.load(input_filename)
print()
print("Summary")
@@ -45,7 +44,7 @@ def mesh_file_to_vtk(input_filename, output_filename, data_format="ascii",
# Gifti uses millimetres, Neuroglancer expects nanometres
points *= 1e6

with open(output_filename, "wt") as output_file:
with open(output_filename, "w") as output_file:
neuroglancer_scripts.mesh.save_mesh_as_neuroglancer_vtk(
output_file, points, triangles
)
@@ -79,15 +78,15 @@ def parse_command_line(argv):
try:
matrix = np.fromstring(args.coord_transform, sep=",")
except ValueError as exc:
parser.error("cannot parse --coord-transform: {}"
.format(exc.args[0]))
parser.error(f"cannot parse --coord-transform: {exc.args[0]}"
)
if len(matrix) == 12:
matrix = matrix.reshape(3, 4)
elif len(matrix) == 16:
matrix = matrix.reshape(4, 4)
else:
parser.error("--coord-transform must have 12 or 16 elements"
" ({} passed)".format(len(matrix)))
f" ({len(matrix)} passed)")

args.coord_transform = matrix

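As a side note on the --coord-transform handling touched in this hunk, here is a minimal, self-contained sketch (hypothetical values, not code from the repository) of how a 12-element comma-separated transform parses into a 3 x 4 affine and can be applied to N x 3 vertex coordinates:

    import numpy as np

    # Hypothetical --coord-transform value: identity rotation plus a translation.
    arg = "1,0,0,10, 0,1,0,20, 0,0,1,30"
    matrix = np.fromstring(arg, sep=",").reshape(3, 4)

    vertices = np.array([[1.0, 2.0, 3.0],
                         [4.0, 5.0, 6.0]])
    # Apply the affine: multiply by the 3x3 part, then add the translation column.
    transformed = vertices @ matrix[:, :3].T + matrix[:, 3]

The same parsing and reshaping pattern appears again in experimental/off_to_vtk.py below.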
14 changes: 7 additions & 7 deletions experimental/off_to_vtk.py
@@ -19,7 +19,7 @@
def off_mesh_file_to_vtk(input_filename, output_filename, data_format="binary",
coord_transform=None):
"""Convert a mesh file from OFF format to VTK format"""
print("Reading {}".format(input_filename))
print(f"Reading {input_filename}")
with gzip.open(input_filename, "rt") as f:
header_keyword = f.readline().strip()
match = re.match(r"(ST)?(C)?(N)?(4)?(n)?OFF", header_keyword)
@@ -32,7 +32,7 @@ def off_mesh_file_to_vtk(input_filename, output_filename, data_format="binary",
assert match
num_vertices = int(match.group(1))
num_triangles = int(match.group(2))
vertices = np.empty((num_vertices, 3), dtype=np.float)
vertices = np.empty((num_vertices, 3), dtype=float)
for i in range(num_vertices):
components = f.readline().split()
assert len(components) >= 3
@@ -48,8 +48,8 @@ def off_mesh_file_to_vtk(input_filename, output_filename, data_format="binary",
triangles[i, 1] = float(components[2])
triangles[i, 2] = float(components[3])
print()
print("{0} vertices and {1} triangles read"
.format(num_vertices, num_triangles))
print(f"{num_vertices} vertices and {num_triangles} triangles read"
)

points = vertices

@@ -108,15 +108,15 @@ def parse_command_line(argv):
try:
matrix = np.fromstring(args.coord_transform, sep=",")
except ValueError as exc:
parser.error("cannot parse --coord-transform: {}"
.format(exc.args[0]))
parser.error(f"cannot parse --coord-transform: {exc.args[0]}"
)
if len(matrix) == 12:
matrix = matrix.reshape(3, 4)
elif len(matrix) == 16:
matrix = matrix.reshape(4, 4)
else:
parser.error("--coord-transform must have 12 or 16 elements"
" ({} passed)".format(len(matrix)))
f" ({len(matrix)} passed)")

args.coord_transform = matrix

16 changes: 10 additions & 6 deletions experimental/stl_to_precomputed.py
@@ -5,13 +5,13 @@
#
# This software is made available under the MIT licence, see LICENCE.txt.

# flake8: noqa
# noqa

"""
Convert a mesh from STL ASCII to Neuroglancer pre-computed mesh format
Currently STL triangles are just written to the output as is, i.e. normals are not considered
and equal vertices are not reused.
Currently STL triangles are just written to the output as is, i.e. normals are
not considered and equal vertices are not reused.
"""

import gzip
@@ -78,10 +78,13 @@ def parse_command_line(argv):
"""Parse the script's command line."""
import argparse
parser = argparse.ArgumentParser(
description="""Convert a mesh from STL ASCII to Neuroglancer pre-computed mesh format""")
description="Convert a mesh from STL ASCII to Neuroglancer "
"pre-computed mesh format")
parser.add_argument("input_filename")
parser.add_argument("output_filename")
parser.add_argument("--voxel-size", help="Voxel size in mm. Only isotropic voxels are supported for now. Default is 1.0",
parser.add_argument("--voxel-size", help="Voxel size in mm. Only "
"isotropic voxels are supported for now. Default is "
"1.0",
type=float, default=1.0)
parser.add_argument("--no-compression", help="Don't gzip the output.",
action="store_false", dest="compress")
@@ -93,7 +96,8 @@ def main(argv):
"""The script's entry point."""
args = parse_command_line(argv)
return stl_file_to_precomputed(
args.input_filename, args.output_filename, args.voxel_size, args.compress) or 0
args.input_filename, args.output_filename, args.voxel_size,
args.compress) or 0


if __name__ == "__main__":
19 changes: 19 additions & 0 deletions pyproject.toml
@@ -4,3 +4,22 @@ requires = [
"wheel",
]
build-backend = "setuptools.build_meta"

[tool.ruff]
target-version = "py37" # py36 does not exist
line-length = 79
indent-width = 4

[tool.ruff.lint]
extend-select = [
"F",
"E",
"W",
"I",
"N",
"NPY",
"UP",
]
ignore = [
"N802", # Gives false positives when a name contains an uppercase acronym
]
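For illustration, this is the kind of rewrite the selected UP (pyupgrade) and NPY rule families enforce, mirroring changes made throughout this commit (the snippet is illustrative, not taken from the repository):

    import numpy as np

    filename = "mesh.vtk"

    # UP (pyupgrade) rewrites str.format() calls as f-strings:
    old_msg = "Reading {}".format(filename)   # before
    new_msg = f"Reading {filename}"           # after

    # NPY replaces the removed np.float alias with the builtin float:
    vertices = np.empty((4, 3), dtype=float)  # instead of dtype=np.float

Selecting the I family also brings isort-style import sorting, which is why several files in this diff have their import blocks reordered.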
18 changes: 8 additions & 10 deletions script_tests/test_scripts.py
@@ -13,12 +13,10 @@

import nibabel
import numpy as np
import pytest
import PIL.Image

import pytest
from neuroglancer_scripts.mesh import read_precomputed_mesh


# Environment passed to sub-processes so that they raise an error on warnings
env = os.environ.copy()
env['PYTHONWARNINGS'] = 'error'
@@ -36,8 +34,8 @@ def test_jubrain_example_MPM(examples_dir, tmpdir):
try:
gzip.open(str(input_nifti)).read(348)
except OSError as exc:
pytest.skip("Cannot find a valid example file {0} for testing: {1}"
.format(input_nifti, exc))
pytest.skip(f"Cannot find a valid example file {input_nifti} for "
f"testing: {exc}")

output_dir = tmpdir / "MPM"
assert subprocess.call([
@@ -82,8 +80,8 @@ def test_all_in_one_conversion(examples_dir, tmpdir):
try:
gzip.open(str(input_nifti)).read(348)
except OSError as exc:
pytest.skip("Cannot find a valid example file {0} for testing: {1}"
.format(input_nifti, exc))
pytest.skip(f"Cannot find a valid example file {input_nifti} for "
f"testing: {exc}")

output_dir = tmpdir / "colin27T1_seg"
assert subprocess.call([
@@ -106,8 +104,8 @@ def test_sharded_conversion(examples_dir, tmpdir):
try:
gzip.open(str(input_nifti)).read(348)
except OSError as exc:
pytest.skip("Cannot find a valid example file {0} for testing: {1}"
.format(input_nifti, exc))
pytest.skip(f"Cannot find a valid example file {input_nifti} for "
f"testing: {exc}")

output_dir = tmpdir / "colin27T1_seg_sharded"
assert subprocess.call([
@@ -118,7 +116,7 @@
str(output_dir)
], env=env) == 4 # datatype not supported by neuroglancer

with open(output_dir / "info_fullres.json", "r") as fp:
with open(output_dir / "info_fullres.json") as fp:
fullres_info = json.load(fp=fp)
with open(output_dir / "info_fullres.json", "w") as fp:
fullres_info["data_type"] = "uint8"
5 changes: 3 additions & 2 deletions setup.cfg
@@ -19,6 +19,7 @@ classifiers =
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: 3.12
Topic :: Scientific/Engineering :: Medical Science Apps.
Topic :: Scientific/Engineering :: Visualization
keywords = neuroimaging
@@ -30,7 +31,7 @@
python_requires = ~=3.6
install_requires =
nibabel >= 2
numpy >= 1.11.0
numpy >= 1.17
pillow >= 1.1.6
requests >= 2
scikit-image # TODO use pillow instead
@@ -47,11 +48,11 @@
pytest
requests-mock
check-manifest
flake8
pep8-naming
pre-commit
pytest-cov
readme_renderer
ruff
sphinx
tox
docs =
8 changes: 4 additions & 4 deletions src/neuroglancer_scripts/_compressed_segmentation.py
@@ -9,8 +9,8 @@

import numpy as np

from neuroglancer_scripts.utils import ceil_div
from neuroglancer_scripts.chunk_encoding import InvalidFormatError
from neuroglancer_scripts.utils import ceil_div


def pad_block(block, block_size):
@@ -104,7 +104,7 @@ def _encode_channel(chunk_channel, block_size):

def _pack_encoded_values(encoded_values, bits):
if bits == 0:
return bytes()
return b""
else:
assert 32 % bits == 0
assert np.array_equal(encoded_values,
@@ -162,8 +162,8 @@ def _decode_channel_into(chunk, channel, buf, block_size):
bits = res[0] >> 24
if bits not in (0, 1, 2, 4, 8, 16, 32):
raise InvalidFormatError("Invalid number of encoding bits for "
"compressed_segmentation block ({0})"
.format(bits))
f"compressed_segmentation block ({bits})"
)
encoded_values_offset = 4 * res[1]
lookup_table_past_end = lookup_table_offset + chunk.itemsize * min(
(2 ** bits),
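For readers unfamiliar with the compressed_segmentation encoding touched above, here is a self-contained sketch of the 32-bit packing that _pack_encoded_values performs. It is an approximation (it assumes the first value goes into the least significant bits of each little-endian word), not the module's actual code:

    import numpy as np

    def pack_encoded_values_sketch(encoded_values, bits):
        """Pack unsigned ints (each < 2**bits) into little-endian uint32 words."""
        if bits == 0:
            return b""
        assert 32 % bits == 0
        values_per_word = 32 // bits
        values = np.asarray(encoded_values, dtype=np.uint32)
        # Pad the input to a whole number of 32-bit words.
        padded_len = -(-len(values) // values_per_word) * values_per_word
        padded = np.zeros(padded_len, dtype=np.uint32)
        padded[:len(values)] = values
        # Shift each value to its position within the word and OR them together.
        shifts = np.arange(values_per_word, dtype=np.uint32) * bits
        words = np.bitwise_or.reduce(
            padded.reshape(-1, values_per_word) << shifts, axis=1
        )
        return words.astype("<u4").tobytes()

For example, with bits=4 the values [1, 2, 3] pack into the single word 0x00000321, i.e. the bytes 21 03 00 00.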
18 changes: 8 additions & 10 deletions src/neuroglancer_scripts/_jpeg.py
@@ -10,7 +10,6 @@
import numpy as np
import PIL.Image


from neuroglancer_scripts.chunk_encoding import InvalidFormatError


@@ -47,17 +46,17 @@ def decode_chunk(buf, chunk_size, num_channels):
img = PIL.Image.open(io_buf)
except Exception as exc:
raise InvalidFormatError(
"The JPEG-encoded chunk could not be decoded: {0}"
.format(exc)) from exc
f"The JPEG-encoded chunk could not be decoded: {exc}"
) from exc

if num_channels == 1 and img.mode != "L":
raise InvalidFormatError(
"The JPEG chunk is encoded with mode={0} instead of L"
.format(img.mode))
f"The JPEG chunk is encoded with mode={img.mode} instead of L"
)
if num_channels == 3 and img.mode != "RGB":
raise InvalidFormatError(
"The JPEG chunk is encoded with mode={0} instead of RGB"
.format(img.mode))
f"The JPEG chunk is encoded with mode={img.mode} instead of RGB"
)

flat_chunk = np.asarray(img)
if num_channels == 3:
@@ -68,7 +67,6 @@
chunk_size[2], chunk_size[1], chunk_size[0])
except Exception:
raise InvalidFormatError("The JPEG-encoded chunk has an incompatible "
"shape ({0} elements, expecting {1})"
.format(flat_chunk.size // num_channels,
np.prod(chunk_size)))
f"shape ({flat_chunk.size // num_channels} "
f"elements, expecting {np.prod(chunk_size)})")
return chunk
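The reshape at the end of decode_chunk follows Neuroglancer's axis convention. As a rough standalone sketch of that decoding path (the RGB handling and the vertical stacking of Z slices are assumptions here, and this is not the module's code):

    import io

    import numpy as np
    import PIL.Image

    def decode_jpeg_chunk_sketch(buf, chunk_size, num_channels):
        # chunk_size is (X, Y, Z); the JPEG is assumed to store the Z slices
        # stacked vertically, and the result is indexed [channel, Z, Y, X].
        img = PIL.Image.open(io.BytesIO(buf))
        flat_chunk = np.asarray(img)
        if num_channels == 3:
            flat_chunk = np.moveaxis(flat_chunk, -1, 0)  # assumed RGB handling
        return flat_chunk.reshape(num_channels,
                                  chunk_size[2], chunk_size[1], chunk_size[0])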