diff --git a/.bumpversion.cfg b/.bumpversion.cfg index dd26a87e..a7b0e196 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -4,14 +4,14 @@ commit = True tag = True sign_tags = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\.(?P[a-z]+)(?P\d+))? -serialize = +serialize = {major}.{minor}.{patch}.{release}{build} {major}.{minor}.{patch} [bumpversion:part:release] optional_value = prod first_value = dev -values = +values = dev prod diff --git a/.github/workflows/posix.yml b/.github/workflows/posix.yml new file mode 100644 index 00000000..767900f6 --- /dev/null +++ b/.github/workflows/posix.yml @@ -0,0 +1,45 @@ +--- +name: Build for Linux + +on: [push, pull_request] + +jobs: + test: + name: ${{ matrix.platform }} ${{ matrix.python-version }} + runs-on: ${{ matrix.platform }} + + strategy: + fail-fast: false + matrix: + platform: [ubuntu-latest, macos-latest] + python-version: [3.6, 3.7, 3.8] + + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Setup miniconda + uses: conda-incubator/setup-miniconda@v1 + with: + auto-update-conda: true + channels: conda-forge,ome + environment-file: environment.yml + python-version: ${{ matrix.python-version }} + + - name: Install Linux dependencies + if: matrix.platform == 'ubuntu-latest' + run: | + sudo apt install libxkbcommon-x11-0 + /sbin/start-stop-daemon --start --quiet \ + --pidfile /tmp/custom_xvfb_99.pid --make-pidfile \ + --background --exec /usr/bin/Xvfb \ + -- :99 -screen 0 1920x1200x24 -ac +extension GLX + + - name: Install dependencies + shell: bash -l {0} + run: | + python -m pip install --upgrade pip wheel pytest tox .[napari] + + - name: Run pytest + shell: bash -l {0} + run: pytest diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index 89acc262..40cf340e 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -5,16 +5,27 @@ on: [push, pull_request] jobs: test: - runs-on: windows-latest + name: ${{ matrix.platform }} ${{ matrix.python-version }} + runs-on: ${{ matrix.platform }} + + strategy: + fail-fast: false + matrix: + platform: [windows-latest] + python-version: [3.6, 3.7, 3.8] + steps: - name: Checkout uses: actions/checkout@v2 - - name: Setup python - uses: actions/setup-python@v2 + - name: Setup miniconda + uses: conda-incubator/setup-miniconda@v1 with: - python-version: 3.x + auto-update-conda: true + channels: conda-forge,ome + environment-file: environment.yml + python-version: ${{ matrix.python-version }} - name: Clone gl-ci-helpers run: git clone --depth 1 git://github.com/vtkiorg/gl-ci-helpers.git @@ -24,9 +35,7 @@ jobs: run: gl-ci-helpers/appveyor/install_opengl.ps1 - name: Run pytest - shell: bash + shell: bash -l {0} run: > - export PATH="/c/Python37:/c/Python37/Scripts:$PATH" && - python -m pip install --upgrade pip wheel pytest tox scikit-image - .[napari] && + python -m pip install --upgrade pip wheel pytest tox .[napari] && pytest diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 00000000..cbe16a06 --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,7 @@ +[settings] +known_third_party = cv2,dask,numpy,pytest,requests,scipy,setuptools,skimage,vispy,zarr +multi_line_output=6 +include_trailing_comma=False +force_grid_wrap=0 +use_parentheses=True +line_length=120 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 479f2642..b46ad1db 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,27 +1,84 @@ --- repos: + + - repo: https://github.com/asottile/seed-isort-config + rev: v1.9.3 + hooks: + - id: 
seed-isort-config + + - repo: https://github.com/timothycrosley/isort + rev: 5.3.2 + hooks: + - id: isort + - repo: https://github.com/ambv/black rev: 19.10b0 hooks: - id: black args: [--target-version=py36] + + - repo: https://github.com/asottile/pyupgrade + rev: v2.7.2 + hooks: + - id: pyupgrade + args: + - --py36-plus + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v1.2.3 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-json + files: \.(json)$ + - id: check-yaml + - id: fix-encoding-pragma + args: + - --remove + - id: trailing-whitespace + - id: check-case-conflict + - id: check-merge-conflict + - id: check-symlinks + - id: pretty-format-json + args: + - --autofix + - repo: https://gitlab.com/pycqa/flake8 rev: 3.8.3 hooks: - id: flake8 + additional_dependencies: [ + flake8-blind-except, + flake8-builtins, + flake8-rst-docstrings, + flake8-logging-format, + ] args: [ # default black line length is 88 - --max-line-length=88, + "--max-line-length=88", # Conflicts with black: E203 whitespace before ':' - --ignore=E203, + "--ignore=E203", + "--rst-roles=class,func,ref,module,const", ] + - repo: https://github.com/pre-commit/mirrors-mypy rev: v0.782 hooks: - id: mypy - files: ome_zarr.py + + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v0.782 + hooks: + - id: mypy + args: [ + --disallow-untyped-defs, + --ignore-missing-imports, + ] + exclude: tests/*|setup.py + - repo: https://github.com/adrienverge/yamllint.git rev: v1.24.2 hooks: - id: yamllint # args: [--config-data=relaxed] + # diff --git a/.travis.yml b/.travis.yml index cdfa1ee9..ba5d5ac9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,15 +4,10 @@ dist: bionic language: python jobs: include: - - name: Python36 - python: "3.6" - - name: Python37 - python: "3.7" - - name: Python38 - python: "3.8" - stage: deploy python: "3.6" if: tag IS present + install: skip script: skip skip_cleanup: true deploy: @@ -22,15 +17,3 @@ jobs: distributions: sdist bdist_wheel on: tags: true - -# command to install dependencies -install: - - pip install .[napari] - - pip install scikit-image # only needed for tests - -# command to run tests -script: pytest - -cache: - directories: - - $HOME/.cache/pip diff --git a/environment.yml b/environment.yml new file mode 100644 index 00000000..6fc893b4 --- /dev/null +++ b/environment.yml @@ -0,0 +1,27 @@ +#name: z +channels: + - defaults + - ome + - conda-forge +dependencies: + - pyside2 + - napari + - flake8 + - ipython + - mypy + - omero-py + - opencv + - pip + - py-opencv + - pytest + - requests + - s3fs + - scikit-image + - scipy + - xarray + - zarr >= 2.4.0 + - pip: + - pre-commit + - pytest-qt +# python.app -- only install on OSX: +# sys_platform environment marker doesn't work in environment.yml diff --git a/ome_zarr.py b/ome_zarr.py deleted file mode 100644 index 2c327677..00000000 --- a/ome_zarr.py +++ /dev/null @@ -1,379 +0,0 @@ -""" -This module is a napari plugin. - -It implements the ``napari_get_reader`` hook specification, (to create -a reader plugin). - -Type annotations here are OPTIONAL! -If you don't care to annotate the return types of your functions -your plugin doesn't need to import, or even depend on napari at all! - -Replace code below accordingly. 
-""" -import os -import json -import requests -import dask.array as da -import warnings - -from dask.diagnostics import ProgressBar -from vispy.color import Colormap - -from urllib.parse import urlparse - - -try: - from napari_plugin_engine import napari_hook_implementation -except ImportError: - - def napari_hook_implementation(func, *args, **kwargs): - return func - - -import logging - -# for optional type hints only, otherwise you can delete/ignore this stuff -from typing import List, Optional, Union, Any, Tuple, Dict, Callable - -LOGGER = logging.getLogger("ome_zarr") - - -LayerData = Union[Tuple[Any], Tuple[Any, Dict], Tuple[Any, Dict, str]] -PathLike = Union[str, List[str]] -ReaderFunction = Callable[[PathLike], List[LayerData]] -# END type hint stuff. - - -@napari_hook_implementation -def napari_get_reader(path: PathLike) -> Optional[ReaderFunction]: - """ - Returns a reader for supported paths that include IDR ID - - - URL of the form: https://s3.embassy.ebi.ac.uk/idr/zarr/v0.1/ID.zarr/ - """ - if isinstance(path, list): - path = path[0] - instance = parse_url(path) - if instance is not None and instance.is_zarr(): - return instance.get_reader_function() - # Ignoring this path - return None - - -def parse_url(path): - # Check is path is local directory first - if os.path.isdir(path): - return LocalZarr(path) - else: - result = urlparse(path) - if result.scheme in ("", "file"): - # Strips 'file://' if necessary - return LocalZarr(result.path) - else: - return RemoteZarr(path) - - -class BaseZarr: - def __init__(self, path): - self.zarr_path = path.endswith("/") and path or f"{path}/" - self.zarray = self.get_json(".zarray") - self.zgroup = self.get_json(".zgroup") - if self.zgroup: - self.root_attrs = self.get_json(".zattrs") - if "omero" in self.root_attrs: - self.image_data = self.root_attrs["omero"] - # TODO: start checking metadata version - else: - # Backup location that can be removed in the future. - warnings.warn("deprecated loading of omero.josn", DeprecationWarning) - self.image_data = self.get_json("omero.json") - - def __str__(self): - suffix = "" - if self.zgroup: - suffix += " [zgroup]" - if self.zarray: - suffix += " [zarray]" - return f"{self.zarr_path}{suffix}" - - def is_zarr(self): - return self.zarray or self.zgroup - - def is_ome_zarr(self): - return self.zgroup and "multiscales" in self.root_attrs - - def has_ome_labels(self): - "Does the zarr Image also include /labels sub-dir" - return self.get_json("labels/.zgroup") - - def is_ome_label(self): - return self.zarr_path.endswith("labels/") and self.get_json(".zgroup") - - def get_label_names(self): - """ - Called if is_ome_label is true - """ - # If this is a label, the names are in root .zattrs - return self.root_attrs.get("labels", []) - - def get_json(self, subpath): - raise NotImplementedError("unknown") - - def get_reader_function(self): - if not self.is_zarr(): - raise Exception(f"not a zarr: {self}") - return self.reader_function - - def to_rgba(self, v): - """Get rgba (0-1) e.g. (1, 0.5, 0, 1) from integer""" - return [x / 255 for x in v.to_bytes(4, signed=True, byteorder="big")] - - def reader_function(self, path: Optional[PathLike]) -> Optional[List[LayerData]]: - """Take a path or list of paths and return a list of LayerData tuples.""" - - if isinstance(path, list): - path = path[0] - # TODO: safe to ignore this path? - - if self.is_ome_zarr(): - LOGGER.debug(f"treating {path} as ome-zarr") - layers = [self.load_ome_zarr()] - # If the Image contains labels... 
- if self.has_ome_labels(): - label_path = os.path.join(self.zarr_path, "labels") - # Create a new OME Zarr Reader to load labels - labels = self.__class__(label_path).reader_function(None) - if labels: - layers.extend(labels) - return layers - - elif self.zarray: - LOGGER.debug(f"treating {path} as raw zarr") - data = da.from_zarr(f"{self.zarr_path}") - return [(data,)] - - elif self.is_ome_label(): - LOGGER.debug(f"treating {path} as labels") - return self.load_ome_labels() - - else: - LOGGER.debug(f"ignoring {path}") - return None - - def load_omero_metadata(self, assert_channel_count=None): - """Load OMERO metadata as json and convert for napari""" - metadata = {} - try: - model = "unknown" - rdefs = self.image_data.get("rdefs", {}) - if rdefs: - model = rdefs.get("model", "unset") - - channels = self.image_data.get("channels", None) - if channels is None: - return {} - - count = None - try: - count = len(channels) - if assert_channel_count: - if count != assert_channel_count: - LOGGER.error( - ( - "unexpected channel count: " - f"{count}!={assert_channel_count}" - ) - ) - return {} - except Exception: - LOGGER.warn(f"error counting channels: {channels}") - return {} - - colormaps = [] - contrast_limits = [None for x in channels] - names = [("channel_%d" % idx) for idx, ch in enumerate(channels)] - visibles = [True for x in channels] - - for idx, ch in enumerate(channels): - # 'FF0000' -> [1, 0, 0] - - color = ch.get("color", None) - if color is not None: - rgb = [(int(color[i : i + 2], 16) / 255) for i in range(0, 6, 2)] - if model == "greyscale": - rgb = [1, 1, 1] - colormaps.append(Colormap([[0, 0, 0], rgb])) - - label = ch.get("label", None) - if label is not None: - names[idx] = label - - visible = ch.get("active", None) - if visible is not None: - visibles[idx] = visible - - window = ch.get("window", None) - if window is not None: - start = window.get("start", None) - end = window.get("end", None) - if start is None or end is None: - # Disable contrast limits settings if one is missing - contrast_limits = None - elif contrast_limits is not None: - contrast_limits[idx] = [start, end] - - metadata["colormap"] = colormaps - metadata["contrast_limits"] = contrast_limits - metadata["name"] = names - metadata["visible"] = visibles - except Exception as e: - LOGGER.error(f"failed to parse metadata: {e}") - - return metadata - - def load_ome_zarr(self): - - resolutions = ["0"] # TODO: could be first alphanumeric dataset on err - try: - for k, v in self.root_attrs.items(): - LOGGER.info("root_attr: %s", k) - LOGGER.debug(v) - if "multiscales" in self.root_attrs: - datasets = self.root_attrs["multiscales"][0]["datasets"] - resolutions = [d["path"] for d in datasets] - except Exception as e: - raise e - - pyramid = [] - for resolution in resolutions: - # data.shape is (t, c, z, y, x) by convention - data = da.from_zarr(f"{self.zarr_path}{resolution}") - chunk_sizes = [ - str(c[0]) + (" (+ %s)" % c[-1] if c[-1] != c[0] else "") - for c in data.chunks - ] - LOGGER.info("resolution: %s", resolution) - LOGGER.info(" - shape (t, c, z, y, x) = %s", data.shape) - LOGGER.info(" - chunks = %s", chunk_sizes) - LOGGER.info(" - dtype = %s", data.dtype) - pyramid.append(data) - - if len(pyramid) == 1: - pyramid = pyramid[0] - - metadata = self.load_omero_metadata(data.shape[1]) - return (pyramid, {"channel_axis": 1, **metadata}) - - def load_ome_labels(self): - # look for labels in this dir... 
- label_names = self.get_label_names() - labels = [] - for name in label_names: - label_path = os.path.join(self.zarr_path, name) - label_attrs = self.get_json(f"{name}/.zattrs") - colors = {} - if "color" in label_attrs: - color_dict = label_attrs.get("color") - colors = dict() - for k, v in color_dict.items(): - try: - if k in ("true", "false"): - k = bool(k) - else: - k = int(k) - colors[k] = self.to_rgba(v) - except Exception as e: - LOGGER.error(f"invalid color - {k}={v}: {e}") - data = da.from_zarr(label_path) - # Split labels into separate channels, 1 per layer - for n in range(data.shape[1]): - labels.append( - ( - data[:, n, :, :, :], - {"visible": False, "name": name, "color": colors}, - "labels", - ) - ) - return labels - - -class LocalZarr(BaseZarr): - def get_json(self, subpath): - filename = os.path.join(self.zarr_path, subpath) - - if not os.path.exists(filename): - return {} - - with open(filename) as f: - return json.loads(f.read()) - - -class RemoteZarr(BaseZarr): - def get_json(self, subpath): - url = f"{self.zarr_path}{subpath}" - try: - rsp = requests.get(url) - except Exception: - LOGGER.warn(f"unreachable: {url} -- details logged at debug") - LOGGER.debug("exception details:", exc_info=True) - return {} - try: - if rsp.status_code in (403, 404): # file doesn't exist - return {} - return rsp.json() - except Exception: - LOGGER.error(f"({rsp.status_code}): {rsp.text}") - return {} - - -def info(path): - """ - print information about the ome-zarr fileset - """ - zarr = parse_url(path) - if not zarr.is_ome_zarr(): - print(f"not an ome-zarr: {zarr}") - return - reader = zarr.get_reader_function() - data = reader(path) - LOGGER.debug(data) - - -def download(path, output_dir=".", zarr_name=""): - """ - download zarr from URL - """ - omezarr = parse_url(path) - if not omezarr.is_ome_zarr(): - print(f"not an ome-zarr: {path}") - return - - image_id = omezarr.image_data.get("id", "unknown") - LOGGER.info("image_id %s", image_id) - if not zarr_name: - zarr_name = f"{image_id}.zarr" - - try: - datasets = [x["path"] for x in omezarr.root_attrs["multiscales"][0]["datasets"]] - except KeyError: - datasets = ["0"] - LOGGER.info("datasets %s", datasets) - resolutions = [da.from_zarr(path, component=str(i)) for i in datasets] - # levels = list(range(len(resolutions))) - - target_dir = os.path.join(output_dir, f"{zarr_name}") - if os.path.exists(target_dir): - print(f"{target_dir} already exists!") - return - print(f"downloading to {target_dir}") - - pbar = ProgressBar() - for dataset, data in reversed(list(zip(datasets, resolutions))): - LOGGER.info(f"resolution {dataset}...") - with pbar: - data.to_zarr(os.path.join(target_dir, dataset)) - - with open(os.path.join(target_dir, ".zgroup"), "w") as f: - f.write(json.dumps(omezarr.zgroup)) - with open(os.path.join(target_dir, ".zattrs"), "w") as f: - f.write(json.dumps(omezarr.root_attrs)) diff --git a/ome_zarr/__init__.py b/ome_zarr/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ome_zarr/cli.py b/ome_zarr/cli.py new file mode 100755 index 00000000..6b1d59b3 --- /dev/null +++ b/ome_zarr/cli.py @@ -0,0 +1,140 @@ +"""Entrypoint for the `ome_zarr` command-line tool.""" +import argparse +import logging +import sys +from typing import List + +from .data import astronaut, coins, create_zarr +from .scale import Scaler +from .utils import download as zarr_download +from .utils import info as zarr_info + + +def config_logging(loglevel: int, args: argparse.Namespace) -> None: + """Configure logging taking the `verbose` and 
`quiet` arguments into account.
+
+    Each `-v` lowers the `loglevel` by 10 (more verbose output) and each
+    `-q` raises it by 10 (quieter). For example, an initial loglevel of
+    `INFO` will be converted to `DEBUG` via `-vvq`.
+    """
+    loglevel = loglevel - (10 * args.verbose) + (10 * args.quiet)
+    logging.basicConfig(level=loglevel)
+    # DEBUG logging for s3fs so we can track remote calls
+    logging.getLogger("s3fs").setLevel(logging.DEBUG)
+
+
+def info(args: argparse.Namespace) -> None:
+    """Wrap the :func:`~ome_zarr.utils.info` method."""
+    config_logging(logging.WARN, args)
+    list(zarr_info(args.path))
+
+
+def download(args: argparse.Namespace) -> None:
+    """Wrap the :func:`~ome_zarr.utils.download` method."""
+    config_logging(logging.WARN, args)
+    zarr_download(args.path, args.output)
+
+
+def create(args: argparse.Namespace) -> None:
+    """Choose between the data generation methods in :mod:`ome_zarr.data`.
+
+    Either :func:`~ome_zarr.data.coins` or :func:`~ome_zarr.data.astronaut`
+    is used to write a sample image to the given path.
+    """
+    config_logging(logging.WARN, args)
+    if args.method == "coins":
+        method = coins
+        label_name = "coins"
+    elif args.method == "astronaut":
+        method = astronaut
+        label_name = "circles"
+    else:
+        raise Exception(f"unknown method: {args.method}")
+    create_zarr(args.path, method=method, label_name=label_name)
+
+
+def scale(args: argparse.Namespace) -> None:
+    """Wrap the :func:`~ome_zarr.scale.Scaler.scale` method."""
+    scaler = Scaler(
+        copy_metadata=args.copy_metadata,
+        downscale=args.downscale,
+        in_place=args.in_place,
+        labeled=args.labeled,
+        max_layer=args.max_layer,
+        method=args.method,
+    )
+    scaler.scale(args.input_array, args.output_directory)
+
+
+def main(args: List[str] = None) -> None:
+    """Run appropriate function with argparse arguments, handling errors."""
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="count",
+        default=0,
+        help="increase loglevel for each use, e.g. -vvv",
+    )
+    parser.add_argument(
+        "-q",
+        "--quiet",
+        action="count",
+        default=0,
+        help="decrease loglevel for each use, e.g. -qqq",
+    )
+    subparsers = parser.add_subparsers(dest="command")
+    subparsers.required = True
+
+    # info
+    parser_info = subparsers.add_parser("info")
+    parser_info.add_argument("path")
+    parser_info.set_defaults(func=info)
+
+    # download
+    parser_download = subparsers.add_parser("download")
+    parser_download.add_argument("path")
+    parser_download.add_argument("--output", default=".")
+    parser_download.set_defaults(func=download)
+
+    # create
+    parser_create = subparsers.add_parser("create")
+    parser_create.add_argument(
+        "--method", choices=("coins", "astronaut"), default="coins"
+    )
+    parser_create.add_argument("path")
+    parser_create.set_defaults(func=create)
+
+    # scale
+    parser_scale = subparsers.add_parser("scale")
+    parser_scale.add_argument("input_array")
+    parser_scale.add_argument("output_directory")
+    parser_scale.add_argument(
+        "--labeled",
+        action="store_true",
+        help="assert that the list of unique pixel values doesn't change",
+    )
+    parser_scale.add_argument(
+        "--copy-metadata",
+        action="store_true",
+        help="copies the array metadata to the new group",
+    )
+    parser_scale.add_argument(
+        "--method", choices=list(Scaler.methods()), default="nearest"
+    )
+    parser_scale.add_argument(
+        "--in-place", action="store_true", help="if true, don't write the base array"
+    )
+    parser_scale.add_argument("--downscale", type=int, default=2)
+    parser_scale.add_argument("--max_layer", type=int, default=4)
+    parser_scale.set_defaults(func=scale)
+
+    # parse_args already falls back to sys.argv[1:] when args is None
+    ns = parser.parse_args(args)
+
+    try:
+        ns.func(ns)
+    except AssertionError as error:
+        logging.getLogger("ome_zarr.cli").error(error)
+        sys.exit(2)
diff --git a/ome_zarr/conversions.py b/ome_zarr/conversions.py
new file mode 100644
index 00000000..b7096ba4
--- /dev/null
+++ b/ome_zarr/conversions.py
@@ -0,0 +1,34 @@
+"""Simple conversion helpers."""
+
+from typing import List
+
+
+def int_to_rgba(v: int) -> List[float]:
+    """Get rgba (0-1) e.g. (1, 0.5, 0, 1) from integer.
+    >>> print(int_to_rgba(0))
+    [0.0, 0.0, 0.0, 0.0]
+    >>> print([round(x, 3) for x in int_to_rgba(100100)])
+    [0.0, 0.004, 0.529, 0.016]
+    """
+    return [x / 255 for x in v.to_bytes(4, signed=True, byteorder="big")]
+
+
+def int_to_rgba_255(v: int) -> List[int]:
+    """Get rgba (0-255) from integer.
+    >>> print(int_to_rgba_255(0))
+    [0, 0, 0, 0]
+    >>> print([round(x, 3) for x in int_to_rgba_255(100100)])
+    [0, 1, 135, 4]
+    """
+    return [x for x in v.to_bytes(4, signed=True, byteorder="big")]
+
+
+def rgba_to_int(r: int, g: int, b: int, a: int) -> int:
+    """Use int.from_bytes to convert a color tuple.
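+
+    This is the inverse of :func:`int_to_rgba_255`.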
+ + >>> print(rgba_to_int(0, 0, 0, 0)) + 0 + >>> print(rgba_to_int(0, 1, 135, 4)) + 100100 + """ + return int.from_bytes([r, g, b, a], byteorder="big", signed=True) diff --git a/ome_zarr/data.py b/ome_zarr/data.py new file mode 100644 index 00000000..ff926a72 --- /dev/null +++ b/ome_zarr/data.py @@ -0,0 +1,166 @@ +"""Functions for generating synthetic data.""" +from typing import Callable, List, Tuple + +import numpy as np +import zarr +from scipy.ndimage import zoom +from skimage import data +from skimage.filters import threshold_otsu +from skimage.measure import label +from skimage.morphology import closing, remove_small_objects, square +from skimage.segmentation import clear_border + +from .conversions import rgba_to_int +from .scale import Scaler + +CHANNEL_DIMENSION = 1 + + +def coins() -> Tuple[List, List]: + """Sample data from skimage.""" + # Thanks to Juan + # https://gist.github.com/jni/62e07ddd135dbb107278bc04c0f9a8e7 + image = data.coins()[50:-50, 50:-50] + thresh = threshold_otsu(image) + bw = closing(image > thresh, square(4)) + cleared = remove_small_objects(clear_border(bw), 20) + label_image = label(cleared) + + pyramid = list(reversed([zoom(image, 2 ** i, order=3) for i in range(4)])) + labels = list(reversed([zoom(label_image, 2 ** i, order=0) for i in range(4)])) + + pyramid = [rgb_to_5d(layer) for layer in pyramid] + labels = [rgb_to_5d(layer) for layer in labels] + return pyramid, labels + + +def astronaut() -> Tuple[List, List]: + """Sample data from skimage.""" + scaler = Scaler() + + pixels = rgb_to_5d(np.tile(data.astronaut(), (2, 2, 1))) + pyramid = scaler.nearest(pixels) + + shape = list(pyramid[0].shape) + shape[1] = 1 + label = np.zeros(shape) + make_circle(100, 100, 1, label[0, 0, 0, 200:300, 200:300]) + make_circle(150, 150, 2, label[0, 0, 0, 250:400, 250:400]) + labels = scaler.nearest(label) + + return pyramid, labels + + +def make_circle(h: int, w: int, value: int, target: np.ndarray) -> None: + """Apply a 2D circular mask to the given array. + + >>> import numpy as np + >>> example = np.zeros((8, 8)) + >>> make_circle(8, 8, 1, example) + >>> print(example) + [[0. 0. 0. 0. 0. 0. 0. 0.] + [0. 0. 1. 1. 1. 1. 1. 0.] + [0. 1. 1. 1. 1. 1. 1. 1.] + [0. 1. 1. 1. 1. 1. 1. 1.] + [0. 1. 1. 1. 1. 1. 1. 1.] + [0. 1. 1. 1. 1. 1. 1. 1.] + [0. 1. 1. 1. 1. 1. 1. 1.] + [0. 0. 1. 1. 1. 1. 1. 
0.]] + """ + x = np.arange(0, w) + y = np.arange(0, h) + + cx = w // 2 + cy = h // 2 + r = min(w, h) // 2 + + mask = (x[np.newaxis, :] - cx) ** 2 + (y[:, np.newaxis] - cy) ** 2 < r ** 2 + target[mask] = value + + +def rgb_to_5d(pixels: np.ndarray) -> List: + """Convert an RGB image into 5D image (t, c, z, y, x).""" + if len(pixels.shape) == 2: + stack = np.array([pixels]) + channels = np.array([stack]) + elif len(pixels.shape) == 3: + size_c = pixels.shape[2] + channels = [np.array([pixels[:, :, c]]) for c in range(size_c)] + else: + assert False, f"expecting 2 or 3d: ({pixels.shape})" + video = np.array([channels]) + return video + + +def write_multiscale(pyramid: List, group: zarr.Group) -> None: + """Write a pyramid with multiscale metadata to disk.""" + paths = [] + for path, dataset in enumerate(pyramid): + group.create_dataset(str(path), data=pyramid[path]) + paths.append({"path": str(path)}) + + multiscales = [{"version": "0.1", "datasets": paths}] + group.attrs["multiscales"] = multiscales + + +def create_zarr( + zarr_directory: str, + method: Callable[..., Tuple[List, List]] = coins, + label_name: str = "coins", +) -> None: + """Generate a synthetic image pyramid with labels.""" + pyramid, labels = method() + + store = zarr.DirectoryStore(zarr_directory) + grp = zarr.group(store) + write_multiscale(pyramid, grp) + + if pyramid[0].shape[CHANNEL_DIMENSION] == 1: + image_data = { + "channels": [{"window": {"start": 0, "end": 1}}], + "rdefs": {"model": "greyscale"}, + } + else: + image_data = { + "channels": [ + { + "color": "FF0000", + "window": {"start": 0, "end": 1}, + "label": "Red", + "active": True, + }, + { + "color": "00FF00", + "window": {"start": 0, "end": 1}, + "label": "Green", + "active": True, + }, + { + "color": "0000FF", + "window": {"start": 0, "end": 1}, + "label": "Blue", + "active": True, + }, + ], + "rdefs": {"model": "color"}, + } + grp.attrs["omero"] = image_data + + if labels: + + labels_grp = grp.create_group("labels") + labels_grp.attrs["labels"] = [label_name] + + label_grp = labels_grp.create_group(label_name) + write_multiscale(labels, label_grp) + label_grp.attrs["color"] = { + "1": rgba_to_int(50, 0, 0, 0), + "2": rgba_to_int(0, 50, 0, 0), + "3": rgba_to_int(0, 0, 50, 0), + "4": rgba_to_int(100, 0, 0, 0), + "5": rgba_to_int(0, 100, 0, 0), + "6": rgba_to_int(0, 0, 100, 0), + "7": rgba_to_int(50, 50, 50, 0), + "8": rgba_to_int(100, 100, 100, 0), + } + label_grp.attrs["image"] = {"array": "../../", "source": {}} diff --git a/ome_zarr/io.py b/ome_zarr/io.py new file mode 100644 index 00000000..99161859 --- /dev/null +++ b/ome_zarr/io.py @@ -0,0 +1,146 @@ +"""Reading logic for ome-zarr. + +Primary entry point is the :func:`~ome_zarr.io.parse_url` method. +""" + +import json +import logging +import os +import posixpath +from abc import ABC, abstractmethod +from typing import Optional +from urllib.parse import urlparse + +import dask.array as da +import requests + +from .types import JSONDict + +LOGGER = logging.getLogger("ome_zarr.io") + + +class BaseZarrLocation(ABC): + """ + Base IO primitive for reading Zarr data. + + No assumptions about the existence of the given path string are made. + Attempts are made to load various metadata files and cache them internally. 
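+
+    The given path is normalized to end in "/" so that metadata subpaths
+    such as ".zattrs" can be appended to it directly.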
+ """ + + def __init__(self, path: str) -> None: + self.zarr_path: str = path.endswith("/") and path or f"{path}/" + self.zarray: JSONDict = self.get_json(".zarray") + self.zgroup: JSONDict = self.get_json(".zgroup") + self.__metadata: JSONDict = {} + self.__exists: bool = True + if self.zgroup: + self.__metadata = self.get_json(".zattrs") + elif self.zarray: + self.__metadata = self.get_json(".zattrs") + else: + self.__exists = False + + def __repr__(self) -> str: + """Print the path as well as whether this is a group or an array.""" + suffix = "" + if self.zgroup: + suffix += " [zgroup]" + if self.zarray: + suffix += " [zarray]" + return f"{self.zarr_path}{suffix}" + + def exists(self) -> bool: + """Return true if either zgroup or zarray metadata exists.""" + return self.__exists + + @property + def root_attrs(self) -> JSONDict: + """Return the contents of the zattrs file.""" + return dict(self.__metadata) + + @abstractmethod + def get_json(self, subpath: str) -> JSONDict: + """Must be implemented by subclasses.""" + raise NotImplementedError("unknown") + + def load(self, subpath: str) -> da.core.Array: + """Use dask.array.from_zarr to load the subpath.""" + return da.from_zarr(f"{self.zarr_path}{subpath}") + + # TODO: update to from __future__ import annotations with 3.7+ + def create(self, path: str) -> "BaseZarrLocation": + """Create a new Zarr location for the given path.""" + subpath = posixpath.join(self.zarr_path, path) + subpath = posixpath.normpath(subpath) + LOGGER.debug(f"open({self.__class__.__name__}({subpath}))") + return self.__class__(posixpath.normpath(f"{subpath}")) + + +class LocalZarrLocation(BaseZarrLocation): + """ + Uses the :module:`json` library for loading JSON from disk. + """ + + def get_json(self, subpath: str) -> JSONDict: + """ + Load and return a given subpath of self.zarr_path as JSON. + + If a file does not exist, an empty response is returned rather + than an exception. + """ + filename = os.path.join(self.zarr_path, subpath) + + if not os.path.exists(filename): + LOGGER.debug(f"{filename} does not exist") + return {} + + with open(filename) as f: + return json.loads(f.read()) + + +class RemoteZarrLocation(BaseZarrLocation): + """ Uses the :module:`requests` library for accessing Zarr metadata files. """ + + def get_json(self, subpath: str) -> JSONDict: + """ + Load and return a given subpath of self.zarr_path as JSON. + + HTTP 403 and 404 responses are treated as if the file does not exist. + Exceptions during the remote connection are logged at the WARN level. + All other exceptions log at the ERROR level. + """ + url = f"{self.zarr_path}{subpath}" + try: + rsp = requests.get(url) + except Exception: + LOGGER.warn(f"unreachable: {url} -- details logged at debug") + LOGGER.debug("exception details:", exc_info=True) + return {} + try: + if rsp.status_code in (403, 404): # file doesn't exist + return {} + return rsp.json() + except Exception: + LOGGER.error(f"({rsp.status_code}): {rsp.text}") + return {} + + +def parse_url(path: str) -> Optional[BaseZarrLocation]: + """Convert a path string or URL to a BaseZarrLocation subclass. 
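+    ``None`` is returned if no Zarr metadata is found at the location: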
+ + >>> parse_url('does-not-exist') + """ + # Check is path is local directory first + if os.path.isdir(path): + return LocalZarrLocation(path) + else: + result = urlparse(path) + zarr: Optional[BaseZarrLocation] = None + if result.scheme in ("", "file"): + # Strips 'file://' if necessary + zarr = LocalZarrLocation(result.path) + else: + zarr = RemoteZarrLocation(path) + if zarr.exists(): + return zarr + return None diff --git a/ome_zarr/napari.py b/ome_zarr/napari.py new file mode 100644 index 00000000..ec172b5f --- /dev/null +++ b/ome_zarr/napari.py @@ -0,0 +1,82 @@ +"""This module is a napari plugin. + +It implements the ``napari_get_reader`` hook specification, (to create a reader plugin). +""" + + +import logging +import warnings +from typing import Any, Callable, Dict, Iterator, List, Optional + +from .data import CHANNEL_DIMENSION +from .io import parse_url +from .reader import Label, Node, Reader +from .types import LayerData, PathLike, ReaderFunction + +try: + from napari_plugin_engine import napari_hook_implementation +except ImportError: + + def napari_hook_implementation( + func: Callable, *args: Any, **kwargs: Any + ) -> Callable: + return func + + +LOGGER = logging.getLogger("ome_zarr.napari") + + +@napari_hook_implementation +def napari_get_reader(path: PathLike) -> Optional[ReaderFunction]: + """Returns a reader for supported paths that include IDR ID. + + - URL of the form: https://s3.embassy.ebi.ac.uk/idr/zarr/v0.1/ID.zarr/ + """ + if isinstance(path, list): + if len(path) > 1: + warnings.warn("more than one path is not currently supported") + path = path[0] + zarr = parse_url(path) + if zarr: + reader = Reader(zarr) + return transform(reader()) + # Ignoring this path + return None + + +def transform(nodes: Iterator[Node]) -> Optional[ReaderFunction]: + def f(*args: Any, **kwargs: Any) -> List[LayerData]: + results: List[LayerData] = list() + + for node in nodes: + data: List[Any] = node.data + metadata: Dict[str, Any] = node.metadata + if data is None or len(data) < 1: + LOGGER.debug(f"skipping non-data {node}") + else: + LOGGER.debug(f"transforming {node}") + shape = data[0].shape + + layer_type: str = "image" + if node.load(Label): + layer_type = "labels" + if "colormap" in metadata: + del metadata["colormap"] + + elif shape[CHANNEL_DIMENSION] > 1: + metadata["channel_axis"] = CHANNEL_DIMENSION + else: + for x in ("name", "visible", "contrast_limits", "colormap"): + if x in metadata: + try: + metadata[x] = metadata[x][0] + except Exception: + del metadata[x] + + rv: LayerData = (data, metadata, layer_type) + LOGGER.debug(f"Transformed: {rv}") + results.append(rv) + + return results + + return f diff --git a/ome_zarr/reader.py b/ome_zarr/reader.py new file mode 100644 index 00000000..e8700a87 --- /dev/null +++ b/ome_zarr/reader.py @@ -0,0 +1,383 @@ +"""Reading logic for ome-zarr.""" + +import logging +from abc import ABC +from typing import Any, Dict, Iterator, List, Optional, Type, Union, cast + +import dask.array as da +from vispy.color import Colormap + +from .conversions import int_to_rgba +from .io import BaseZarrLocation +from .types import JSONDict + +LOGGER = logging.getLogger("ome_zarr.reader") + + +class Node: + """Container for a representation of the binary data somewhere in the data + hierarchy.""" + + def __init__( + self, + zarr: BaseZarrLocation, + root: Union["Node", "Reader", List[str]], + visibility: bool = True, + ): + self.zarr = zarr + self.root = root + self.seen: List[str] = [] + if isinstance(root, Node) or isinstance(root, Reader): + self.seen 
= root.seen + else: + self.seen = cast(List[str], root) + self.__visible = visibility + + # Likely to be updated by specs + self.metadata: JSONDict = dict() + self.data: List[da.core.Array] = list() + self.specs: List[Spec] = [] + self.pre_nodes: List[Node] = [] + self.post_nodes: List[Node] = [] + + # TODO: this should be some form of plugin infra over subclasses + if Labels.matches(zarr): + self.specs.append(Labels(self)) + if Label.matches(zarr): + self.specs.append(Label(self)) + if Multiscales.matches(zarr): + self.specs.append(Multiscales(self)) + if OMERO.matches(zarr): + self.specs.append(OMERO(self)) + + @property + def visible(self) -> bool: + """True if this node should be displayed by default. + + An invisible node may have been requested by the instrument, by the + user, or by the ome_zarr library after determining that this node + is lower priority, e.g. to prevent too many nodes from being shown + at once. + """ + return self.__visible + + @visible.setter + def visible(self, visibility: bool) -> bool: + """ + Set the visibility for this node, returning the previous value. + + A change of the visibility will propagate to all subnodes. + """ + old = self.__visible + if old != visibility: + self.__visible = visibility + for node in self.pre_nodes + self.post_nodes: + node.visible = visibility + return old + + def load(self, spec_type: Type["Spec"]) -> Optional["Spec"]: + for spec in self.specs: + if isinstance(spec, spec_type): + return spec + return None + + def add( + self, + zarr: BaseZarrLocation, + prepend: bool = False, + visibility: Optional[bool] = None, + ) -> "Optional[Node]": + """Create a child node if this location has not yet been seen. + + Newly created nodes may be considered higher or lower priority than + the current node, and may be set to invisible if necessary. + + :param zarr: Location in the node hierarchy to be added + :param prepend: Whether the newly created node should be given higher + priority than the current node, defaults to False + :param visibility: Allows setting the node (and therefore layer) + as deactivated for initial display or if None the value of the + current node will be propagated to the new node, defaults to None + :return: Newly created node if this is the first time it has been + encountered; None if the node has already been processed. + """ + + if zarr.zarr_path in self.seen: + LOGGER.debug(f"already seen {zarr}; stopping recursion") + return None + + if visibility is None: + visibility = self.visible + + self.seen.append(zarr.zarr_path) + node = Node(zarr, self, visibility=visibility) + if prepend: + self.pre_nodes.append(node) + else: + self.post_nodes.append(node) + + return node + + def write_metadata(self, metadata: JSONDict) -> None: + for spec in self.specs: + metadata.update(self.zarr.root_attrs) + + def __repr__(self) -> str: + suffix = "" + if self.zarr.zgroup: + suffix += " [zgroup]" + if self.zarr.zarray: + suffix += " [zarray]" + if not self.visible: + suffix += " (hidden)" + return f"{self.zarr.zarr_path}{suffix}" + + +class Spec(ABC): + """Base class for specifications that can be implemented by groups or arrays within + the hierarchy. + + Multiple subclasses may apply. 
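+
+    For example, an image group with "omero" rendering metadata matches both
+    :class:`Multiscales` and :class:`OMERO`, and a spec instance is attached
+    to the node for each match.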
+ """ + + @staticmethod + def matches(zarr: BaseZarrLocation) -> bool: + raise NotImplementedError() + + def __init__(self, node: Node) -> None: + self.node = node + self.zarr = node.zarr + LOGGER.debug(f"treating {self.zarr} as {self.__class__.__name__}") + for k, v in self.zarr.root_attrs.items(): + LOGGER.info("root_attr: %s", k) + LOGGER.debug(v) + + def lookup(self, key: str, default: Any) -> Any: + return self.zarr.root_attrs.get(key, default) + + +class Labels(Spec): + """Relatively small specification for the well-known "labels" group which only + contains the name of subgroups which should be loaded as labeled images.""" + + @staticmethod + def matches(zarr: BaseZarrLocation) -> bool: + """Does the Zarr Image group also include a /labels sub-group?""" + # TODO: also check for "labels" entry and perhaps version? + return bool("labels" in zarr.root_attrs) + + def __init__(self, node: Node) -> None: + super().__init__(node) + label_names = self.lookup("labels", []) + for name in label_names: + child_zarr = self.zarr.create(name) + if child_zarr.exists(): + node.add(child_zarr) + + +class Label(Spec): + """An additional aspect to a multiscale image is that it can be a labeled image, in + which each discrete pixel value represents a separate object.""" + + @staticmethod + def matches(zarr: BaseZarrLocation) -> bool: + """If label-specific metadata is present, then return true.""" + # FIXME: this should be the "label" metadata soon + return bool("colors" in zarr.root_attrs or "image" in zarr.root_attrs) + + def __init__(self, node: Node) -> None: + super().__init__(node) + + image = self.lookup("image", {}).get("array", None) + parent_zarr = None + if image: + # This is an ome mask, load the image + parent_zarr = self.zarr.create(image) + if parent_zarr.exists(): + LOGGER.debug(f"delegating to parent image: {parent_zarr}") + node.add(parent_zarr, prepend=True, visibility=False) + else: + parent_zarr = None + if parent_zarr is None: + LOGGER.warn(f"no parent found for {self}: {image}") + + # Metadata: TODO move to a class + colors: Dict[Union[int, bool], List[float]] = {} + color_dict = self.lookup("color", {}) + if color_dict: + for k, v in color_dict.items(): + try: + if k.lower() == "true": + k = True + elif k.lower() == "false": + k = False + else: + k = int(k) + colors[k] = int_to_rgba(v) + except Exception as e: + LOGGER.error(f"invalid color - {k}={v}: {e}") + + # TODO: a metadata transform should be provided by specific impls. 
+ name = self.zarr.zarr_path.split("/")[-1] + node.metadata.update( + { + "visible": node.visible, + "name": name, + "color": colors, + "metadata": {"image": self.lookup("image", {}), "path": name}, + } + ) + + +class Multiscales(Spec): + @staticmethod + def matches(zarr: BaseZarrLocation) -> bool: + """is multiscales metadata present?""" + if zarr.zgroup: + if "multiscales" in zarr.root_attrs: + return True + return False + + def __init__(self, node: Node) -> None: + super().__init__(node) + + try: + datasets = self.lookup("multiscales", [])[0]["datasets"] + datasets = [d["path"] for d in datasets] + self.datasets: List[str] = datasets + LOGGER.info("datasets %s", datasets) + except Exception as e: + LOGGER.error(f"failed to parse multiscale metadata: {e}") + return # EARLY EXIT + + for resolution in self.datasets: + # data.shape is (t, c, z, y, x) by convention + data: da.core.Array = self.zarr.load(resolution) + chunk_sizes = [ + str(c[0]) + (" (+ %s)" % c[-1] if c[-1] != c[0] else "") + for c in data.chunks + ] + LOGGER.info("resolution: %s", resolution) + LOGGER.info(" - shape (t, c, z, y, x) = %s", data.shape) + LOGGER.info(" - chunks = %s", chunk_sizes) + LOGGER.info(" - dtype = %s", data.dtype) + node.data.append(data) + + # Load possible node data + child_zarr = self.zarr.create("labels") + if child_zarr.exists(): + node.add(child_zarr, visibility=False) + + +class OMERO(Spec): + @staticmethod + def matches(zarr: BaseZarrLocation) -> bool: + return bool("omero" in zarr.root_attrs) + + def __init__(self, node: Node) -> None: + super().__init__(node) + # TODO: start checking metadata version + self.image_data = self.lookup("omero", {}) + + try: + model = "unknown" + rdefs = self.image_data.get("rdefs", {}) + if rdefs: + model = rdefs.get("model", "unset") + + channels = self.image_data.get("channels", None) + if channels is None: + return # EARLY EXIT + + try: + len(channels) + except Exception: + LOGGER.warn(f"error counting channels: {channels}") + return # EARLY EXIT + + colormaps = [] + contrast_limits: Optional[List[Optional[Any]]] = [None for x in channels] + names: List[str] = [("channel_%d" % idx) for idx, ch in enumerate(channels)] + visibles: List[bool] = [True for x in channels] + + for idx, ch in enumerate(channels): + # 'FF0000' -> [1, 0, 0] + + color = ch.get("color", None) + if color is not None: + rgb = [(int(color[i : i + 2], 16) / 255) for i in range(0, 6, 2)] + # TODO: make this value an enumeration + if model == "greyscale": + rgb = [1, 1, 1] + colormaps.append(Colormap([[0, 0, 0], rgb])) + + label = ch.get("label", None) + if label is not None: + names[idx] = label + + visible = ch.get("active", None) + if visible is not None: + visibles[idx] = visible and node.visible + + window = ch.get("window", None) + if window is not None: + start = window.get("start", None) + end = window.get("end", None) + if start is None or end is None: + # Disable contrast limits settings if one is missing + contrast_limits = None + elif contrast_limits is not None: + contrast_limits[idx] = [start, end] + + node.metadata["name"] = names + node.metadata["visible"] = visibles + node.metadata["contrast_limits"] = contrast_limits + node.metadata["colormap"] = colormaps + except Exception as e: + LOGGER.error(f"failed to parse metadata: {e}") + + +class Reader: + """Parses the given Zarr instance into a collection of Nodes properly ordered + depending on context. + + Depending on the starting point, metadata may be followed up or down the Zarr + hierarchy. 
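+
+    Starting at an image will also descend into its "labels" subgroups,
+    while starting at a label will first ascend to the parent image that
+    it annotates.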
+ """ + + def __init__(self, zarr: BaseZarrLocation) -> None: + assert zarr.exists() + self.zarr = zarr + self.seen: List[str] = [zarr.zarr_path] + + def __call__(self) -> Iterator[Node]: + node = Node(self.zarr, self) + if node.specs: # Something has matched + + LOGGER.debug(f"treating {self.zarr} as ome-zarr") + yield from self.descend(node) + + # TODO: API thoughts for the Spec type + # - ask for recursion or not + # - ask for "provides data", "overrides data" + + elif self.zarr.zarray: # Nothing has matched + LOGGER.debug(f"treating {self.zarr} as raw zarr") + data = da.from_zarr(f"{self.zarr.zarr_path}") + node.data.append(data) + yield node + + else: + LOGGER.debug(f"ignoring {self.zarr}") + # yield nothing + + def descend(self, node: Node, depth: int = 0) -> Iterator[Node]: + + for pre_node in node.pre_nodes: + yield from self.descend(pre_node, depth + 1) + + LOGGER.debug(f"returning {node}") + yield node + + for post_node in node.post_nodes: + yield from self.descend(post_node, depth + 1) diff --git a/ome_zarr/scale.py b/ome_zarr/scale.py new file mode 100644 index 00000000..b73630ef --- /dev/null +++ b/ome_zarr/scale.py @@ -0,0 +1,206 @@ +"""Module for downsampling numpy arrays via various methods. + +See the :class:`~ome_zarr.scale.Scaler` class for details. +""" +import inspect +import logging +import os +from collections.abc import MutableMapping +from dataclasses import dataclass +from typing import Callable, Iterator, List + +import cv2 +import numpy as np +import zarr +from scipy.ndimage import zoom +from skimage.transform import downscale_local_mean, pyramid_gaussian, pyramid_laplacian + +LOGGER = logging.getLogger("ome_zarr.scale") + + +@dataclass +class Scaler: + """Helper class for performing various types of downsampling. + + A method can be chosen by name such as "nearest". All methods on this + that do not begin with "_" and not either "methods" or "scale" are valid + choices. These values can be returned by the + :func:`~ome_zarr.scale.Scaler.methods` method. + + >>> import numpy as np + >>> data = np.zeros((1, 1, 1, 64, 64)) + >>> scaler = Scaler() + >>> downsampling = scaler.nearest(data) + >>> for x in downsampling: + ... print(x.shape) + (1, 1, 1, 64, 64) + (1, 1, 1, 32, 32) + (1, 1, 1, 16, 16) + (1, 1, 1, 8, 8) + (1, 1, 1, 4, 4) + """ + + copy_metadata: bool = False + downscale: int = 2 + in_place: bool = False + labeled: bool = False + max_layer: int = 4 + method: str = "nearest" + + @staticmethod + def methods() -> Iterator[str]: + """Return the name of all methods which define a downsampling. 
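+
+        >>> list(Scaler.methods())
+        ['gaussian', 'laplacian', 'local_mean', 'nearest', 'zoom']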
+
+        Any of the returned values can be used as the `methods`
+        argument to the
+        :func:`Scaler constructor <ome_zarr.scale.Scaler>`
+        """
+        funcs = inspect.getmembers(Scaler, predicate=inspect.isfunction)
+        for name, func in funcs:
+            if name in ("methods", "scale"):
+                continue
+            if name.startswith("_"):
+                continue
+            yield name
+
+    def scale(self, input_array: str, output_directory: str) -> None:
+        """Perform downsampling to disk."""
+        func = getattr(self, self.method, None)
+        if not func:
+            raise Exception(f"unknown method: {self.method}")
+
+        store = self.__check_store(output_directory)
+        base = zarr.open_array(input_array)
+        pyramid = func(base)
+
+        if self.labeled:
+            self.__assert_values(pyramid)
+
+        grp = self.__create_group(store, base, pyramid)
+
+        if self.copy_metadata:
+            print(f"copying attribute keys: {list(base.attrs.keys())}")
+            grp.attrs.update(base.attrs)
+
+    def __check_store(self, output_directory: str) -> MutableMapping:
+        """Return a new Zarr store, asserting that the target does not yet exist."""
+        assert not os.path.exists(output_directory)
+        return zarr.DirectoryStore(output_directory)
+
+    def __assert_values(self, pyramid: List[np.ndarray]) -> None:
+        """Check that no pyramid level introduces values missing from the base level."""
+        expected = set(np.unique(pyramid[0]))
+        print(f"level 0 {pyramid[0].shape} = {len(expected)} labels")
+        for i in range(1, len(pyramid)):
+            level = pyramid[i]
+            print(f"level {i}", pyramid[i].shape, len(expected))
+            found = set(np.unique(level))
+            if not expected.issuperset(found):
+                raise Exception(
+                    f"{len(found)} found values are not "
+                    f"a subset of {len(expected)} values"
+                )
+
+    def __create_group(
+        self, store: MutableMapping, base: np.ndarray, pyramid: List[np.ndarray]
+    ) -> zarr.hierarchy.Group:
+        """Create group and datasets."""
+        grp = zarr.group(store)
+        grp.create_dataset("base", data=base)
+        series = []
+        for i, dataset in enumerate(pyramid):
+            if i == 0:
+                path = "base"
+            else:
+                path = "%s" % i
+                grp.create_dataset(path, data=pyramid[i])
+            series.append({"path": path})
+        return grp
+
+    def nearest(self, base: np.ndarray) -> List[np.ndarray]:
+        """
+        Downsample using :func:`cv2.resize`.
+
+        The :const:`cv2.INTER_NEAREST` interpolation method is used.
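+
+        Only the Y and X dimensions are downsampled; the T, C and Z
+        dimensions are passed through by :func:`_by_plane`.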
+        """
+        return self._by_plane(base, self.__nearest)

+    def __nearest(self, plane: np.ndarray, sizeY: int, sizeX: int) -> np.ndarray:
+        """Apply the 2-dimensional transformation."""
+        return cv2.resize(
+            plane,
+            # cv2 expects dsize as (width, height), i.e. (X, Y)
+            dsize=(sizeX // self.downscale, sizeY // self.downscale),
+            interpolation=cv2.INTER_NEAREST,
+        )
+
+    def gaussian(self, base: np.ndarray) -> List[np.ndarray]:
+        """Downsample using :func:`skimage.transform.pyramid_gaussian`."""
+        return list(
+            pyramid_gaussian(
+                base,
+                downscale=self.downscale,
+                max_layer=self.max_layer,
+                multichannel=False,
+            )
+        )
+
+    def laplacian(self, base: np.ndarray) -> List[np.ndarray]:
+        """Downsample using :func:`skimage.transform.pyramid_laplacian`."""
+        return list(
+            pyramid_laplacian(
+                base,
+                downscale=self.downscale,
+                max_layer=self.max_layer,
+                multichannel=False,
+            )
+        )
+
+    def local_mean(self, base: np.ndarray) -> List[np.ndarray]:
+        """Downsample using :func:`skimage.transform.downscale_local_mean`."""
+        # FIXME: fix hard-coding of the (t, c, z, y, x) factors
+        rv = [base]
+        for i in range(self.max_layer):
+            rv.append(
+                downscale_local_mean(
+                    rv[-1], factors=(1, 1, 1, self.downscale, self.downscale)
+                )
+            )
+        return rv
+
+    def zoom(self, base: np.ndarray) -> List[np.ndarray]:
+        """Downsample using :func:`scipy.ndimage.zoom`."""
+        rv = [base]
+        print(base.shape)
+        for i in range(self.max_layer):
+            print(i, self.downscale)
+            rv.append(zoom(base, self.downscale ** i))
+            print(rv[-1].shape)
+        return list(reversed(rv))
+
+    #
+    # Helpers
+    #
+
+    def _by_plane(
+        self, base: np.ndarray, func: Callable[[np.ndarray, int, int], np.ndarray],
+    ) -> List[np.ndarray]:
+        """Loop over the T, C and Z dimensions and apply func to each YX plane."""
+        assert 5 == len(base.shape)
+
+        rv = [base]
+        for i in range(self.max_layer):
+            fiveD = rv[-1]
+            # FIXME: fix hard-coding of dimensions
+            T, C, Z, Y, X = fiveD.shape
+
+            smaller = None
+            for t in range(T):
+                for c in range(C):
+                    for z in range(Z):
+                        out = func(fiveD[t][c][z][:], Y, X)
+                        if smaller is None:
+                            smaller = np.zeros((T, C, Z, out.shape[0], out.shape[1]))
+                        smaller[t][c][z] = out
+            rv.append(smaller)
+        return rv
diff --git a/ome_zarr/types.py b/ome_zarr/types.py
new file mode 100644
index 00000000..ac1e15cd
--- /dev/null
+++ b/ome_zarr/types.py
@@ -0,0 +1,11 @@
+"""Definition of complex types for use elsewhere."""
+
+from typing import Any, Callable, Dict, List, Tuple, Union
+
+LayerData = Union[Tuple[Any], Tuple[Any, Dict], Tuple[Any, Dict, str]]
+
+PathLike = Union[str, List[str]]
+
+ReaderFunction = Callable[[PathLike], List[LayerData]]
+
+JSONDict = Dict[str, Any]
diff --git a/ome_zarr/utils.py b/ome_zarr/utils.py
new file mode 100644
index 00000000..c20058e6
--- /dev/null
+++ b/ome_zarr/utils.py
@@ -0,0 +1,131 @@
+"""Utility methods for ome_zarr access."""
+
+import json
+import logging
+import os
+from typing import Iterator, List
+
+import dask.array as da
+import zarr
+from dask.diagnostics import ProgressBar
+
+from .io import parse_url
+from .reader import Multiscales, Node, Reader
+from .types import JSONDict
+
+LOGGER = logging.getLogger("ome_zarr.utils")
+
+
+def info(path: str) -> Iterator[Node]:
+    """Print information about an OME-Zarr fileset.
+
+    All :class:`Nodes <ome_zarr.reader.Node>` that are found from the given
+    path will be visited recursively.
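+
+    The visited nodes are also yielded, so the fileset can be inspected
+    programmatically as well as printed.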
+ """ + zarr = parse_url(path) + assert zarr, f"not a zarr: {zarr}" + reader = Reader(zarr) + for node in reader(): + + if not node.specs: + print(f"not an ome-zarr: {zarr}") + continue + + print(node) + print(" - metadata") + for spec in node.specs: + print(f" - {spec.__class__.__name__}") + print(" - data") + for array in node.data: + print(f" - {array.shape}") + LOGGER.debug(node.data) + yield node + + +def download(input_path: str, output_dir: str = ".") -> None: + """Download an OME-Zarr from the given path. + + All :class:`Nodes ` that are found from the given path will + be included in the download. + """ + location = parse_url(input_path) + assert location, f"not a zarr: {location}" + + reader = Reader(location) + nodes: List[Node] = list() + paths: List[str] = list() + for node in reader(): + nodes.append(node) + paths.append(node.zarr.zarr_path) + + common = strip_common_prefix(paths) + root = os.path.join(output_dir, common) + + assert not os.path.exists(root), f"{root} already exists!" + print("downloading...") + for path in paths: + print(" ", path) + print(f"to {output_dir}") + + for path, node in sorted(zip(paths, nodes)): + target_dir = os.path.join(output_dir, f"{path}") + resolutions: List[da.core.Array] = [] + datasets: List[str] = [] + for spec in node.specs: + if isinstance(spec, Multiscales): + datasets = spec.datasets + resolutions = node.data + if datasets and resolutions: + pbar = ProgressBar() + for dataset, data in reversed(list(zip(datasets, resolutions))): + LOGGER.info(f"resolution {dataset}...") + with pbar: + data.to_zarr(os.path.join(target_dir, dataset)) + else: + # Assume a group that needs metadata, like labels + zarr.group(target_dir) + + with open(os.path.join(target_dir, ".zgroup"), "w") as f: + f.write(json.dumps(node.zarr.zgroup)) + with open(os.path.join(target_dir, ".zattrs"), "w") as f: + metadata: JSONDict = {} + node.write_metadata(metadata) + f.write(json.dumps(metadata)) + + +def strip_common_prefix(paths: List[str]) -> str: + """Find and remove the prefix common to all strings. + + Returns the last element of the common prefix. + An exception is thrown if no common prefix exists. 
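+
+    Note that the given list is modified in place: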
+ + >>> paths = ["a/b", "a/b/c"] + >>> strip_common_prefix(paths) + 'b' + >>> paths + ['b', 'b/c'] + """ + parts: List[List[str]] = [x.split(os.path.sep) for x in paths] + + first_mismatch = 0 + min_length = min([len(x) for x in parts]) + + for idx in range(min_length): + if len({x[idx] for x in parts}) == 1: + first_mismatch += 1 + else: + break + + if first_mismatch <= 0: + msg = "No common prefix:\n" + for path in parts: + msg += f"{path}\n" + raise Exception(msg) + else: + common = parts[0][first_mismatch - 1] + + for idx, path in enumerate(parts): + base = os.path.sep.join(path[first_mismatch - 1 :]) + paths[idx] = base + + return common diff --git a/ome_zarr_cli.py b/ome_zarr_cli.py deleted file mode 100755 index 6dd32789..00000000 --- a/ome_zarr_cli.py +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env python - -import argparse -import logging - -from ome_zarr import info as zarr_info -from ome_zarr import download as zarr_download - - -def config_logging(loglevel, args): - loglevel = loglevel - (10 * args.verbose) + (10 * args.quiet) - logging.basicConfig(level=loglevel) - # DEBUG logging for s3fs so we can track remote calls - logging.getLogger("s3fs").setLevel(logging.DEBUG) - - -def info(args): - config_logging(logging.INFO, args) - zarr_info(args.path) - - -def download(args): - config_logging(logging.WARN, args) - zarr_download(args.path, args.output, args.name) - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument( - "-v", - "--verbose", - action="count", - default=0, - help="increase loglevel for each use, e.g. -vvv", - ) - parser.add_argument( - "-q", - "--quiet", - action="count", - default=0, - help="decrease loglevel for each use, e.q. -qqq", - ) - subparsers = parser.add_subparsers(dest="command") - subparsers.required = True - - # foo - parser_info = subparsers.add_parser("info") - parser_info.add_argument("path") - parser_info.set_defaults(func=info) - - # download - parser_download = subparsers.add_parser("download") - parser_download.add_argument("path") - parser_download.add_argument("--output", default="") - parser_download.add_argument("--name", default="") - parser_download.set_defaults(func=download) - - args = parser.parse_args() - args.func(args) diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 00000000..df3eb518 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +addopts = --doctest-modules diff --git a/setup.py b/setup.py index 7b7282fd..98aafdec 100644 --- a/setup.py +++ b/setup.py @@ -1,8 +1,9 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- -import os import codecs +import os +from typing import List + from setuptools import setup @@ -11,7 +12,8 @@ def read(fname): return codecs.open(file_path, encoding="utf-8").read() -install_requires = [] +install_requires: List[List[str]] = [] +install_requires += (["dataclasses;python_version<'3.7'"],) install_requires += (["numpy"],) install_requires += (["dask"],) install_requires += (["zarr"],) @@ -20,6 +22,7 @@ def read(fname): install_requires += (["requests"],) install_requires += (["toolz"],) install_requires += (["vispy"],) +install_requires += (["opencv-contrib-python-headless"],) setup( @@ -29,7 +32,7 @@ def read(fname): url="https://github.com/ome/ome-zarr-py", description="Implementation of images in Zarr files.", long_description=read("README.rst"), - py_modules=["ome_zarr", "ome_zarr_cli"], + py_modules=["ome_zarr"], python_requires=">=3.6", install_requires=install_requires, classifiers=[ @@ -43,9 +46,10 @@ def read(fname): "License :: OSI Approved :: 
BSD License", ], entry_points={ - "console_scripts": ["ome_zarr = ome_zarr_cli:main"], - "napari.plugin": ["ome_zarr = ome_zarr"], + "console_scripts": ["ome_zarr = ome_zarr.cli:main"], + "napari.plugin": ["ome_zarr = ome_zarr.napari"], + "pytest11": ["napari-conftest = napari.conftest"], }, extras_require={"napari": ["napari"]}, - tests_require=["pytest", "pytest-capturelog"], + tests_require=["pytest"], ) diff --git a/tests/create_test_data.py b/tests/create_test_data.py deleted file mode 100644 index 6378d939..00000000 --- a/tests/create_test_data.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env python -import zarr -import numpy as np -from skimage import data -from skimage.transform import pyramid_gaussian - - -def create_zarr(zarr_directory): - - base = np.tile(data.astronaut(), (2, 2, 1)) - gaussian = list(pyramid_gaussian(base, downscale=2, max_layer=4, multichannel=True)) - - pyramid = [] - # convert each level of pyramid into 5D image (t, c, z, y, x) - for pixels in gaussian: - red = pixels[:, :, 0] - green = pixels[:, :, 1] - blue = pixels[:, :, 2] - # wrap to make 5D: (t, c, z, y, x) - pixels = np.array([np.array([red]), np.array([green]), np.array([blue])]) - pixels = np.array([pixels]) - pyramid.append(pixels) - - store = zarr.DirectoryStore(zarr_directory) - grp = zarr.group(store) - paths = [] - for path, dataset in enumerate(pyramid): - grp.create_dataset(str(path), data=pyramid[path]) - paths.append({"path": str(path)}) - - image_data = { - "id": 1, - "channels": [ - { - "color": "FF0000", - "window": {"start": 0, "end": 1}, - "label": "Red", - "active": True, - }, - { - "color": "00FF00", - "window": {"start": 0, "end": 1}, - "label": "Green", - "active": True, - }, - { - "color": "0000FF", - "window": {"start": 0, "end": 1}, - "label": "Blue", - "active": True, - }, - ], - "rdefs": {"model": "color"}, - } - - multiscales = [{"version": "0.1", "datasets": paths}] - grp.attrs["multiscales"] = multiscales - grp.attrs["omero"] = image_data diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 00000000..16de2d49 --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,65 @@ +import os +from collections import deque +from pathlib import Path +from typing import Sequence + +import pytest + +from ome_zarr.cli import main +from ome_zarr.utils import strip_common_prefix + + +class TestCli: + @pytest.fixture(autouse=True) + def initdir(self, tmpdir): + self.path = (tmpdir / "data").mkdir() + + def test_coins_info(self): + filename = str(self.path) + "-1" + main(["create", "--method=coins", filename]) + main(["info", filename]) + + def test_astronaut_info(self): + filename = str(self.path) + "-2" + main(["create", "--method=astronaut", filename]) + main(["info", filename]) + + def test_astronaut_download(self, tmpdir): + out = str(tmpdir / "out") + filename = str(self.path) + "-3" + basename = os.path.split(filename)[-1] + main(["create", "--method=astronaut", filename]) + main(["download", filename, f"--output={out}"]) + main(["info", f"{out}/{basename}"]) + + def test_strip_prefix_relative(self): + top = Path(".") / "d" + mid = Path(".") / "d" / "e" + bot = Path(".") / "d" / "e" / "f" + self._rotate_and_test(top, mid, bot) + + def test_strip_prefix_absolute(self): + top = Path("/") / "a" / "b" / "c" / "d" + mid = Path("/") / "a" / "b" / "c" / "d" / "e" + bot = Path("/") / "a" / "b" / "c" / "d" / "e" / "f" + self._rotate_and_test(top, mid, bot) + + def _rotate_and_test(self, *hierarchy: Path, reverse: bool = True): + results: Sequence[str] = ( + str(Path("d")), + 
str(Path("d") / "e"), + str(Path("d") / "e" / "f"), + ) + for x in range(3): + firstpass = deque(hierarchy) + firstpass.rotate(1) + + copy = [str(x) for x in firstpass] + common = strip_common_prefix(copy) + assert "d" == common + assert set(copy) == set(results) + + if reverse: + secondpass: deque = deque(hierarchy) + secondpass.reverse() + self._rotate_and_test(*list(secondpass), reverse=False) diff --git a/tests/test_napari.py b/tests/test_napari.py new file mode 100644 index 00000000..0ee3a3c3 --- /dev/null +++ b/tests/test_napari.py @@ -0,0 +1,72 @@ +import sys + +import numpy as np +import pytest + +from ome_zarr.data import astronaut, create_zarr +from ome_zarr.napari import napari_get_reader + + +class TestNapari: + @pytest.fixture(autouse=True) + def initdir(self, tmpdir): + self.path = tmpdir.mkdir("data") + create_zarr(str(self.path), astronaut, "astronaut") + + def assert_layers(self, layers, visible_1, visible_2): + # TODO: check name + + assert len(layers) == 2 + image, label = layers + + data, metadata, layer_type = self.assert_layer(image) + assert 1 == metadata["channel_axis"] + assert ["Red", "Green", "Blue"] == metadata["name"] + assert [[0, 1]] * 3 == metadata["contrast_limits"] + assert [visible_1] * 3 == metadata["visible"] + + data, metadata, layer_type = self.assert_layer(label) + assert visible_2 == metadata["visible"] + + def assert_layer(self, layer_data): + data, metadata, layer_type = layer_data + if not data or not metadata: + assert False, f"unknown layer: {layer_data}" + assert layer_type in ("image", "labels") + return data, metadata, layer_type + + def test_image(self): + layers = napari_get_reader(str(self.path))() + self.assert_layers(layers, True, False) + + def test_labels(self): + filename = str(self.path.join("labels")) + layers = napari_get_reader(filename)() + self.assert_layers(layers, False, True) + + def test_label(self): + filename = str(self.path.join("labels", "astronaut")) + layers = napari_get_reader(filename)() + self.assert_layers(layers, False, True) + + @pytest.mark.skipif( + not sys.platform.startswith("darwin"), + reason="Qt builds are failing on Windows and Ubuntu", + ) + def test_viewer(self, make_test_viewer): + """example of testing the viewer.""" + viewer = make_test_viewer() + + shapes = [(4000, 3000), (2000, 1500), (1000, 750), (500, 375)] + np.random.seed(0) + data = [np.random.random(s) for s in shapes] + _ = viewer.add_image(data, multiscale=True, contrast_limits=[0, 1]) + layer = viewer.layers[0] + + # Set canvas size to target amount + viewer.window.qt_viewer.view.canvas.size = (800, 600) + list(viewer.window.qt_viewer.layer_to_visual.values())[0].on_draw(None) + + # Check that current level is first large enough to fill the canvas with + # a greater than one pixel depth + assert layer.data_level == 2 diff --git a/tests/test_node.py b/tests/test_node.py new file mode 100644 index 00000000..dff986d8 --- /dev/null +++ b/tests/test_node.py @@ -0,0 +1,29 @@ +import pytest + +from ome_zarr.data import create_zarr +from ome_zarr.io import parse_url +from ome_zarr.reader import Node + + +class TestNode: + @pytest.fixture(autouse=True) + def initdir(self, tmpdir): + self.path = tmpdir.mkdir("data") + create_zarr(str(self.path)) + + def test_image(self): + node = Node(parse_url(str(self.path)), list()) + assert node.data + assert node.metadata + + def test_labels(self): + filename = str(self.path.join("labels")) + node = Node(parse_url(filename), list()) + assert not node.data + assert not node.metadata + + def test_label(self): + 
filename = str(self.path.join("labels", "coins")) + node = Node(parse_url(filename), list()) + assert node.data + assert node.metadata diff --git a/tests/test_ome_zarr.py b/tests/test_ome_zarr.py index c56942b3..7d2505ff 100644 --- a/tests/test_ome_zarr.py +++ b/tests/test_ome_zarr.py @@ -1,10 +1,11 @@ -# -*- coding: utf-8 -*- - -from ome_zarr import napari_get_reader, info, download -from .create_test_data import create_zarr -import tempfile -import os import logging +import os + +import pytest + +from ome_zarr.data import astronaut, create_zarr +from ome_zarr.napari import napari_get_reader +from ome_zarr.utils import download, info def log_strings(idx, t, c, z, y, x, ct, cc, cz, cy, cx, dtype): @@ -15,32 +16,29 @@ def log_strings(idx, t, c, z, y, x, ct, cc, cz, cy, cx, dtype): class TestOmeZarr: - @classmethod - def setup_class(cls): - """ setup any state specific to the execution of the given class (which - usually contains tests). - """ - cls.path = tempfile.TemporaryDirectory(suffix=".zarr").name - create_zarr(cls.path) + @pytest.fixture(autouse=True) + def initdir(self, tmpdir): + self.path = tmpdir.mkdir("data") + create_zarr(str(self.path), method=astronaut) def test_get_reader_hit(self): - reader = napari_get_reader(self.path) + reader = napari_get_reader(str(self.path)) assert reader is not None assert callable(reader) def test_reader(self): - reader = napari_get_reader(self.path) - results = reader(self.path) - assert results is not None and len(results) == 1 - result = results[0] - assert isinstance(result[0], list) - assert isinstance(result[1], dict) - assert result[1]["channel_axis"] == 1 - assert result[1]["name"] == ["Red", "Green", "Blue"] + reader = napari_get_reader(str(self.path)) + results = reader(str(self.path)) + assert len(results) == 2 + image, label = results + assert isinstance(image[0], list) + assert isinstance(image[1], dict) + assert image[1]["channel_axis"] == 1 + assert image[1]["name"] == ["Red", "Green", "Blue"] def test_get_reader_with_list(self): # a better test here would use real data - reader = napari_get_reader([self.path]) + reader = napari_get_reader([str(self.path)]) assert reader is not None assert callable(reader) @@ -55,22 +53,19 @@ def check_info_stdout(self, out): assert log in out # from info's print of omero metadata - assert "'channel_axis': 1" in out - assert "'name': ['Red', 'Green', 'Blue']" in out - assert "'contrast_limits': [[0, 1], [0, 1], [0, 1]]" in out - assert "'visible': [True, True, True]" in out + # note: some metadata is no longer handled by info but rather + # in the ome_zarr.napari.transform method - def test_info(self, capsys, caplog): + def test_info(self, caplog): with caplog.at_level(logging.DEBUG): - info(self.path) + list(info(str(self.path))) self.check_info_stdout(caplog.text) - def test_download(self, capsys, caplog): - target = tempfile.TemporaryDirectory().name - name = "test.zarr" + def test_download(self, capsys, caplog, tmpdir): + target = str(tmpdir / "out") with caplog.at_level(logging.DEBUG): - download(self.path, output_dir=target, zarr_name=name) - download_zarr = os.path.join(target, name) + download(str(self.path), output_dir=target) + download_zarr = os.path.join(target, "data") assert os.path.exists(download_zarr) info(download_zarr) self.check_info_stdout(caplog.text) diff --git a/tests/test_reader.py b/tests/test_reader.py new file mode 100644 index 00000000..f1bc20a1 --- /dev/null +++ b/tests/test_reader.py @@ -0,0 +1,30 @@ +import pytest + +from ome_zarr.data import create_zarr +from 
+from ome_zarr.io import parse_url
+from ome_zarr.reader import Node, Reader
+
+
+class TestReader:
+    @pytest.fixture(autouse=True)
+    def initdir(self, tmpdir):
+        self.path = tmpdir.mkdir("data")
+        create_zarr(str(self.path))
+
+    def assert_node(self, node: Node):
+        if not node.data and not node.metadata:
+            assert False, f"Empty node received: {node}"
+
+    def test_image(self):
+        reader = Reader(parse_url(str(self.path)))
+        assert len(list(reader())) == 3
+
+    def test_labels(self):
+        filename = str(self.path.join("labels"))
+        reader = Reader(parse_url(filename))
+        assert len(list(reader())) == 3
+
+    def test_label(self):
+        filename = str(self.path.join("labels", "coins"))
+        reader = Reader(parse_url(filename))
+        assert len(list(reader())) == 3
diff --git a/tests/test_starting_points.py b/tests/test_starting_points.py
new file mode 100644
index 00000000..472c4cf9
--- /dev/null
+++ b/tests/test_starting_points.py
@@ -0,0 +1,59 @@
+from typing import List, Set, Type
+
+import pytest
+
+from ome_zarr.data import create_zarr
+from ome_zarr.io import parse_url
+from ome_zarr.reader import OMERO, Label, Labels, Multiscales, Node, Spec
+
+
+class TestStartingPoints:
+    """Creates a small but complete OME-Zarr file and tests that readers
+    detect the correct type when starting at each of the various levels."""
+
+    @pytest.fixture(autouse=True)
+    def initdir(self, tmpdir):
+        self.path = tmpdir.mkdir("data")
+        create_zarr(str(self.path))
+
+    def matches(self, node: Node, expected: Set[Type[Spec]]):
+        found: List[Type[Spec]] = list()
+        for spec in node.specs:
+            found.append(type(spec))
+
+        expected_names = sorted([x.__name__ for x in expected])
+        found_names = sorted([x.__name__ for x in found])
+        assert expected_names == found_names
+
+    def get_spec(self, node: Node, spec_type: Type[Spec]):
+        for spec in node.specs:
+            if isinstance(spec, spec_type):
+                return spec
+        assert False, f"no {spec_type} found"
+
+    def test_top_level(self):
+        zarr = parse_url(str(self.path))
+        assert zarr is not None
+        node = Node(zarr, list())
+        self.matches(node, {Multiscales, OMERO})
+        multiscales = self.get_spec(node, Multiscales)
+        assert multiscales.lookup("multiscales", [])
+
+    def test_labels(self):
+        zarr = parse_url(str(self.path + "/labels"))
+        assert zarr is not None
+        node = Node(zarr, list())
+        self.matches(node, {Labels})
+
+    def test_label(self):
+        zarr = parse_url(str(self.path + "/labels/coins"))
+        assert zarr is not None
+        node = Node(zarr, list())
+        self.matches(node, {Label, Multiscales})
+        multiscales = self.get_spec(node, Multiscales)
+        assert multiscales.lookup("multiscales", [])
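+
+# For reference, the fixture written by create_zarr resolves as:
+#   <root>               -> Multiscales + OMERO
+#   <root>/labels        -> Labels
+#   <root>/labels/coins  -> Label + Multiscales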