Commit a1c1ff1

Handle use of soft import sub modules for typing.
sharkinsspatial committed Nov 18, 2024
1 parent a8cc82f commit a1c1ff1
Showing 2 changed files with 26 additions and 23 deletions.
virtualizarr/readers/hdf/filters.py: 12 changes (10 additions & 2 deletions)
@@ -11,8 +11,16 @@

if TYPE_CHECKING:
    import h5py  # type: ignore
+    from h5py import Dataset, Group  # type: ignore

h5py = soft_import("h5py", "For reading hdf files", strict=False)
+if h5py:
+    Dataset = h5py.Dataset
+    Group = h5py.Group
+else:
+    Dataset = dict()
+    Group = dict()

hdf5plugin = soft_import(
"hdf5plugin", "For reading hdf files with filters", strict=False
)
@@ -119,7 +127,7 @@ def _filter_to_codec(
return codec


-def cfcodec_from_dataset(dataset: h5py.Dataset) -> Codec | None:
+def cfcodec_from_dataset(dataset: Dataset) -> Codec | None:
"""
Converts select h5py dataset CF convention attrs to CFCodec
@@ -166,7 +174,7 @@ def cfcodec_from_dataset(dataset: h5py.Dataset) -> Codec | None:
return None


-def codecs_from_dataset(dataset: h5py.Dataset) -> List[Codec]:
+def codecs_from_dataset(dataset: Dataset) -> List[Codec]:
"""
Extracts a list of numcodecs from an h5py dataset
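The pattern both files now share, sketched standalone below. This is a minimal sketch rather than the commit's exact code: a plain try/except stands in for virtualizarr's soft_import helper, and describe_shape is a hypothetical function added only for illustration.

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Type checkers always evaluate this block, so annotations naming
    # Dataset and Group resolve even when h5py is not installed.
    from h5py import Dataset, Group  # type: ignore

try:
    # At runtime the dependency may be missing; this try/except stands
    # in for virtualizarr's soft_import(..., strict=False).
    import h5py

    Dataset = h5py.Dataset
    Group = h5py.Group
except ImportError:
    h5py = None
    # Placeholders so the module-level names still exist; they are only
    # stand-ins, not usable h5py types.
    Dataset = dict()
    Group = dict()


def describe_shape(dataset: "Dataset") -> str:
    # Hypothetical helper: the string annotation checks against
    # h5py.Dataset for mypy, but importing this module never needs h5py.
    return f"dataset with shape {dataset.shape}"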
virtualizarr/readers/hdf/hdf.py: 37 changes (16 additions & 21 deletions)
@@ -1,9 +1,9 @@
import math
-from dataclasses import dataclass
from typing import TYPE_CHECKING, Dict, Iterable, List, Mapping, Optional, Union

import numpy as np
-from xarray import Dataset, Index, Variable
+import xarray as xr
+from xarray import Index, Variable

from virtualizarr.manifests import ChunkEntry, ChunkManifest, ManifestArray
from virtualizarr.readers.common import (
@@ -18,18 +18,15 @@

if TYPE_CHECKING:
    import h5py  # type: ignore

+    from h5py import Dataset, Group  # type: ignore

h5py = soft_import("h5py", "For reading hdf files", strict=False)

-if not h5py:
-
-    @dataclass
-    class h5py_stub:
-        Group: str
-        Dataset: str
-
-    h5py = h5py_stub(Group="", Dataset="")
+if h5py:
+    Dataset = h5py.Dataset
+    Group = h5py.Group
+else:
+    Dataset = dict()
+    Group = dict()


class HDFVirtualBackend(VirtualBackend):
@@ -42,7 +39,7 @@ def open_virtual_dataset(
decode_times: bool | None = None,
indexes: Mapping[str, Index] | None = None,
reader_options: Optional[dict] = None,
-) -> Dataset:
+) -> xr.Dataset:
drop_variables, loadable_variables = check_for_collisions(
drop_variables,
loadable_variables,
@@ -79,9 +76,7 @@ def open_virtual_dataset(
)

@staticmethod
-def _dataset_chunk_manifest(
-    path: str, dataset: h5py.Dataset
-) -> Optional[ChunkManifest]:
+def _dataset_chunk_manifest(path: str, dataset: Dataset) -> Optional[ChunkManifest]:
"""
Generate ChunkManifest for HDF5 dataset.
@@ -148,7 +143,7 @@ def add_chunk_info(blob):
return chunk_manifest

@staticmethod
-def _dataset_dims(dataset: h5py.Dataset) -> Union[List[str], List[None]]:
+def _dataset_dims(dataset: Dataset) -> Union[List[str], List[None]]:
"""
Get a list of dimension scale names attached to input HDF5 dataset.
@@ -190,14 +185,14 @@ def _dataset_dims(dataset: h5py.Dataset) -> Union[List[str], List[None]]:
return dims

@staticmethod
-def _extract_attrs(h5obj: Union[h5py.Dataset, h5py.Group]):
+def _extract_attrs(h5obj: Union[Dataset, h5py.Group]):
"""
Extract attributes from an HDF5 group or dataset.
Parameters
----------
h5obj : h5py.Group or h5py.Dataset
-    An HDF5 group or dataset.
+    An h5py group or dataset.
"""
_HIDDEN_ATTRS = {
"REFERENCE_LIST",
@@ -236,7 +231,7 @@ def _extract_attrs(h5obj: Union[h5py.Dataset, h5py.Group]):
return attrs

@staticmethod
-def _dataset_to_variable(path: str, dataset: h5py.Dataset) -> Optional[Variable]:
+def _dataset_to_variable(path: str, dataset: Dataset) -> Optional[Variable]:
"""
Extract an xarray Variable with ManifestArray data from an h5py dataset
@@ -338,7 +333,7 @@ def _virtual_vars_from_hdf(
variables = {}
for key in g.keys():
if key not in drop_variables:
-if isinstance(g[key], h5py.Dataset):
+if isinstance(g[key], Dataset):
variable = HDFVirtualBackend._dataset_to_variable(path, g[key])
if variable is not None:
variables[key] = variable
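The runtime aliases matter beyond annotations: _virtual_vars_from_hdf above checks isinstance(g[key], Dataset) while walking the file, and isinstance requires a real class as its second argument, which is what binding Dataset = h5py.Dataset provides when the import succeeds. Below is a minimal sketch of that traversal pattern, assuming h5py is installed; collect_dataset_names and "file.h5" are hypothetical names, not part of this commit.

import h5py


def collect_dataset_names(path: str) -> list[str]:
    # Walk the root group and keep only the children that are datasets,
    # mirroring the isinstance(g[key], Dataset) check in the diff above.
    with h5py.File(path, "r") as f:
        return [key for key in f.keys() if isinstance(f[key], h5py.Dataset)]


print(collect_dataset_names("file.h5"))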
