From 13c51a49940e3aa43cf2f114c46210f60ae4af53 Mon Sep 17 00:00:00 2001 From: sharkinsspatial Date: Mon, 18 Nov 2024 14:59:46 -0500 Subject: [PATCH] Handle use of soft-imported submodules for typing. --- virtualizarr/readers/hdf/hdf.py | 36 ++++++++++++++------------------- 1 file changed, 15 insertions(+), 21 deletions(-) diff --git a/virtualizarr/readers/hdf/hdf.py b/virtualizarr/readers/hdf/hdf.py index a942268c..4e34596e 100644 --- a/virtualizarr/readers/hdf/hdf.py +++ b/virtualizarr/readers/hdf/hdf.py @@ -1,9 +1,9 @@ import math -from dataclasses import dataclass from typing import TYPE_CHECKING, Dict, Iterable, List, Mapping, Optional, Union import numpy as np -from xarray import Dataset, Index, Variable +import xarray as xr +from xarray import Index, Variable from virtualizarr.manifests import ChunkEntry, ChunkManifest, ManifestArray from virtualizarr.readers.common import ( @@ -19,17 +19,13 @@ if TYPE_CHECKING: import h5py # type: ignore - h5py = soft_import("h5py", "For reading hdf files", strict=False) - -if not h5py: - - @dataclass - class h5py_stub: - Group: str - Dataset: str - - h5py = h5py_stub(Group="", Dataset="") +if h5py: + Dataset = h5py.Dataset + Group = h5py.Group +else: + Dataset = dict() + Group = dict() class HDFVirtualBackend(VirtualBackend): @@ -42,7 +38,7 @@ def open_virtual_dataset( decode_times: bool | None = None, indexes: Mapping[str, Index] | None = None, reader_options: Optional[dict] = None, - ) -> Dataset: + ) -> xr.Dataset: drop_variables, loadable_variables = check_for_collisions( drop_variables, loadable_variables, @@ -79,9 +75,7 @@ def open_virtual_dataset( ) @staticmethod - def _dataset_chunk_manifest( - path: str, dataset: h5py.Dataset - ) -> Optional[ChunkManifest]: + def _dataset_chunk_manifest(path: str, dataset: Dataset) -> Optional[ChunkManifest]: """ Generate ChunkManifest for HDF5 dataset. 
@@ -148,7 +142,7 @@ def add_chunk_info(blob): return chunk_manifest @staticmethod - def _dataset_dims(dataset: h5py.Dataset) -> Union[List[str], List[None]]: + def _dataset_dims(dataset: Dataset) -> Union[List[str], List[None]]: """ Get a list of dimension scale names attached to input HDF5 dataset. @@ -190,14 +184,14 @@ def _dataset_dims(dataset: h5py.Dataset) -> Union[List[str], List[None]]: return dims @staticmethod - def _extract_attrs(h5obj: Union[h5py.Dataset, h5py.Group]): + def _extract_attrs(h5obj: Union[Dataset, h5py.Group]): """ Extract attributes from an HDF5 group or dataset. Parameters ---------- h5obj : h5py.Group or h5py.Dataset - An HDF5 group or dataset. + An h5py group or dataset. """ _HIDDEN_ATTRS = { "REFERENCE_LIST", @@ -236,7 +230,7 @@ def _extract_attrs(h5obj: Union[h5py.Dataset, h5py.Group]): return attrs @staticmethod - def _dataset_to_variable(path: str, dataset: h5py.Dataset) -> Optional[Variable]: + def _dataset_to_variable(path: str, dataset: Dataset) -> Optional[Variable]: """ Extract an xarray Variable with ManifestArray data from an h5py dataset @@ -338,7 +332,7 @@ def _virtual_vars_from_hdf( variables = {} for key in g.keys(): if key not in drop_variables: - if isinstance(g[key], h5py.Dataset): + if isinstance(g[key], Dataset): variable = HDFVirtualBackend._dataset_to_variable(path, g[key]) if variable is not None: variables[key] = variable