Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Support HSDS server with omas_h5.py function #313

Closed
wants to merge 4 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 30 additions & 13 deletions omas/omas_h5.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from .omas_core import ODS


def dict2hdf5(filename, dictin, groupname='', recursive=True, lists_as_dicts=False, compression=None):
def dict2hdf5(filename, dictin, groupname='', recursive=True, lists_as_dicts=False, compression=None, hsds=False):
"""
Utility function to save hierarchy of dictionaries containing numpy-compatible objects to hdf5 file

Expand All @@ -22,12 +22,17 @@ def dict2hdf5(filename, dictin, groupname='', recursive=True, lists_as_dicts=Fal
:param lists_as_dicts: convert lists to dictionaries with integer strings

:param compression: gzip compression level

:param hsds: use HSDS (HDF5 in the remote server)
"""
import h5py
if hsds:
import h5pyd as h5py
else:
import h5py

if isinstance(filename, str):
with h5py.File(filename, 'w') as g:
dict2hdf5(g, dictin, recursive=recursive, lists_as_dicts=lists_as_dicts, compression=compression)
dict2hdf5(g, dictin, recursive=recursive, lists_as_dicts=lists_as_dicts, compression=compression, hsds=hsds)
return
else:
parent = filename
Expand All @@ -47,11 +52,11 @@ def dict2hdf5(filename, dictin, groupname='', recursive=True, lists_as_dicts=Fal

if isinstance(item, dict):
if recursive:
dict2hdf5(g, item, str(key), recursive=recursive, lists_as_dicts=lists_as_dicts, compression=compression)
dict2hdf5(g, item, str(key), recursive=recursive, lists_as_dicts=lists_as_dicts, compression=compression,hsds=hsds)

elif lists_as_dicts and isinstance(item, (list, tuple)) and not isinstance(item, numpy.ndarray):
item = {'%d' % k: v for k, v in enumerate(item)}
dict2hdf5(g, item, key, recursive=recursive, lists_as_dicts=lists_as_dicts, compression=compression)
dict2hdf5(g, item, key, recursive=recursive, lists_as_dicts=lists_as_dicts, compression=compression,hsds=hsds)

else:
if item is None:
Expand All @@ -75,26 +80,33 @@ def dict2hdf5(filename, dictin, groupname='', recursive=True, lists_as_dicts=Fal
return g


def save_omas_h5(ods, filename, hsds=False):
    """
    Write an OMAS data set out as an HDF5 file

    :param ods: OMAS data set

    :param filename: filename or file descriptor to save to

    :param hsds: use HSDS (HDF5 in the remote server)
    """
    # Delegate to the generic dict-to-HDF5 writer; lists_as_dicts=True maps
    # ODS list entries onto groups keyed by their integer index as a string.
    result = dict2hdf5(filename, ods, lists_as_dicts=True, hsds=hsds)
    return result


def convertDataset(ods, data):
def convertDataset(ods, data, hsds=False):
"""
Recursive utility function to map HDF5 structure to ODS

:param ods: input ODS to be populated

:param data: HDF5 dataset or group

:param hsds: use HSDS (HDF5 in the remote server)
"""
import h5py
if hsds:
import h5pyd as h5py
else:
import h5py

keys = data.keys()
try:
Expand All @@ -114,10 +126,10 @@ def convertDataset(ods, data):
else:
ods.setraw(item, data[item][()])
elif isinstance(data[item], h5py.Group):
convertDataset(ods.setraw(oitem, ods.same_init_ods()), data[item])
convertDataset(ods.setraw(oitem, ods.same_init_ods()), data[item], hsds=hsds)


def load_omas_h5(filename, consistency_check=True, imas_version=omas_rcparams['default_imas_version'], cls=ODS):
def load_omas_h5(filename, consistency_check=True, imas_version=omas_rcparams['default_imas_version'], cls=ODS, hsds=False):
"""
Load ODS or ODC from HDF5

Expand All @@ -129,13 +141,18 @@ def load_omas_h5(filename, consistency_check=True, imas_version=omas_rcparams['d

:param cls: class to use for loading the data

:param hsds: use HSDS (HDF5 in the remote server)

:return: OMAS data set
"""
import h5py
if hsds:
import h5pyd as h5py
else:
import h5py

ods = cls(imas_version=imas_version, consistency_check=False)
with h5py.File(filename, 'r') as data:
convertDataset(ods, data)
convertDataset(ods, data, hsds=hsds)
ods.consistency_check = consistency_check
return ods

Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ boto3 # required
matplotlib # required
scipy # required
h5py # required
h5pyd # required
pymongo # required
dnspython # required
xmltodict # required
Expand Down
Loading