diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml
index 4db8d04..e5696a0 100644
--- a/.github/workflows/pylint.yml
+++ b/.github/workflows/pylint.yml
@@ -2,30 +2,27 @@ name: linting
 
 on: [push]
 
+env:
+  UV_SYSTEM_PYTHON: true
+
 jobs:
   linting:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Set up Python 3.10
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
         with:
           python-version: "3.10"
-      - name: Install dependencies
-        run: |
-          git submodule sync --recursive
-          git submodule update --init --recursive --jobs=4
-          python -m pip install --upgrade pip
       - name: Install package
         run: |
-          python -m pip install --no-deps .
-      - name: Install requirements
-        run: |
-          python -m pip install -r dev-requirements.txt
-      - name: ruff
+          curl -LsSf https://astral.sh/uv/install.sh | sh
+          uv pip install --no-deps .
+          uv pip install -r dev-requirements.txt
+      - name: ruff check
         run: |
-          ruff pynxtools_mpes tests
-      - name: ruff formatting
+          ruff check pynxtools_mpes tests
+      - name: ruff format
         run: |
           ruff format --check pynxtools_mpes tests
       - name: mypy
diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml
index 2080283..4efeddb 100644
--- a/.github/workflows/pytest.yml
+++ b/.github/workflows/pytest.yml
@@ -9,6 +9,9 @@ on:
   pull_request:
     branches: [main]
 
+env:
+  UV_SYSTEM_PYTHON: true
+
 jobs:
   pytest:
     runs-on: ubuntu-latest
@@ -18,20 +21,15 @@ jobs:
         python_version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
 
     steps:
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-          submodules: recursive
+      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python_version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python_version }}
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
       - name: Install package
         run: |
-          pip install ".[dev]"
+          curl -LsSf https://astral.sh/uv/install.sh | sh
+          uv pip install ".[dev]"
       - name: Test with pytest
         run: |
           pytest tests
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..81a6893
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,9 @@
+repos:
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.5.5
+    hooks:
+      # Run the linter.
+      - id: ruff
+      # Run the formatter.
+      - id: ruff-format
\ No newline at end of file
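Note: with the new .pre-commit-config.yaml in place, the ruff hooks can be enabled locally. A minimal sketch, assuming the pre-commit tool itself is installed separately (for example with "uv pip install pre-commit"):

  pre-commit install          # register the git hook in .git/hooks
  pre-commit run --all-files  # run the ruff linter and formatter once over the whole tree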
str, mode: str = "r") -> xr.DataArray: f"ax{axis}" ].attrs["unit"] except (KeyError, TypeError): - xarray[bin_names[axis]].attrs["unit"] = DEFAULT_UNITS[bin_names[axis]] + xarray[bin_names[axis]].attrs["unit"] = "" try: xarray.attrs["units"] = h5_file["binned"]["BinnedData"].attrs["units"] xarray.attrs["long_name"] = h5_file["binned"]["BinnedData"].attrs[ "long_name" ] except (KeyError, TypeError): - xarray.attrs["units"] = "counts" - xarray.attrs["long_name"] = "photoelectron counts" + xarray.attrs["units"] = "" + xarray.attrs["long_name"] = "" if metadata is not None: xarray.attrs["metadata"] = metadata @@ -169,107 +144,7 @@ def iterate_dictionary(dic, key_string): if not len(keys) == 1: return iterate_dictionary(dic[keys[0]], keys[1]) else: - raise KeyError - return None - - -CONVERT_DICT = { - "Instrument": "INSTRUMENT[instrument]", - "Analyzer": "ELECTRONANALYSER[electronanalyser]", - "Manipulator": "MANIPULATOR[manipulator]", - "Beam": "beamTYPE[beam]", - "unit": "@units", - "Sample": "SAMPLE[sample]", - "Source": "sourceTYPE[source]", - "User": "USER[user]", - "energy_resolution": "energy_resolution/resolution", - "momentum_resolution": "RESOLUTION[momentum_resolution]/resolution", - "temporal_resolution": "RESOLUTION[temporal_resolution]/resolution", - "spatial_resolution": "RESOLUTION[spatial_resolution]/resolution", - "angular_resolution": "RESOLUTION[angular_resolution]/resolution", - "sample_temperature": "temperature_sensor/value", - "drain_current": "drain_current_amperemeter/value", - "photon_energy": "energy", -} - -REPLACE_NESTED = { - "SAMPLE[sample]/chemical_formula": "SAMPLE[sample]/SUBSTANCE[substance]/molecular_formula_hill", - "sourceTYPE[source]/Probe": "sourceTYPE[source_probe]", - "sourceTYPE[source]/Pump": "sourceTYPE[source_pump]", - "beamTYPE[beam]/Probe": "beamTYPE[beam_probe]", - "beamTYPE[beam]/Pump": "beamTYPE[beam_pump]", - "sample_history": "history/notes/description", - "ELECTRONANALYSER[electronanalyser]/RESOLUTION[energy_resolution]": ( - "ELECTRONANALYSER[electronanalyser]/energy_resolution" - ), - "ELECTRONANALYSER[electronanalyser]/RESOLUTION[momentum_resolution]": ( - "ELECTRONANALYSER[electronanalyser]/momentum_resolution" - ), - "ELECTRONANALYSER[electronanalyser]/RESOLUTION[spatial_resolution]": ( - "ELECTRONANALYSER[electronanalyser]/spatial_resolution" - ), - "ELECTRONANALYSER[electronanalyser]/RESOLUTION[angular_resolution]": ( - "ELECTRONANALYSER[electronanalyser]/angular_resolution" - ), - "SAMPLE[sample]/gas_pressure": "INSTRUMENT[instrument]/pressure_gauge/value", - "SAMPLE[sample]/temperature": ( - "INSTRUMENT[instrument]/MANIPULATOR[manipulator]/temperature_sensor/value" - ), -} - - -def handle_h5_and_json_file(file_paths, objects): - """Handle h5 or json input files.""" - x_array_loaded = xr.DataArray() - config_file_dict = {} - eln_data_dict = {} - - for file_path in file_paths: - try: - file_extension = file_path[file_path.rindex(".") :] - except ValueError as exc: - raise ValueError( - f"The file path {file_path} must have an extension.", - ) from exc - - extentions = [".h5", ".json", ".yaml", ".yml"] - if file_extension not in extentions: - print( - f"WARNING \n" - f"The reader only supports files of type {extentions}, " - f"but {file_path} does not match.", - ) - - if not os.path.exists(file_path): - raise FileNotFoundError( - errno.ENOENT, - os.strerror(errno.ENOENT), - file_path, - ) - - if file_extension == ".h5": - x_array_loaded = h5_to_xarray(file_path) - elif file_extension == ".json": - config_file_dict = 
parse_flatten_json(file_path) - elif file_extension in [".yaml", ".yml"]: - with open(file_path, encoding="utf-8") as feln: - eln_data_dict = flatten_and_replace( - FlattenSettings( - dic=yaml.safe_load(feln), - convert_dict=CONVERT_DICT, - replace_nested=REPLACE_NESTED, - ) - ) - - if objects is not None: - # For the case of a single object - assert isinstance( - objects, - xr.core.dataarray.DataArray, - ), "The given object must be an xarray" - x_array_loaded = objects - - return x_array_loaded, config_file_dict, eln_data_dict + return None def rgetattr(obj, attr): @@ -285,128 +160,93 @@ def _getattr(obj, attr): return reduce(_getattr, [obj] + attr.split(".")) -def fill_data_indices_in_config(config_file_dict, x_array_loaded): - """Add data indices key value pairs to the config_file - dictionary from the xarray dimensions if not already - present. - """ - for key in list(config_file_dict): - if "*" in key: - value = config_file_dict[key] - for dim in x_array_loaded.dims: - new_key = key.replace("*", dim) - new_value = value.replace("*", dim) - - if ( - new_key not in config_file_dict.keys() - and new_value not in config_file_dict.values() - ): - config_file_dict[new_key] = new_value - - config_file_dict.pop(key) +class MPESReader(MultiFormatReader): + """MPES-specific reader class""" + supported_nxdls = ["NXmpes", "NXmpes_arpes"] + config_file: Optional[str] = None + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.eln_data = None + + self.extensions = { + ".yml": self.handle_eln_file, + ".yaml": self.handle_eln_file, + ".json": self.set_config_file, + ".h5": self.handle_hdf5_file, + ".hdf5": self.handle_hdf5_file, + } + + def handle_hdf5_file(self, file_path: str) -> Dict[str, Any]: + """Handle hdf5 file""" + self.data_xarray = h5_to_xarray(file_path) + + return {} + + def set_config_file(self, file_path: str) -> Dict[str, Any]: + if self.config_file is not None: + logger.info(f"Config file already set. Skipping the new file {file_path}.") + self.config_file = file_path + return {} + + def handle_eln_file(self, file_path: str) -> Dict[str, Any]: + self.eln_data = parse_yml( + file_path, + parent_key="/ENTRY", + ) + + return {} + + def get_eln_data(self, key: str, path: str) -> Any: + """Returns data from the given eln path.""" + if self.eln_data is None: + return None + + return self.eln_data.get(path) + + def handle_objects(self, objects: Tuple[Any]) -> Dict[str, Any]: + if isinstance(objects, xr.DataArray): + # Should normally be a tuple, but in the + # past a single xarray object was passed. + # This if-clause exists for backwards compatibility + self.data_xarray = objects + return {} + if ( + isinstance(objects, tuple) + and len(objects) > 0 + and isinstance(objects[0], xr.DataArray) + ): + self.data_xarray = objects[0] + return {} + + logger.info( + f"Error while reading objects: {objects} does not contain an xarray object." + " Skipping the objects." 
+ ) + return {} + + def get_data(self, key: str, path: str) -> Any: + try: + value = rgetattr(obj=self.data_xarray, attr=path) + if path.split("/")[-1] == "@axes": + return list(value) + return value -class MPESReader(BaseReader): - """MPES-specific reader class""" + except ValueError: + logger.warning(f"Incorrect axis name corresponding to the path {path}") - # pylint: disable=too-few-public-methods + except AttributeError: + logger.warning( + "Incorrect naming syntax or the xarray doesn't " + f"contain entry corresponding to the path {path}" + ) - # Whitelist for the NXDLs that the reader supports and can process - supported_nxdls = ["NXmpes", "NXmpes_arpes"] + def get_data_dims(self, key: str, path: str) -> List[str]: + return list(map(str, self.data_xarray.dims)) - def read( # pylint: disable=too-many-branches - self, - template: dict = None, - file_paths: Tuple[str] = None, - objects: Tuple[Any] = None, - ) -> dict: - """Reads data from given file or alternatively an xarray object - and returns a filled template dictionary""" - - if not file_paths: - raise IOError("No input files were given to MPES Reader.") - - ( - x_array_loaded, - config_file_dict, - eln_data_dict, - ) = handle_h5_and_json_file(file_paths, objects) - - fill_data_indices_in_config(config_file_dict, x_array_loaded) - - optional_groups_to_remove = [] - - for key, value in config_file_dict.items(): - if isinstance(value, str) and value.startswith("!"): - optional_groups_to_remove.append(key) - value = value[1:] - - if isinstance(value, str) and ":" in value: - precursor = value.split(":")[0] - value = value[value.index(":") + 1 :] - - # Filling in the data and axes along with units from xarray - if precursor == "@data": - try: - template[key] = rgetattr( - obj=x_array_loaded, - attr=value, - ) - if key.split("/")[-1] == "@axes": - template[key] = list(template[key]) - - except ValueError: - print( - f"Incorrect axis name corresponding to " f"the path {key}", - ) - - except AttributeError: - print( - f"Incorrect naming syntax or the xarray doesn't " - f"contain entry corresponding to the path {key}", - ) - - # Filling in the metadata from xarray - elif precursor == "@attrs": - if key not in eln_data_dict: - try: # Tries to fill the metadata - template[key] = iterate_dictionary( - x_array_loaded.attrs, - value, - ) - - except KeyError: - print( - f"[info]: Path {key} not found. " - f"Skipping the entry.", - ) - - if isinstance(template.get(key), str) and template[key].startswith( - "@link:" - ): - template[key] = {"link": template[key][6:]} - else: - # Fills in the fixed metadata - template[key] = value - - # Filling in ELN metadata and overwriting the common paths by - # giving preference to the ELN metadata - for key, value in eln_data_dict.items(): - template[key] = value - - # remove groups that have required children missing - for key in optional_groups_to_remove: - if template.get(key) is None: - group_to_delete = key.rsplit("/", 1)[0] - logger.info( - f"[info]: Required element {key} not provided. 
" - f"Removing the parent group {group_to_delete}.", - ) - for temp_key in template.keys(): - if temp_key.startswith(group_to_delete): - del template[temp_key] - - return template + def get_attr(self, key: str, path: str) -> Any: + return iterate_dictionary(self.data_xarray.attrs, path) READER = MPESReader diff --git a/pyproject.toml b/pyproject.toml index 121e4b2..a1740d3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,9 +25,8 @@ classifiers = [ ] dependencies = [ "h5py>=3.6.0", - "PyYAML>=6.0", "xarray>=0.20.2", - "pynxtools>=0.5.1" + "pynxtools>=0.6.0", ] [project.urls] @@ -37,7 +36,7 @@ dependencies = [ [project.optional-dependencies] dev = [ "mypy", - "ruff==0.3.4", + "ruff==0.5.5", "pytest", "types-pyyaml", "pip-tools", diff --git a/scripts/regenerate_examples.sh b/scripts/regenerate_examples.sh index f7295b9..83bfbc1 100755 --- a/scripts/regenerate_examples.sh +++ b/scripts/regenerate_examples.sh @@ -7,7 +7,13 @@ function update_mpes_example { dataconverter xarray_saved_small_calibration.h5 config_file.json --reader $READER --nxdl $NXDL --output example.nxs } +function update_mpes_eln_example { + echo "Update mpes example with eln file" + dataconverter xarray_saved_small_calibration.h5 config_file.json eln_data.yaml --reader $READER --nxdl $NXDL --output example_eln.nxs +} + project_dir=$(dirname $(dirname $(realpath $0))) cd $project_dir/tests/data -update_mpes_example \ No newline at end of file +update_mpes_example +update_mpes_eln_example \ No newline at end of file diff --git a/tests/data/config_file.json b/tests/data/config_file.json index cf0934d..a8bfd3a 100644 --- a/tests/data/config_file.json +++ b/tests/data/config_file.json @@ -1,31 +1,31 @@ { "/@default": "entry", - "/ENTRY[entry]/@default": "data", - "/ENTRY[entry]/definition": "NXmpes", - "/ENTRY[entry]/title": "@attrs:metadata/entry_title", - "/ENTRY[entry]/start_time": "@attrs:metadata/timing/acquisition_start", - "/ENTRY[entry]/experiment_institution": "Fritz Haber Institute - Max Planck Society", - "/ENTRY[entry]/experiment_facility": "Time Resolved ARPES", - "/ENTRY[entry]/experiment_laboratory": "Clean Room 4", - "/ENTRY[entry]/entry_identifier": "@attrs:metadata/entry_identifier", - "/ENTRY[entry]/end_time": "@attrs:metadata/timing/acquisition_stop", - "/ENTRY[entry]/duration": "@attrs:metadata/timing/acquisition_duration", - "/ENTRY[entry]/duration/@units": "s", - "/ENTRY[entry]/collection_time": "@attrs:metadata/timing/collection_time", - "/ENTRY[entry]/collection_time/@units": "s", - "/ENTRY[entry]/USER[user]": { + "/ENTRY/@default": "data", + "/ENTRY/definition": "NXmpes", + "/ENTRY/title": "['@eln:/ENTRY/title', '@attrs:metadata/entry_title']", + "/ENTRY/start_time": "@attrs:metadata/timing/acquisition_start", + "/ENTRY/experiment_institution": "Fritz Haber Institute - Max Planck Society", + "/ENTRY/experiment_facility": "Time Resolved ARPES", + "/ENTRY/experiment_laboratory": "Clean Room 4", + "/ENTRY/entry_identifier": "@attrs:metadata/entry_identifier", + "/ENTRY/end_time": "@attrs:metadata/timing/acquisition_stop", + "/ENTRY/duration": "@attrs:metadata/timing/acquisition_duration", + "/ENTRY/duration/@units": "s", + "/ENTRY/collection_time": "@attrs:metadata/timing/collection_time", + "/ENTRY/collection_time/@units": "s", + "/ENTRY/USER[user]": { "name": "@attrs:metadata/user0/name", "role": "@attrs:metadata/user0/role", "affiliation": "@attrs:metadata/user0/affiliation", "address": "@attrs:metadata/user0/address", "email": "@attrs:metadata/user0/email" }, - 
"/ENTRY[entry]/INSTRUMENT[instrument]": { + "/ENTRY/INSTRUMENT[instrument]": { "name": "Time-of-flight momentum microscope equipped delay line detector, at the endstation of the high rep-rate HHG source at FHI", "name/@short_name": "TR-ARPES @ FHI", "energy_resolution": { - "resolution": 140.0, - "resolution/@units": "meV", + "resolution": "['@eln:/ENTRY/Instrument/energy_resolution', '140.0']", + "resolution/@units": "['@eln:/ENTRY/Instrument/energy_resolution/@units', 'meV']", "physical_quantity": "energy", "type": "estimated" }, @@ -100,7 +100,7 @@ } } }, - "/ENTRY[entry]/INSTRUMENT[instrument]/ELECTRONANALYSER[electronanalyser]/COLLECTIONCOLUMN[collectioncolumn]": { + "/ENTRY/INSTRUMENT[instrument]/ELECTRONANALYSER[electronanalyser]/COLLECTIONCOLUMN[collectioncolumn]": { "projection": "@attrs:metadata/instrument/analyzer/projection", "scheme": "momentum dispersive", "lens_mode": "@attrs:metadata/instrument/analyzer/lens_mode", @@ -138,14 +138,14 @@ } } }, - "/ENTRY[entry]/INSTRUMENT[instrument]/ELECTRONANALYSER[electronanalyser]/ENERGYDISPERSION[energydispersion]": { + "/ENTRY/INSTRUMENT[instrument]/ELECTRONANALYSER[electronanalyser]/ENERGYDISPERSION[energydispersion]": { "pass_energy": "@attrs:metadata/file/KTOF:Lens:TOF:V", "pass_energy/@units": "eV", "scheme": "tof", "tof_distance": 0.9, "tof_distance/@units": "m" }, - "/ENTRY[entry]/INSTRUMENT[instrument]/ELECTRONANALYSER[electronanalyser]/DETECTOR[detector]": { + "/ENTRY/INSTRUMENT[instrument]/ELECTRONANALYSER[electronanalyser]/DETECTOR[detector]": { "amplifier_type": "MCP", "detector_type": "DLD", "sensor_pixels": [ @@ -160,7 +160,7 @@ "detector_voltage": "@attrs:metadata/file/KTOF:Lens:UDLD:V", "detector_voltage/@units": "V" }, - "/ENTRY[entry]/INSTRUMENT[instrument]/sourceTYPE[source_probe]": { + "/ENTRY/INSTRUMENT[instrument]/sourceTYPE[source_probe]": { "name": "HHG @ TR-ARPES @ FHI", "probe": "photon", "type": "HHG laser", @@ -169,7 +169,7 @@ "frequency/@units": "kHz", "associated_beam": "/entry/instrument/beam_probe" }, - "/ENTRY[entry]/INSTRUMENT[instrument]/beamTYPE[beam_probe]": { + "/ENTRY/INSTRUMENT[instrument]/beamTYPE[beam_probe]": { "distance": 0.0, "distance/@units": "mm", "incident_energy": "@attrs:metadata/instrument/beam/probe/incident_energy", @@ -184,7 +184,7 @@ "extent/@units": "µm", "associated_source": "/entry/instrument/source_probe" }, - "/ENTRY[entry]/INSTRUMENT[instrument]/sourceTYPE[source_pump]": { + "/ENTRY/INSTRUMENT[instrument]/sourceTYPE[source_pump]": { "name": "OPCPA @ TR-ARPES @ FHI", "probe": "visible light", "type": "Optical Laser", @@ -193,7 +193,7 @@ "frequency/@units": "kHz", "associated_beam": "/entry/instrument/beam_pump" }, - "/ENTRY[entry]/INSTRUMENT[instrument]/beamTYPE[beam_pump]": { + "/ENTRY/INSTRUMENT[instrument]/beamTYPE[beam_pump]": { "distance": 0.0, "distance/@units": "mm", "incident_energy": "@attrs:metadata/instrument/beam/pump/incident_energy", @@ -216,7 +216,7 @@ "fluence/@units": "mJ/cm^2", "associated_source": "/entry/instrument/source_pump" }, - "/ENTRY[entry]/INSTRUMENT[instrument]/MANIPULATOR[manipulator]": { + "/ENTRY/INSTRUMENT[instrument]/MANIPULATOR[manipulator]": { "temperature_sensor": { "name": "sample_temperature", "measurement": "temperature", @@ -260,7 +260,7 @@ ] } }, - "/ENTRY[entry]/SAMPLE[sample]": { + "/ENTRY/SAMPLE[sample]": { "preparation_date": "@attrs:metadata/sample/preparation_date", "history/notes/type": "text/plain", "history/notes/description": "@attrs:metadata/sample/sample_history", @@ -344,7 +344,7 @@ ] } }, - 
"/ENTRY[entry]/PROCESS_MPES[process]/DISTORTION[distortion]": { + "/ENTRY/PROCESS_MPES[process]/DISTORTION[distortion]": { "symmetry": "@attrs:metadata/momentum_correction/rotsym", "symmetry/@units": "", "original_centre": "@attrs:metadata/momentum_correction/pcent", @@ -356,7 +356,7 @@ "rdeform_field": "@attrs:metadata/momentum_correction/rdeform_field", "rdeform_field/@units": "" }, - "/ENTRY[entry]/PROCESS_MPES[process]/REGISTRATION[registration]": { + "/ENTRY/PROCESS_MPES[process]/REGISTRATION[registration]": { "depends_on": "/entry/process/registration/tranformations/rot_z", "TRANSFORMATIONS[tranformations]": { "AXISNAME[trans_x]": "@attrs:metadata/momentum_correction/adjust_params/xtrans", @@ -377,7 +377,7 @@ "AXISNAME[rot_z]/@depends_on": "trans_y" } }, - "/ENTRY[entry]/PROCESS_MPES[process]/energy_calibration":{ + "/ENTRY/PROCESS_MPES[process]/energy_calibration":{ "coefficients": "@attrs:metadata/energy_correction/calibration/coeffs", "coefficients/@units": "", "fit_function": "@attrs:metadata/energy_correction/calibration/fit_function", @@ -387,7 +387,7 @@ "calibrated_axis/@units": "eV", "physical_quantity": "energy" }, - "/ENTRY[entry]/PROCESS_MPES[process]/CALIBRATION[kx_calibration]": { + "/ENTRY/PROCESS_MPES[process]/CALIBRATION[kx_calibration]": { "scaling": "@attrs:metadata/momentum_correction/calibration/scale_kx", "scaling/@units": "", "offset": "@attrs:metadata/momentum_correction/offset_kx", @@ -396,7 +396,7 @@ "calibrated_axis/@units": "1/angstrom", "physical_quantity": "momentum" }, - "/ENTRY[entry]/PROCESS_MPES[process]/CALIBRATION[ky_calibration]": { + "/ENTRY/PROCESS_MPES[process]/CALIBRATION[ky_calibration]": { "scaling": "@attrs:metadata/momentum_correction/calibration/scale_ky", "scaling/@units": "", "offset": "@attrs:metadata/momentum_correction/offset_ky", @@ -405,7 +405,7 @@ "calibrated_axis/@units": "1/angstrom", "physical_quantity": "momentum" }, - "/ENTRY[entry]/data": { + "/ENTRY/data": { "@axes": "@data:dims", "AXISNAME_indices[@*_indices]": "@data:*.index", "@signal": "data", diff --git a/tests/data/eln_data.yaml b/tests/data/eln_data.yaml index d6a61b1..19d9af6 100644 --- a/tests/data/eln_data.yaml +++ b/tests/data/eln_data.yaml @@ -1,109 +1,8 @@ -title: Valence Band Dynamics - 1030 nm linear p-polarized pump, 0.6 mJ/cm2 absorbed fluence +title: Title from ELN file Instrument: energy_resolution: - unit: meV - value: 140.0 - momentum_resolution: - unit: 1/angstrom - value: 0.08 - temporal_resolution: - unit: fs - value: 35.0 - Analyzer: - energy_resolution: - unit: eV - value: 110.0 - momentum_resolution: - unit: 1/angstrom - value: 0.08 - slow_axes: delay - spatial_resolution: - unit: µm - value: 10.0 - Manipulator: - sample_temperature: - unit: K - value: 300.0 - Source: - Probe: - frequency: - unit: KHz - value: 500.0 - photon_energy: - unit: eV - value: 21.7 - Pump: - frequency: - unit: KHz - value: 500.0 - photon_energy: - unit: eV - value: 1.55 - Beam: - Probe: - extent: - unit: µm - value: - - 80.0 - - 80.0 - incident_energy: - unit: eV - value: 21.7 - incident_energy_spread: - unit: eV - value: 0.11 - incident_polarization: - - 1 - - 1 - - 0 - - 0 - pulse_duration: - unit: fs - value: 20.0 - Pump: - extent: - unit: µm - value: - - 230.0 - - 265.0 - incident_energy: - unit: eV - value: 1.55 - incident_energy_spread: - unit: eV - value: 0.08 - incident_polarization: - - 1 - - -1 - - 0 - - 0 - incident_wavelength: - unit: nm - value: 800.0 - average_power: - unit: mW - value: 300.0 - pulse_energy: - unit: µJ - value: 0.6 - fluence: - unit: mJ / cm 
** 2 - value: 0.15 - pulse_duration: - unit: fs - value: 35.0 + unit: eV + value: 0.14 Sample: - chemical_formula: WSe2 - description: Sample - name: WSe2 Single Crystal - preparation_date: "2019-01-13T09:00:00+00:00" - pressure: - unit: bar - value: 5.0e-14 - sample_history: Cleaved -User: - address: Faradayweg 4-6, 14915 Berlin - affiliation: Fritz Haber Institute of the Max Planck Society - email: maklar@fhi-berlin.mpg.de - name: Julian Maklar - role: Principal Investigator + name: My ELN sample name + preparation_date: "2019-01-13T09:00:00+00:00" \ No newline at end of file diff --git a/tests/data/example.nxs b/tests/data/example.nxs index f44ff44..cbf0321 100644 Binary files a/tests/data/example.nxs and b/tests/data/example.nxs differ diff --git a/tests/data/example_eln.nxs b/tests/data/example_eln.nxs new file mode 100644 index 0000000..d7baa11 Binary files /dev/null and b/tests/data/example_eln.nxs differ diff --git a/tests/data/xarray_saved_small_calibration.h5 b/tests/data/xarray_saved_small_calibration.h5 index 8fe2abc..3cfaaf9 100644 Binary files a/tests/data/xarray_saved_small_calibration.h5 and b/tests/data/xarray_saved_small_calibration.h5 differ diff --git a/tests/test_reader.py b/tests/test_reader.py index 1920a45..e54d589 100644 --- a/tests/test_reader.py +++ b/tests/test_reader.py @@ -2,10 +2,8 @@ Basic example based test for the stm reader """ -import os from pathlib import Path -import pynxtools.dataconverter.convert as dataconverter from pynxtools.testing.nexus_conversion import ReaderTest @@ -30,18 +28,23 @@ def test_nexus_conversion(caplog, tmp_path): test.check_reproducibility_of_nexus() -def test_eln_data(tmp_path): - """Check if the subsections in the eln_data.yml file work.""" +def test_conversion_w_eln_data(caplog, tmp_path): + """ + Tests the conversion with additional ELN data + """ + caplog.clear() dir_path = Path(__file__).parent / "data" - dataconverter.convert( - input_file=( + test = ReaderTest( + nxdl="NXmpes", + reader_name="mpes", + files_or_dir=[ str(dir_path / "xarray_saved_small_calibration.h5"), str(dir_path / "config_file.json"), str(dir_path / "eln_data.yaml"), - ), - reader="mpes", - nxdl="NXmpes", - output=os.path.join(tmp_path, "mpes.small_test.nxs"), - skip_verify=False, - ignore_undocumented=False, + str(dir_path / "example_eln.nxs"), + ], + tmp_path=tmp_path, + caplog=caplog, ) + test.convert_to_nexus(caplog_level="WARNING", ignore_undocumented=False) + test.check_reproducibility_of_nexus()
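Note: the new example_eln.nxs reference produced by scripts/regenerate_examples.sh is what test_conversion_w_eln_data compares against. A minimal local check, assuming the dev extras are installed:

  uv pip install ".[dev]"
  ./scripts/regenerate_examples.sh   # refreshes example.nxs and example_eln.nxs in tests/data
  pytest tests                       # runs the conversion and reproducibility checks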