diff --git a/poetry.lock b/poetry.lock index 4b9e5bd..a558613 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "alabaster" @@ -1029,17 +1029,14 @@ optional = false python-versions = ">=3.7" files = [ {file = "fabio-2023.10.0-cp310-cp310-macosx_10_9_arm64.whl", hash = "sha256:d459ad935a7ecb52182958fcac82357c3a6a40aad99c1b18bbd2c3e32bc0e11c"}, - {file = "fabio-2023.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3db70d9675c595db3f80aa207e324987beb8b751ba0decbb9096fde7164714ca"}, {file = "fabio-2023.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e9516ba9653cb9fbeee4566101c3e5319fe37a5fcc4f336ced65a49cf2031e03"}, {file = "fabio-2023.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a2882c969e3776ed39749a8f287f57750604df0f496f6b40cb6c23bab237171"}, {file = "fabio-2023.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:4c441fdd0cc55a6dbb3e858f99ed7083a34c4e6b7ffab2cd4c4988affa07a51f"}, {file = "fabio-2023.10.0-cp311-cp311-macosx_10_9_arm64.whl", hash = "sha256:aca8edecdde4d8ef94491aa4e54c2dcd3a56915d6828f2de42e16469347e6cdb"}, - {file = "fabio-2023.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1e86de26183d5be77e91ec53d8fddfb31896efa6bdfcfe2078737a731f3f7277"}, {file = "fabio-2023.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ea70ec9c2a8e969e867d24cd694c98d89195e3fef895c841b933635e15cc32a"}, {file = "fabio-2023.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1cbe926dd5c9d8cad321876066cc5aa53a92221e2a5556e477e5243193eb39"}, {file = "fabio-2023.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a9975f12d2b117910da8f8c1e42e4527f530b5a0045f35813e8cb2a1ebb4586e"}, {file = "fabio-2023.10.0-cp312-cp312-macosx_10_9_arm64.whl", hash = "sha256:13e26f9daddb542316690370a5eba486267a4efb3b09c1c0cbaec650a39409e3"}, - {file = "fabio-2023.10.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ae09f12aafd9f6187f9b001b96ecbb44b0b041e5f6c42037a8f65b882e9d2a49"}, {file = "fabio-2023.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7d58f315a957132c5477b313b48a4e0a0f7c89139259f38bb7c990486ba4676"}, {file = "fabio-2023.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:d6c8784fa3677a0634306fb400edef01f6f15222b6eb843c2b054838bd268839"}, {file = "fabio-2023.10.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f6ca9c22c27bc0940b825c34d4e73f067c914b4257f19cb6f0ba7a9599625e9e"}, @@ -1575,6 +1572,16 @@ zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] +[[package]] +name = "imutils" +version = "0.5.4" +description = "A series of convenience functions to make basic image processing functions such as translation, rotation, resizing, skeletonization, displaying Matplotlib images, sorting contours, detecting edges, and much more easier with OpenCV and both Python 2.7 and Python 3." 
+optional = false +python-versions = "*" +files = [ + {file = "imutils-0.5.4.tar.gz", hash = "sha256:03827a9fca8b5c540305c0844a62591cf35a0caec199cb0f2f0a4a0fb15d8f24"}, +] + [[package]] name = "iniconfig" version = "2.0.0" @@ -3396,6 +3403,32 @@ numba = ["llvmlite (<0.32.0)", "numba", "numba (<0.49.0)"] scipy = ["scipy"] testing = ["pytest", "pytest-cov"] +[[package]] +name = "opencv-python" +version = "4.9.0.80" +description = "Wrapper package for OpenCV python bindings." +optional = false +python-versions = ">=3.6" +files = [ + {file = "opencv-python-4.9.0.80.tar.gz", hash = "sha256:1a9f0e6267de3a1a1db0c54213d022c7c8b5b9ca4b580e80bdc58516c922c9e1"}, + {file = "opencv_python-4.9.0.80-cp37-abi3-macosx_10_16_x86_64.whl", hash = "sha256:7e5f7aa4486651a6ebfa8ed4b594b65bd2d2f41beeb4241a3e4b1b85acbbbadb"}, + {file = "opencv_python-4.9.0.80-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:71dfb9555ccccdd77305fc3dcca5897fbf0cf28b297c51ee55e079c065d812a3"}, + {file = "opencv_python-4.9.0.80-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b34a52e9da36dda8c151c6394aed602e4b17fa041df0b9f5b93ae10b0fcca2a"}, + {file = "opencv_python-4.9.0.80-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4088cab82b66a3b37ffc452976b14a3c599269c247895ae9ceb4066d8188a57"}, + {file = "opencv_python-4.9.0.80-cp37-abi3-win32.whl", hash = "sha256:dcf000c36dd1651118a2462257e3a9e76db789a78432e1f303c7bac54f63ef6c"}, + {file = "opencv_python-4.9.0.80-cp37-abi3-win_amd64.whl", hash = "sha256:3f16f08e02b2a2da44259c7cc712e779eff1dd8b55fdb0323e8cab09548086c0"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.21.0", markers = "python_version <= \"3.9\" and platform_system == \"Darwin\" and platform_machine == \"arm64\" and python_version >= \"3.8\""}, + {version = ">=1.19.3", markers = "platform_system == \"Linux\" and platform_machine == \"aarch64\" and python_version >= \"3.8\" and python_version < \"3.10\" or python_version > \"3.9\" and python_version < \"3.10\" or python_version >= \"3.9\" and platform_system != \"Darwin\" and python_version < \"3.10\" or python_version >= \"3.9\" and platform_machine != \"arm64\" and python_version < \"3.10\""}, + {version = ">=1.17.3", markers = "(platform_system != \"Darwin\" and platform_system != \"Linux\") and python_version >= \"3.8\" and python_version < \"3.9\" or platform_system != \"Darwin\" and python_version >= \"3.8\" and python_version < \"3.9\" and platform_machine != \"aarch64\" or platform_machine != \"arm64\" and python_version >= \"3.8\" and python_version < \"3.9\" and platform_system != \"Linux\" or (platform_machine != \"arm64\" and platform_machine != \"aarch64\") and python_version >= \"3.8\" and python_version < \"3.9\""}, + {version = ">=1.21.4", markers = "python_version >= \"3.10\" and platform_system == \"Darwin\" and python_version < \"3.11\""}, + {version = ">=1.21.2", markers = "platform_system != \"Darwin\" and python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.23.5", markers = "python_version >= \"3.11\""}, +] + [[package]] name = "orix" version = "0.11.1" @@ -4341,7 +4374,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = 
"PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -4349,16 +4381,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -4375,7 +4399,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -4383,7 +4406,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -5576,7 +5598,6 @@ description = "Functions for 3D coordinate transformations" optional = false python-versions = ">=3.6" files = [ - {file = "transforms3d-0.4.1-py3-none-any.whl", hash = "sha256:aea08776c1c915c8b424418994202aced8e46301c375ce63423d14f1d0045aa7"}, {file = "transforms3d-0.4.1.tar.gz", hash = "sha256:31c755266a0b0a222488b8d039f6f325cf486c52728c03e307ce047b2fad1179"}, ] @@ -5873,4 +5894,4 @@ notebook = ["ipykernel", "jupyter", "jupyterlab-h5web"] [metadata] lock-version = "2.0" python-versions = ">=3.8, <3.12" -content-hash = "c4fe90b5828072083f2eafe88745208ae58005be3e432f48ae408ded2d31bd94" +content-hash = "b8d20fe0458ae230391a8297e82c9e41fdaaa50a87a5f6efed626db40ed08d6a" diff --git a/pyproject.toml b/pyproject.toml index a053dca..bf02e42 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,10 +20,12 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.8, <3.12" h5py = ">=3.6.0" +imutils = ">=0.5.4" ipympl = ">=0.9.1" ipywidgets = ">=8.1.1" matplotlib = ">=3.5.1" numpy = ">=1.21.6" +opencv-python = ">=4.8.1.78" pynxtools = ">=0.0.9" python-dateutil = ">=2.8.2" pyyaml = ">=6.0" diff --git a/specsanalyzer/core.py b/specsanalyzer/core.py index 7873700..c432292 100755 --- a/specsanalyzer/core.py +++ b/specsanalyzer/core.py @@ -8,6 +8,7 @@ from typing import Tuple from typing import Union +import imutils import ipywidgets as ipw import matplotlib import matplotlib.pyplot as plt @@ -102,15 +103,14 @@ def convert_image( Args: raw_img (np.ndarray): Raw image data, numpy 2d matrix - lens_mode (str): analzser lens mode, check calib2d for a list - of modes Camelcase naming convention e.g. "WideAngleMode" + lens_mode (str): analzser lens mode, check calib2d for a list of modes CamelCase naming + convention e.g. 
"WideAngleMode" kinetic_energy (float): set analyser kinetic energy pass_energy (float): set analyser pass energy work_function (float): set analyser work function Returns: - xr.DataArray: xarray containg the corrected data and kinetic - and angle axis + xr.DataArray: xarray containg the corrected data and kinetic and angle axis """ apply_fft_filter = kwds.pop( @@ -131,7 +131,11 @@ def convert_image( else: img = raw_img - # TODO add image rotation + rotation_angle = kwds.pop("rotation_angle", self._config.get("rotation_angle", 0)) + + if rotation_angle: + img_rotated = imutils.rotate(img, angle=rotation_angle) + img = img_rotated # look for the lens mode in the dictionary try: @@ -434,7 +438,7 @@ def crop_tool( value=vline_range, min=data_array.Ekin[0], max=data_array.Ekin[-1], - step=0.1, + step=0.01, ) hline_slider = ipw.FloatRangeSlider( description="Angle", @@ -507,6 +511,7 @@ def cropit(val): # pylint: disable=unused-argument - data_array.coords[data_array.dims[0]][0] ) ).item() + self._config["crop"] = True ax.cla() self._data_array.plot(ax=ax, add_colorbar=False) diff --git a/specsscan/core.py b/specsscan/core.py index 6e58dec..5198705 100755 --- a/specsscan/core.py +++ b/specsscan/core.py @@ -5,16 +5,18 @@ import os import pathlib from importlib.util import find_spec +from logging import warn from pathlib import Path from typing import Any from typing import Dict +from typing import List from typing import Sequence from typing import Union import matplotlib import numpy as np import xarray as xr - +from tqdm.auto import tqdm from specsanalyzer import SpecsAnalyzer from specsanalyzer.config import parse_config @@ -23,6 +25,7 @@ from specsanalyzer.io import to_tiff from specsscan.helpers import find_scan from specsscan.helpers import get_coords +from specsscan.helpers import get_scan_path from specsscan.helpers import handle_meta from specsscan.helpers import load_images from specsscan.helpers import parse_info_to_dict @@ -113,12 +116,7 @@ def load_scan( self, scan: int, path: Union[str, Path] = "", - iterations: Union[ - np.ndarray, - slice, - Sequence[int], - Sequence[slice], - ] = None, + iterations: Union[np.ndarray, slice, Sequence[int], Sequence[slice]] = None, metadata: dict = None, **kwds, ) -> xr.DataArray: @@ -147,40 +145,23 @@ def load_scan( and optionally a third scanned axis (for ex., delay, temperature) as coordinates. 
""" - - if path: - path = Path(path).joinpath(str(scan).zfill(4)) - if not path.is_dir(): - raise FileNotFoundError( - f"The provided path {path} was not found.", - ) - else: - # search for the given scan using the default path - path = Path(self._config["data_path"]) - # path_scan = sorted(path.glob(f"20[1,2][9,0-9]/*/*/Raw Data/{scan}")) - path_scan_list = find_scan(path, scan) - if not path_scan_list: - raise FileNotFoundError( - f"Scan number {scan} not found", - ) - path = path_scan_list[0] - - df_lut = parse_lut_to_df(path) # TODO: storing metadata from df_lut + scan_path = get_scan_path(path, scan, self._config["data_path"]) + df_lut = parse_lut_to_df(scan_path) data = load_images( - scan_path=path, + scan_path=scan_path, df_lut=df_lut, iterations=iterations, tqdm_enable_nested=self._config["enable_nested_progress_bar"], ) - self._scan_info = parse_info_to_dict(path) + self._scan_info = parse_info_to_dict(scan_path) config_meta = copy.deepcopy(self.config) config_meta["spa_params"].pop("calib2d_dict") loader_dict = { "iterations": iterations, - "scan_path": path, + "scan_path": scan_path, "raw_data": data, "convert_config": config_meta["spa_params"], } @@ -193,31 +174,47 @@ def load_scan( self._scan_info["WorkFunction"], ) - xr_list = [] - for image in data: - xr_list.append( - self.spa.convert_image( - image, - lens_mode, - kin_energy, - pass_energy, - work_function, - **kwds, - ), - ) - self.spa.print_msg = False - self.spa.print_msg = True - coords, dim = get_coords( - scan_path=path, + scan_path=scan_path, scan_type=scan_type, scan_info=self._scan_info, df_lut=df_lut, ) if scan_type == "single": - res_xarray = xr_list[0] + res_xarray = self.spa.convert_image( + raw_img=data[0], + lens_mode=lens_mode, + kinetic_energy=kin_energy, + pass_energy=pass_energy, + work_function=work_function, + **kwds, + ) + elif scan_type == "voltage": # and dim == "kinetic energy": + res_xarray = self.process_sweep_scan( + raw_data=data, + kinetic_energy=coords, + pass_energy=pass_energy, + lens_mode=lens_mode, + work_function=work_function, + **kwds, + ) else: + xr_list = [] + for image in data: + xr_list.append( + self.spa.convert_image( + raw_img=image, + lens_mode=lens_mode, + kinetic_energy=kin_energy, + pass_energy=pass_energy, + work_function=work_function, + **kwds, + ), + ) + self.spa.print_msg = False + self.spa.print_msg = True + res_xarray = xr.concat( xr_list, dim=xr.DataArray( @@ -251,6 +248,7 @@ def load_scan( "/entry/sample/transformations/sample_azimuth": "Azimuth", } + # store link information for resolved axis coordinates for k, v in depends_dict.items(): if v in axis_dict: self._scan_info[axis_dict[v]] = "@link:/entry/data/" + k @@ -278,16 +276,28 @@ def load_scan( return res_xarray - def crop_tool(self, **kwds): + def crop_tool(self, scan: int = None, path: Union[Path, str] = "", **kwds): """ Croping tool interface to crop_tool method of the SpecsAnalyzer class. 
""" matplotlib.use("module://ipympl.backend_nbagg") - try: - image = self.metadata["loader"]["raw_data"][0] - except KeyError as exc: - raise ValueError("No image loaded, load image first!") from exc + if scan is not None: + scan_path = get_scan_path(path, scan, self._config["data_path"]) + df_lut = parse_lut_to_df(scan_path) + + data = load_images( + scan_path=scan_path, + tqdm_enable_nested=self._config["enable_nested_progress_bar"], + ) + image = data[0] + self._scan_info = parse_info_to_dict(scan_path) + else: + try: + image = self.metadata["loader"]["raw_data"][0] + except KeyError as exc: + raise ValueError("No image loaded, load image first!") from exc + self.spa.crop_tool( image, self._scan_info["LensMode"], @@ -321,40 +331,25 @@ def check_scan( Returns: A 3-D numpy array of dimensions (Ekin, K, Iterations) """ + scan_path = get_scan_path(path, scan, self._config["data_path"]) + df_lut = parse_lut_to_df(scan_path) - if path: - path = Path(path).joinpath(str(scan).zfill(4)) - if not path.is_dir(): - raise FileNotFoundError( - f"The provided path {path} was not found.", - ) - else: - # search for the given scan using the default path - path = Path(self._config["data_path"]) - path_scan_list = find_scan(path, scan) - if not path_scan_list: - raise FileNotFoundError( - f"Scan number {scan} not found", - ) - path = path_scan_list[0] - - df_lut = parse_lut_to_df(path) - - data = load_images( - scan_path=path, + data, df_lut = load_images( + scan_path=scan_path, df_lut=df_lut, delays=delays, tqdm_enable_nested=self._config["enable_nested_progress_bar"], ) - self._scan_info = parse_info_to_dict(path) + + self._scan_info = parse_info_to_dict(scan_path) config_meta = copy.deepcopy(self.config) config_meta["spa_params"].pop("calib2d_dict") loader_dict = { "delays": delays, - "scan_path": path, + "scan_path": scan_path, "raw_data": load_images( # AVG data - path, + scan_path, df_lut, ), "convert_config": config_meta["spa_params"], @@ -388,7 +383,7 @@ def check_scan( self.spa.print_msg = True dims = get_coords( - scan_path=path, + scan_path=scan_path, scan_type=scan_type, scan_info=self._scan_info, df_lut=df_lut, @@ -509,3 +504,85 @@ def save( raise NotImplementedError( f"Unrecognized file format: {extension}.", ) + + def process_sweep_scan( + self, + raw_data: List[np.ndarray], + kinetic_energy: np.ndarray, + pass_energy: float, + lens_mode: str, + work_function: float, + **kwds, + ) -> xr.DataArray: + """Process sweep scan by interpolating each frame onto a common grid given by the voltage + step, and summing over all frames. + + Args: + raw_data (List[np.ndarray]): List of raw data images + kinetic_energy (np.ndarray): Array of analyzer set kinetic energy values + pass_energy (float): set analyser pass energy + lens_mode (str): analzser lens mode, check calib2d for a list of modes CamelCase naming + convention e.g. 
"WideAngleMode" + work_function (float): set analyser work function + + Returns: + xr.DataArray: Converted sweep scan + """ + ekin_step = kinetic_energy[1] - kinetic_energy[0] + if not (np.diff(kinetic_energy) == ekin_step).all(): + warn( + "Conversion of sweep scans with non-equidistant energy steps " + "might produce wrong results!", + ) + + # convert first image + converted = self.spa.convert_image( + raw_data[0], + lens_mode, + kinetic_energy[0], + pass_energy, + work_function, + **kwds, + ) + # check for crop parameters + if ( + not {"ang_range_min", "ang_range_max", "ek_range_min", "ek_range_max"}.issubset( + set(self.spa.config.keys()), + ) + or not self.spa.config["crop"] + ): + warn("No valid cropping parameters found, consider using crop_tool() to set.") + + e_step = converted.Ekin[1] - converted.Ekin[0] + e0 = converted.Ekin[-1] - ekin_step + e1 = converted.Ekin[0] + kinetic_energy[-1] - kinetic_energy[0] + data = xr.DataArray( + data=np.zeros((len(converted.Angle), len(np.arange(e0, e1, e_step)))), + coords={"Angle": converted.Angle, "Ekin": np.arange(e0, e1, e_step)}, + dims=["Angle", "Ekin"], + ) + for i, ekin in enumerate(tqdm(kinetic_energy)): + self.spa.print_msg = False + converted = self.spa.convert_image( + raw_img=raw_data[i], + lens_mode=lens_mode, + kinetic_energy=ekin, + pass_energy=pass_energy, + work_function=work_function, + **kwds, + ) + energies = converted.Ekin.where( + (converted.Ekin >= data.Ekin[0]) & (converted.Ekin < data.Ekin[-1]), + 0, + ) + energy_indices = np.argwhere(energies.values).squeeze() + # filling target array using "nearest" method + target_energy = data.Ekin.sel(Ekin=converted.Ekin[energies > 0], method="nearest") + target_indices = np.argwhere(np.in1d(data.Ekin.values, target_energy.values)).squeeze() + data[:, target_indices] += converted[:, energy_indices].values + + self.spa.print_msg = True + # Strip first and last energy points, as they are not fully filled + data = data[:, 1:-1] + + return data diff --git a/specsscan/helpers.py b/specsscan/helpers.py index a39614d..e1e1e87 100644 --- a/specsscan/helpers.py +++ b/specsscan/helpers.py @@ -19,6 +19,40 @@ from specsanalyzer.config import complete_dictionary # name can be generalized +def get_scan_path(path: Union[Path, str], scan: int, basepath: Union[Path, str]) -> Path: + """Returns the path to the given scan. + + Args: + path (Union[Path, str]): Path under which to search. If empty, the basepath will be queried + scan (int): Scan number + basepath (Union[Path, str]): Default base path to search for scans under + + Raises: + FileNotFoundError: Raised if the path or scan cannot be found. 
+ + Returns: + Path: Path object to the given scan directory + """ + if path: + path = Path(path).joinpath(str(scan).zfill(4)) + if not path.is_dir(): + raise FileNotFoundError( + f"The provided path {path} was not found.", + ) + else: + # search for the given scan using the default path + path = Path(basepath) + # path_scan = sorted(path.glob(f"20[1,2][9,0-9]/*/*/Raw Data/{scan}")) + path_scan_list = find_scan(path, scan) + if not path_scan_list: + raise FileNotFoundError( + f"Scan number {scan} not found", + ) + path = path_scan_list[0] + + return path + + def load_images( scan_path: Path, df_lut: Union[pd.DataFrame, None] = None, @@ -67,7 +101,6 @@ def load_images( Returns: List[np.ndarray]: A list of 2-D numpy arrays of raw data """ - scan_list = sorted( file.stem for file in scan_path.joinpath("AVG").iterdir() if file.suffix == ".tsv" ) diff --git a/tests/data b/tests/data index d48b11d..594c376 160000 --- a/tests/data +++ b/tests/data @@ -1 +1 @@ -Subproject commit d48b11dc275ae49d0220e605a2a12e3fef90f141 +Subproject commit 594c37660eb089d1b5fb205db63bd4a238dc7845 diff --git a/tests/test_specsscan.py b/tests/test_specsscan.py index 5cb6c94..b34a63e 100755 --- a/tests/test_specsscan.py +++ b/tests/test_specsscan.py @@ -21,3 +21,32 @@ def test_default_config(): assert isinstance(sps.config, dict) assert "spa_params" in sps.config.keys() assert sps.config["spa_params"]["apply_fft_filter"] is False + + +def test_process_sweep_scan(): + """Test the conversion of a sweep scan""" + config = { + "spa_params": { + "ek_range_min": 0.07597844332538357, + "ek_range_max": 0.8965456312395133, + "ang_range_min": 0.16732026143790849, + "ang_range_max": 0.8449673202614381, + "Ang_Offset_px": 13, + "rotation_angle": 2, + "crop": True, + }, + } + sps = SpecsScan( + config=config, + user_config=package_dir + "/config/example_config_FHI.yaml", + system_config={}, + ) + res_xarray = sps.load_scan( + scan=6455, + path=package_dir + "/../tests/data/", + ) + assert res_xarray.energy[0].values.item() == 20.953256232558136 + assert res_xarray.energy[-1].values.item() == 21.02424460465116 + assert ( + (res_xarray.sum(axis=0) - res_xarray.sum(axis=0).mean()) < 0.1 * res_xarray.sum(axis=0) + ).all() diff --git a/tutorial/convert_file_tests.ipynb b/tutorial/convert_file_tests.ipynb index 40117cf..a4e5073 100644 --- a/tutorial/convert_file_tests.ipynb +++ b/tutorial/convert_file_tests.ipynb @@ -63,7 +63,7 @@ "with open(raw_image_name) as file:\n", " tsv_data = np.loadtxt(file, delimiter=\"\\t\")\n", "plt.figure()\n", - "h = plt.contourf(tsv_data)\n", + "h = plt.imshow(tsv_data)\n", "plt.colorbar()\n", "print(\"The shape of the raw data is: \", tsv_data.shape)\n", "# plt.xlim(120,150)\n", @@ -124,7 +124,7 @@ "# calib2d_dictionary=specsanalyzer.io.parse_calib2d_to_dict('./config/phoibos150.calib2d')\n", "\n", "# settings function used to get configuration files\n", - "config_file = specsanalyzer.settings.parse_config(calib2d_dictionary)\n", + "config_file = specsanalyzer.config.parse_config(calib2d_dictionary)\n", "\n", "\n", "# get the das and the rr vector\n", @@ -164,7 +164,7 @@ { "cell_type": "code", "execution_count": null, - "id": "59ee269d", + "id": "cd5a2937", "metadata": {}, "outputs": [], "source": [ @@ -529,8 +529,8 @@ "python_data[np.isnan(python_data)] = 0\n", "igor_data[np.isnan(igor_data)] = 0\n", "# normalize to unit amplitude\n", - "python_data /= igor_data.max()\n", - "igor_data /= igor_data.max()" + "#python_data /= igor_data.max()\n", + "#igor_data /= igor_data.max()" ] }, { @@ -541,9 +541,9 @@ 
"outputs": [], "source": [ "fig, (ax1, ax2, ax3) = plt.subplots(ncols=3, figsize=(10, 4))\n", - "subplot1 = ax1.contourf(python_data)\n", - "subplot2 = ax2.contourf(igor_data)\n", - "subplot3 = ax3.contourf((python_data - igor_data))\n", + "subplot1 = ax1.imshow(python_data)\n", + "subplot2 = ax2.imshow(igor_data)\n", + "subplot3 = ax3.imshow((python_data - igor_data))\n", "fig.colorbar(subplot1, ax=ax1)\n", "fig.colorbar(subplot2, ax=ax2)\n", "fig.colorbar(subplot3, ax=ax3)" @@ -634,6 +634,119 @@ "id": "c8b84c75", "metadata": {}, "outputs": [], + "source": [ + "with open(\"../../testdata_converted.txt\") as file:\n", + " txt_data = np.loadtxt(file, delimiter=\"\\r\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f40dc591", + "metadata": {}, + "outputs": [], + "source": [ + "txt_data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1c5c78d8", + "metadata": {}, + "outputs": [], + "source": [ + "txt_data_array = np.reshape(txt_data, (256, 344))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8270886a", + "metadata": {}, + "outputs": [], + "source": [ + "txt_data_array" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "33c934bd", + "metadata": {}, + "outputs": [], + "source": [ + "plt.imshow(txt_data_array)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6d8c1753", + "metadata": {}, + "outputs": [], + "source": [ + "plt.imshow(python_data)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c2b5974c", + "metadata": {}, + "outputs": [], + "source": [ + "plt.imshow(igor_data-530*txt_data_array)\n", + "plt.colorbar()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "17050ed2", + "metadata": {}, + "outputs": [], + "source": [ + "igor_data.sum().sum()/tsv_data.sum().sum()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6d20dde6", + "metadata": {}, + "outputs": [], + "source": [ + "igor_data.sum().sum()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4b9d3754", + "metadata": {}, + "outputs": [], + "source": [ + "igor_data2 = igor_data/igor_data.sum().sum()*tsv_data.sum().sum()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "07cce131", + "metadata": {}, + "outputs": [], + "source": [ + "plt.imshow(igor_data2-txt_data_array)\n", + "plt.colorbar()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "384e981a", + "metadata": {}, + "outputs": [], "source": [] } ], @@ -653,7 +766,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.4" + "version": "3.8.12" }, "vscode": { "interpreter": { diff --git a/tutorial/load_sweep_scan.ipynb b/tutorial/load_sweep_scan.ipynb new file mode 100644 index 0000000..25cbead --- /dev/null +++ b/tutorial/load_sweep_scan.ipynb @@ -0,0 +1,290 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## This is an example showcasing the loading of trARPES data as collected using the Phoibos detector at FHI Berlin.\n", + "The band dispersion is loaded as a xarray dataframe following a conversion to the [NeXus format](https://manual.nexusformat.org/classes/contributed_definitions/NXmpes.html#nxmpes) using the [Nomad Parser Nexus](https://github.com/nomad-coe/nomad-parser-nexus)." 
+ ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "First, the SpecsScan class is imported which has the scan loader as its class method." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "from specsscan import SpecsScan\n", + "from pathlib import Path\n", + "from matplotlib import pyplot as plt" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here, a SpecsScan class instance is created as per the configuration provided in [config.yaml](../tests/data/config.yaml). The user may set the entries in config.yaml file, for example, the data path and conversion parameters as per the requirements before creating this instance" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### The path may changed to point to the scan folder of the data of interest (for example, on a server drive)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "config = {\"spa_params\": {\n", + " 'ek_range_min': 0.07597844332538357,\n", + " 'ek_range_max': 0.8965456312395133,\n", + " 'ang_range_min': 0.16732026143790849,\n", + " 'ang_range_max': 0.8449673202614381,\n", + " \"Ang_Offset_px\":13,\n", + " \"rotation_angle\": 2,\n", + " \"crop\":True,\n", + "}}\n", + "sps = SpecsScan(config=config, user_config=\"../specsscan/config/example_config_FHI.yaml\")\n", + "path = \"../tests/data/\" # Path to the test data set" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "metadata = {}\n", + "# manual Meta data. These should ideally come from an Electronic Lab Notebook.\n", + "#General\n", + "metadata['experiment_summary'] = 'TbTe3 tilt map around EF with 800nm pump at 20fs after pump-probe overlap'\n", + "metadata['entry_title'] = 'TbTe3 XUV Fermi surface map at 20 fs'\n", + "metadata['experiment_title'] = 'TbTe3 XUV Fermi surface map at 20 fs'\n", + "\n", + "#User\n", + "# Fill general parameters of NXuser\n", + "# TODO: discuss how to deal with multiple users?\n", + "metadata['user0'] = {}\n", + "metadata['user0']['name'] = 'Laurenz Rettig'\n", + "metadata['user0']['role'] = 'Principal Investigator'\n", + "metadata['user0']['affiliation'] = 'Fritz Haber Institute of the Max Planck Society'\n", + "metadata['user0']['address'] = 'Faradayweg 4-6, 14195 Berlin'\n", + "metadata['user0']['email'] = 'rettig@fhi-berlin.mpg.de'\n", + "\n", + "metadata['user1'] = {}\n", + "metadata['user1']['name'] = 'William Windsor'\n", + "metadata['user1']['role'] = 'Principal Investigator'\n", + "metadata['user1']['affiliation'] = 'Fritz Haber Institute of the Max Planck Society'\n", + "metadata['user1']['address'] = 'Faradayweg 4-6, 14195 Berlin'\n", + "metadata['user1']['email'] = 'windsor@fhi-berlin.mpg.de'\n", + "\n", + "metadata['instrument'] = {}\n", + "# energy resolution\n", + "metadata['instrument']['energy_resolution'] = 150.\n", + "metadata['instrument']['electronanalyser'] = {}\n", + "metadata['instrument']['electronanalyser']['angular_resolution'] = 0.2\n", + "\n", + "#probe beam\n", + "metadata['instrument']['beam']={}\n", + "metadata['instrument']['beam']['probe']={}\n", + "metadata['instrument']['beam']['probe']['incident_energy'] = 21.7\n", + "metadata['instrument']['beam']['probe']['incident_energy_spread'] = 0.11\n", + 
"metadata['instrument']['beam']['probe']['pulse_duration'] = 20.\n", + "metadata['instrument']['beam']['probe']['frequency'] = 500.\n", + "metadata['instrument']['beam']['probe']['incident_polarization'] = [1, 1, 0, 0] # p pol Stokes vector\n", + "metadata['instrument']['beam']['probe']['extent'] = [80., 80.]\n", + "#pump beam\n", + "metadata['instrument']['beam']['pump']={}\n", + "metadata['instrument']['beam']['pump']['incident_energy'] = 1.55\n", + "metadata['instrument']['beam']['pump']['incident_energy_spread'] = 0.08\n", + "metadata['instrument']['beam']['pump']['pulse_duration'] = 35.\n", + "metadata['instrument']['beam']['pump']['frequency'] = 500.\n", + "metadata['instrument']['beam']['pump']['incident_polarization'] = [1, -1, 0, 0] # s pol Stokes vector\n", + "metadata['instrument']['beam']['pump']['incident_wavelength'] = 800.\n", + "metadata['instrument']['beam']['pump']['average_power'] = 224.\n", + "metadata['instrument']['beam']['pump']['pulse_energy'] = metadata['instrument']['beam']['pump']['average_power']/metadata['instrument']['beam']['pump']['frequency']#µJ\n", + "metadata['instrument']['beam']['pump']['extent'] = [300/4*2.34, 270/4*2.35] #Gaussian 4sigma -> FWHM\n", + "metadata['instrument']['beam']['pump']['fluence'] = 1.00\n", + "metadata['instrument']['beam']['pump']['delay'] = 0.02\n", + "\n", + "#sample\n", + "metadata['sample']={}\n", + "metadata['sample']['preparation_date'] = '2017-03-19T10:00:00+00:00'\n", + "metadata['sample']['preparation_description'] = 'Cleaved'\n", + "metadata['sample']['sample_history'] = 'Cleaved in UHV'\n", + "metadata['sample']['chemical_formula'] = 'TbTe3'\n", + "metadata['sample']['description'] = 'cleaved single crystal of TbTe3'\n", + "metadata['sample']['name'] = 'TbTe3 Single Crystal'\n", + "\n", + "#metadata[\"scan_info\"] = {}\n", + "#metadata[\"scan_info\"][\"trARPES:XGS600:PressureAC:P_RD\"] = 2.5E-11\n", + "#metadata[\"scan_info\"][\"trARPES:Carving:TEMP_RBV\"] = 70\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#sps.crop_tool(scan=6455)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The load_scan method loads the scan as an xarray along with the metadata needed for nexus conversion. 
The progress bars can be activated by changing the config parameter, enable_nested_progress_bar, to true in config.yaml." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "res_xarray = sps.load_scan(\n", + " scan=6455, # Scan number for an example sweep scan\n", + " metadata=metadata,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plt.figure()\n", + "res_xarray.plot()\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "sps.save(\"Scan6455.nxs\")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The metadata associated with the scan is added as an attribute to the xarray" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "res_xarray.attrs[\"metadata\"].keys()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "raw = res_xarray.attrs[\"metadata\"][\"loader\"][\"raw_data\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "energies = res_xarray.attrs[\"metadata\"][\"scan_info\"][\"KineticEnergy\"]" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "## View the data with H5Web\n", + "H5Web is a tool for visualizing any data in the h5 data format. Since the NeXus format builds upon h5 it can be used to view this data as well. We just import the package and call H5Web with the output filename from the convert command above. \n", + "\n", + "You can also view this data with the H5Viewer or other tools from your local filesystem." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from jupyterlab_h5web import H5Web" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "H5Web(\"Scan6455.nxs\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "specenv38", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + }, + "vscode": { + "interpreter": { + "hash": "a164666994e9db75450cd7016dd7e51d42ea6e7c1e5e8017af1f8068ca906367" + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +}
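The heart of this change is the new process_sweep_scan method: each frame of a voltage sweep is converted at its own set kinetic energy and then summed onto a common kinetic-energy grid by nearest-neighbour matching, with the under-exposed edge bins stripped afterwards. Below is a minimal, standalone NumPy sketch of that accumulation step; all shapes, energy ranges and the random "frames" are made up for illustration, while the actual method operates on spa.convert_image output with xarray coordinates and Ekin.sel(..., method="nearest").

import numpy as np

frame_e = np.linspace(-0.1, 0.1, 21)        # energy axis of one frame, relative to the set energy (hypothetical)
sweep_e = np.arange(20.5, 21.5001, 0.01)    # analyser set kinetic energies of the sweep (hypothetical)
e_step = frame_e[1] - frame_e[0]
ekin_step = sweep_e[1] - sweep_e[0]

# common accumulation grid, mirroring the e0/e1 construction in process_sweep_scan
target_e = np.arange(frame_e[-1] + sweep_e[0] - ekin_step, frame_e[0] + sweep_e[-1], e_step)
summed = np.zeros((32, target_e.size))      # (Angle, Ekin) accumulator

rng = np.random.default_rng(0)
for ekin in sweep_e:
    frame = rng.random((32, frame_e.size))  # stand-in for a converted frame at this set energy
    abs_e = frame_e + ekin                  # absolute kinetic energy of each frame column
    inside = (abs_e >= target_e[0]) & (abs_e <= target_e[-1])
    # nearest target column for every frame column that falls on the common grid
    idx = np.rint((abs_e[inside] - target_e[0]) / e_step).astype(int)
    summed[:, idx] += frame[:, inside]

# the outermost bins are not visited by every frame, hence the method strips them
summed = summed[:, 1:-1]

With equidistant sweep steps the nearest-neighbour mapping is one-to-one per frame, which is why the method only warns, rather than interpolating, when the set energies are not equally spaced.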