Mpes tweaks #136

Merged · 9 commits · Oct 5, 2023

Changes from all commits
2,396 changes: 1,265 additions & 1,131 deletions poetry.lock

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions pyproject.toml
@@ -11,8 +11,8 @@ dask = ">=2021.12.0, <2023.0.0"
fastdtw = "^0.3.4"
fastparquet = "^0.8.0"
h5py = "^3.6.0"
ipympl = "^0.9.1"
ipywidgets = "^7.7.1"
ipympl = ">=0.9.1"
ipywidgets = ">=7.7.1"
lmfit = "^1.0.3"
matplotlib = "^3.5.1"
natsort = "^8.1.0"
@@ -21,7 +21,7 @@ numpy = ">=1.18,<1.22"
opencv-python = "<=4.8.0.74"
pandas = "^1.4.1"
psutil = "^5.9.0"
pynxtools = "^0.0.2"
pynxtools = ">=0.0.2"
pyyaml = "^6.0.0"
scipy = "^1.8.0"
symmetrize = "^0.5.5"
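The three dependency changes above follow one pattern: Poetry's caret constraints, which exclude the next breaking release, are relaxed to plain lower bounds so newer versions can be resolved. A quick illustration of the difference using the `packaging` library (a sketch, not part of this PR):

```python
# Sketch: the old caret pin is Poetry shorthand for a bounded range, while
# the new constraint only sets a lower bound.
from packaging.specifiers import SpecifierSet

caret = SpecifierSet(">=0.9.1,<0.10.0")  # what "^0.9.1" expands to for 0.x
open_bound = SpecifierSet(">=0.9.1")     # the new constraint

print(caret.contains("0.10.2"))       # False: the caret pin rejects 0.10.x
print(open_bound.contains("0.10.2"))  # True: the open bound accepts it
```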
51 changes: 45 additions & 6 deletions sed/calibrator/momentum.py
@@ -99,6 +99,8 @@ def __init__(
self.vvdist: float = np.nan
self.rdeform_field: np.ndarray = None
self.cdeform_field: np.ndarray = None
self.rdeform_field_bkp: np.ndarray = None
self.cdeform_field_bkp: np.ndarray = None
self.inverse_dfield: np.ndarray = None
self.dfield_updated: bool = False
self.transformations: Dict[Any, Any] = {}
@@ -108,12 +110,8 @@ def __init__(

self.x_column = self._config["dataframe"]["x_column"]
self.y_column = self._config["dataframe"]["y_column"]
self.corrected_x_column = self._config["dataframe"][
"corrected_x_column"
]
self.corrected_y_column = self._config["dataframe"][
"corrected_y_column"
]
self.corrected_x_column = self._config["dataframe"]["corrected_x_column"]
self.corrected_y_column = self._config["dataframe"]["corrected_y_column"]
self.kx_column = self._config["dataframe"]["kx_column"]
self.ky_column = self._config["dataframe"]["ky_column"]

@@ -613,6 +611,7 @@ def spline_warp_estimate(
use_center: bool = None,
fixed_center: bool = True,
interp_order: int = 1,
verbose: bool = True,
**kwds,
) -> np.ndarray:
"""Estimate the spline deformation field using thin plate spline registration.
@@ -629,6 +628,8 @@
interp_order (int, optional):
Order of interpolation (see ``scipy.ndimage.map_coordinates()``).
Defaults to 1.
verbose (bool, optional): Option to report the landmarks used for correction.
Defaults to True.
**kwds: keyword arguments:

- **landmarks**: (list/array): Landmark positions (row, column) used
@@ -712,6 +713,10 @@
splinewarp[1],
)

# save backup copies to reset transformations
self.rdeform_field_bkp = self.rdeform_field
self.cdeform_field_bkp = self.cdeform_field

self.correction["applied"] = True
self.correction["pouter"] = self.pouter_ord
self.correction["pcent"] = np.asarray(self.pcent)
@@ -723,6 +728,12 @@
if self.slice is not None:
self.slice_corrected = corrected_image

if verbose:
print("Calulated thin spline correction based on the following landmarks:")
print(f"pouter: {self.pouter}")
if use_center:
print(f"pcent: {self.pcent}")

return corrected_image

def apply_correction(
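The `spline_warp_estimate()` changes above back up the freshly computed deformation fields and, with `verbose=True`, report the landmarks used. For orientation, here is a minimal, self-contained sketch of landmark-based thin plate spline warping with SciPy; it is not the PR's implementation, and the `landmarks`/`targets` arrays are assumed inputs:

```python
# Sketch only: estimate a thin plate spline deformation field from matched
# landmarks (distorted positions) and targets (ideal positions), both of
# shape (N, 2), then inverse-warp the image with it.
import numpy as np
from scipy.interpolate import RBFInterpolator
from scipy.ndimage import map_coordinates

def tps_correct(image, landmarks, targets, interp_order=1):
    # Interpolate target -> source coordinates, so each corrected pixel
    # knows where to sample in the distorted image (inverse warping).
    spline = RBFInterpolator(targets, landmarks, kernel="thin_plate_spline")
    rows, cols = np.mgrid[0:image.shape[0], 0:image.shape[1]]
    grid = np.stack([rows.ravel(), cols.ravel()], axis=1)
    source = spline(grid)  # (H*W, 2) coordinates in the distorted image
    rdeform_field = source[:, 0].reshape(image.shape)
    cdeform_field = source[:, 1].reshape(image.shape)
    corrected = map_coordinates(
        image, [rdeform_field, cdeform_field], order=interp_order
    )
    return corrected, rdeform_field, cdeform_field
```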
@@ -966,6 +977,8 @@ def pose_adjustment(
ytrans: float = 0,
angle: float = 0,
apply: bool = False,
reset: bool = True,
verbose: bool = True,
):
"""Interactive panel to adjust transformations that are applied to the image.
Applies first a scaling, next a x/y translation, and last a rotation around
@@ -983,12 +996,23 @@
apply (bool, optional):
Option to directly apply the provided transformations.
Defaults to False.
reset (bool, optional):
Option to reset the correction before transformation. Defaults to True.
verbose (bool, optional):
Option to report the performed transformations. Defaults to True.
"""
matplotlib.use("module://ipympl.backend_nbagg")
source_image = self.slice_corrected

transformed_image = source_image

if reset:
if self.rdeform_field_bkp is not None and self.cdeform_field_bkp is not None:
self.rdeform_field = self.rdeform_field_bkp
self.cdeform_field = self.cdeform_field_bkp
else:
self.reset_deformation()

fig, ax = plt.subplots(1, 1)
img = ax.imshow(transformed_image.T, origin="lower", cmap="terrain_r")
center = self._config["momentum"]["center_pixel"]
@@ -1050,6 +1074,7 @@ def update(scale: float, xtrans: float, ytrans: float, angle: float):
step=1,
)
angle_slider = ipw.FloatSlider(value=angle, min=-180, max=180, step=1)
results_box = ipw.Output()
ipw.interact(
update,
scale=scale_slider,
@@ -1066,6 +1091,9 @@ def apply_func(apply: bool): # pylint: disable=unused-argument
yscale=self.transformations["scale"],
keep=True,
)
if verbose:
with results_box:
print(f"Applied scaling with scale={self.transformations['scale']}.")
if (
self.transformations.get("xtrans", 0) != 0
or self.transformations.get("ytrans", 0) != 0
@@ -1076,13 +1104,24 @@
ytrans=self.transformations["ytrans"],
keep=True,
)
if verbose:
with results_box:
print(
f"Applied translation with (xtrans={self.transformations['xtrans']},",
f"ytrans={self.transformations['ytrans']}).",
)
if self.transformations.get("angle", 0) != 0:
self.coordinate_transform(
transform_type="rotation",
angle=self.transformations["angle"],
center=center,
keep=True,
)
if verbose:
with results_box:
print(f"Applied rotation with angle={self.transformations['angle']}.")

display(results_box)

img.set_data(self.slice_transformed.T)
axmin = np.min(self.slice_transformed, axis=(0, 1))
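Taken together, the momentum.py changes implement a backup/restore scheme: `spline_warp_estimate()` stores pristine copies of the deformation fields, and `pose_adjustment(reset=True)` restores them (or falls back to an identity deformation) so repeated pose adjustments start from the bare spline correction instead of stacking. A toy sketch of the pattern; the class and array shapes are illustrative, not the sed API:

```python
import numpy as np

class DeformationState:
    """Toy stand-in for MomentumCorrector's deformation-field bookkeeping."""

    def __init__(self, shape=(512, 512)):
        self.rdeform_field_bkp = None
        self.cdeform_field_bkp = None
        self.reset_deformation(shape)

    def reset_deformation(self, shape):
        # Identity deformation: every pixel maps onto itself.
        self.rdeform_field, self.cdeform_field = np.mgrid[
            0:shape[0], 0:shape[1]
        ].astype(float)

    def spline_warp_estimate(self, rdeform, cdeform):
        self.rdeform_field, self.cdeform_field = rdeform, cdeform
        # Save backup copies so pose adjustments can be re-run from the
        # bare spline correction (mirrors the diff above).
        self.rdeform_field_bkp = self.rdeform_field
        self.cdeform_field_bkp = self.cdeform_field

    def pose_adjustment(self, reset=True):
        if reset:
            if self.rdeform_field_bkp is not None and self.cdeform_field_bkp is not None:
                self.rdeform_field = self.rdeform_field_bkp
                self.cdeform_field = self.cdeform_field_bkp
            else:
                self.reset_deformation(self.rdeform_field.shape)
        # ...interactive scaling/translation/rotation would follow here.
```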
11 changes: 10 additions & 1 deletion sed/core/processor.py
@@ -512,6 +512,7 @@ def pose_adjustment(
angle: float = 0,
apply: bool = False,
use_correction: bool = True,
reset: bool = True,
):
"""3. step of the distortion correction workflow: Generate an interactive panel
to adjust affine transformations that are applied to the image. Applies first
@@ -531,6 +532,8 @@
transformations. Defaults to False.
use_correction (bool, optional): Whether to use the spline warp correction
or not. Defaults to True.
reset (bool, optional):
Option to reset the correction before transformation. Defaults to True.
"""
# Generate homography as default if no distortion correction has been applied
if self.mc.slice_corrected is None:
Expand All @@ -544,7 +547,7 @@ def pose_adjustment(
self.mc.reset_deformation()

if self.mc.cdeform_field is None or self.mc.rdeform_field is None:
-# Generate default distortion correction
+# Generate distortion correction from config values
self.mc.add_features()
self.mc.spline_warp_estimate()

@@ -554,6 +557,7 @@
ytrans=ytrans,
angle=angle,
apply=apply,
reset=reset,
)

# 5. Apply the momentum correction to the dataframe
@@ -1483,6 +1487,8 @@ def save(
config["nexus"]["definition"]
- **input_files**: A list of input files to pass to the reader.
Defaults to config["nexus"]["input_files"]
- **eln_data**: An electronic-lab-notebook file in '.yaml' format
to add to the list of files to pass to the reader.
"""
if self._binned is None:
raise NameError("Need to bin data first!")
@@ -1520,6 +1526,9 @@
if isinstance(input_files, str):
input_files = [input_files]

if "eln_data" in kwds:
input_files.append(kwds.pop("eln_data"))

to_nexus(
data=self._binned,
faddr=faddr,
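The `save()` additions document and wire through an `eln_data` keyword: a YAML electronic-lab-notebook file that is appended to the reader's `input_files` list. A hypothetical call, assuming a processor instance `sp` with binned data and placeholder file names:

```python
# sp is assumed to be an existing processor with binned data, e.g.
# sp = SedProcessor(...); the file names below are placeholders.
sp.save(
    "binned_data.nxs",                   # NeXus output path
    input_files=["reader_config.json"],  # passed through to the reader
    eln_data="eln_data.yaml",            # appended to input_files by save()
)
```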
65 changes: 53 additions & 12 deletions sed/loader/mpes/loader.py
@@ -4,6 +4,7 @@
@author: L. Rettig
"""
import datetime
import glob
import json
import os
import urllib
@@ -19,6 +20,7 @@
import h5py
import numpy as np
import scipy.interpolate as sint
from natsort import natsorted

from sed.loader.base.loader import BaseLoader

@@ -362,14 +364,30 @@ def read_dataframe(
Tuple[ddf.DataFrame, dict]: Dask dataframe and metadata read from specified
files.
"""
-# pylint: disable=duplicate-code
-super().read_dataframe(
-    files=files,
-    folders=folders,
-    runs=runs,
-    ftype=ftype,
-    metadata=metadata,
-)
+# if runs is provided, try to locate the respective files relative to the provided folder.
+if runs is not None:  # pylint: disable=duplicate-code
+    files = []
+    if isinstance(runs, (str, int)):
+        runs = [runs]
+    for run in runs:
+        files.extend(
+            self.get_files_from_run_id(run_id=run, folders=folders, extension=ftype),
+        )
+    self.runs = list(runs)
+    super().read_dataframe(
+        files=files,
+        ftype=ftype,
+        metadata=metadata,
+    )
+else:
+    # pylint: disable=duplicate-code
+    super().read_dataframe(
+        files=files,
+        folders=folders,
+        runs=runs,
+        ftype=ftype,
+        metadata=metadata,
+    )

hdf5_groupnames = kwds.pop(
"hdf5_groupnames",
@@ -425,22 +443,45 @@ def get_files_from_run_id(
self,
run_id: str,
folders: Union[str, Sequence[str]] = None,
-extension: str = None,
+extension: str = "h5",
**kwds,
) -> List[str]:
"""Locate the files for a given run identifier.

Args:
run_id (str): The run identifier to locate.
folders (Union[str, Sequence[str]], optional): The directory(ies) where the raw
-data is located. Defaults to None.
+data is located. Defaults to config["core"]["paths"]["data_raw_dir"].
extension (str, optional): The file extension. Defaults to "h5".
kwds: Keyword arguments

Return:
-str: Path to the location of run data.
+List[str]: List of file path strings to the location of run data.
"""
-raise NotImplementedError
+if folders is None:
+    folders = self._config["core"]["paths"]["data_raw_dir"]
+
+if isinstance(folders, str):
+    folders = [folders]
+
+files: List[str] = []
+for folder in folders:
+    run_files = natsorted(
+        glob.glob(
+            folder + "/**/Scan" + str(run_id).zfill(4) + "_*." + extension,
+            recursive=True,
+        ),
+    )
+    files.extend(run_files)
+
+# Check if any files are found
+if not files:
+    raise FileNotFoundError(
+        f"No files found for run {run_id} in directory {str(folders)}",
+    )
+
+# Return the list of found files
+return files

def gather_metadata(
self,
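`get_files_from_run_id()` is now implemented for the mpes loader: it globs recursively for `Scan####_*.<ext>` under the configured raw-data folders and natural-sorts the hits. A hypothetical usage sketch, assuming the loader class is named `MpesLoader` and is constructed with a config dict shaped like the test config below:

```python
from sed.loader.mpes.loader import MpesLoader  # class name assumed

config = {"core": {"paths": {"data_raw_dir": "tests/data/loader/mpes/"}}}
loader = MpesLoader(config=config)

# Resolves files matching Scan0030_*.h5 and Scan0050_*.h5 (run IDs are
# zero-padded to four digits) under data_raw_dir, in natural sort order,
# then reads them into one dask dataframe.
df, metadata = loader.read_dataframe(runs=["30", "50"], ftype="h5")
print(loader.runs)  # ['30', '50']
```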
4 changes: 3 additions & 1 deletion tests/data/loader/mpes/config.yaml
@@ -1 +1,3 @@
-test:
+core:
+  paths:
+    data_raw_dir: "tests/data/loader/mpes/"
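The test config moves the raw-data location to the nested core → paths → data_raw_dir key that `get_files_from_run_id()` reads. A minimal check with plain PyYAML:

```python
import yaml

with open("tests/data/loader/mpes/config.yaml") as file:
    config = yaml.safe_load(file)

# Matches the lookup self._config["core"]["paths"]["data_raw_dir"]
# in the mpes loader above.
print(config["core"]["paths"]["data_raw_dir"])  # tests/data/loader/mpes/
```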
2 changes: 1 addition & 1 deletion tests/loader/test_loaders.py
@@ -22,7 +22,7 @@
test_data_dir = os.path.join(package_dir, "..", "tests", "data")

read_types = ["one_file", "files", "one_folder", "folders", "one_run", "runs"]
runs = {"generic": None, "mpes": None, "flash": ["43878", "43878"]}
runs = {"generic": None, "mpes": ["30", "50"], "flash": ["43878", "43878"]}


def get_loader_name_from_loader_object(loader: BaseLoader) -> str:
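With run IDs now defined for the mpes loader, the run-based read path is exercised in the loader tests as well. A hypothetical sketch of how such a mapping can drive a parametrized test; the real suite wires this through `read_types` and per-loader fixtures:

```python
import pytest

@pytest.mark.parametrize("loader_name, run_list", list(runs.items()))
def test_runs_defined(loader_name, run_list):
    # Loaders without run-based loading (here: "generic") are skipped.
    if run_list is None:
        pytest.skip(f"{loader_name} loader defines no test runs")
    assert all(isinstance(run, str) for run in run_list)
```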