diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6eb876ee..35b642a7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -15,10 +15,10 @@ repos:
     # Ruff version.
     rev: v0.1.7
     hooks:
-      # Run the linter.
-      - id: ruff
       # Run the formatter.
       - id: ruff-format
+      # Run the linter.
+      - id: ruff
 - repo: https://github.com/pre-commit/mirrors-mypy
   rev: v1.7.1
   hooks:
diff --git a/sed/calibrator/delay.py b/sed/calibrator/delay.py
index f4c029a0..55fabd06 100644
--- a/sed/calibrator/delay.py
+++ b/sed/calibrator/delay.py
@@ -1,6 +1,7 @@
 """sed.calibrator.delay module. Code for delay calibration.
 """
 from copy import deepcopy
+from datetime import datetime
 from typing import Any
 from typing import Dict
 from typing import List
@@ -53,7 +54,7 @@ def append_delay_axis(
         df: Union[pd.DataFrame, dask.dataframe.DataFrame],
         adc_column: str = None,
         delay_column: str = None,
-        calibration: dict = None,
+        calibration: Dict[str, Any] = None,
         adc_range: Union[Tuple, List, np.ndarray] = None,
         delay_range: Union[Tuple, List, np.ndarray] = None,
         time0: float = None,
@@ -62,7 +63,7 @@ def append_delay_axis(
         p1_key: str = None,
         p2_key: str = None,
         t0_key: str = None,
-        **kwargs,
+        verbose: bool = True,
     ) -> Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]:
         """Calculate and append the delay axis to the events dataframe, by converting
         values from an analog-digital-converter (ADC).
@@ -93,6 +94,8 @@ def append_delay_axis(
                 Defaults to config["delay"]["p2_key"]
             t0_key (str, optional): hdf5 key for t0 value (mm).
                 Defaults to config["delay"]["t0_key"]
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to True.

         Raises:
             ValueError: Raised if delay parameters are not found in the file.
@@ -102,28 +105,34 @@ def append_delay_axis(
             Union[pd.DataFrame, dask.dataframe.DataFrame]: dataframe with added column
                 and delay calibration metdata dictionary.
         """
-        if len(kwargs) > 0:
-            print(f"WARNING: arguments {kwargs.keys()} are not used in mpes delay calibration.")
         # pylint: disable=duplicate-code
         if calibration is None:
-            if self.calibration:
-                calibration = deepcopy(self.calibration)
-            else:
-                calibration = deepcopy(
-                    self._config["delay"].get(
-                        "calibration",
-                        {},
-                    ),
+            calibration = deepcopy(self.calibration)
+
+        if (
+            adc_range is not None
+            or delay_range is not None
+            or time0 is not None
+            or delay_range_mm is not None
+            or datafile is not None
+        ):
+            calibration = {}
+            calibration["creation_date"] = datetime.now().timestamp()
+            if adc_range is not None:
+                calibration["adc_range"] = adc_range
+            if delay_range is not None:
+                calibration["delay_range"] = delay_range
+            if time0 is not None:
+                calibration["time0"] = time0
+            if delay_range_mm is not None:
+                calibration["delay_range_mm"] = delay_range_mm
+        else:
+            # report usage of loaded parameters
+            if "creation_date" in calibration and verbose:
+                datestring = datetime.fromtimestamp(calibration["creation_date"]).strftime(
+                    "%m/%d/%Y, %H:%M:%S",
                 )
-
-        if adc_range is not None:
-            calibration["adc_range"] = adc_range
-        if delay_range is not None:
-            calibration["delay_range"] = delay_range
-        if time0 is not None:
-            calibration["time0"] = time0
-        if delay_range_mm is not None:
-            calibration["delay_range_mm"] = delay_range_mm
+                print(f"Using delay calibration parameters generated on {datestring}")

         if adc_column is None:
             adc_column = self.adc_column
@@ -158,7 +167,8 @@ def append_delay_axis(
                 calibration["datafile"] = datafile
                 calibration["delay_range_mm"] = (ret[0], ret[1])
                 calibration["time0"] = ret[2]
-                print(f"Extract delay range from file '{datafile}'.")
+                if verbose:
+                    print(f"Extract delay range from file '{datafile}'.")
             else:
                 raise NotImplementedError(
                     "Not enough parameters for delay calibration.",
@@ -170,7 +180,9 @@ def append_delay_axis(
                     calibration["time0"],
                 ),
             )
-            print(f"delay_range (ps) = {calibration['delay_range']}")
+            if verbose:
+                print(f"Converted delay_range (ps) = {calibration['delay_range']}")
+            calibration["creation_date"] = datetime.now().timestamp()

         if "delay_range" in calibration.keys():
             df[delay_column] = calibration["delay_range"][0] + (
@@ -178,28 +190,38 @@ def append_delay_axis(
             ) * (calibration["delay_range"][1] - calibration["delay_range"][0]) / (
                 calibration["adc_range"][1] - calibration["adc_range"][0]
             )
+            self.calibration = deepcopy(calibration)
+            if verbose:
+                print(
+                    "Append delay axis using delay_range = "
+                    f"[{calibration['delay_range'][0]}, {calibration['delay_range'][1]}]"
+                    " and adc_range = "
+                    f"[{calibration['adc_range'][0]}, {calibration['adc_range'][1]}]",
+                )
         else:
             raise NotImplementedError

         metadata = {"calibration": calibration}
-
         return df, metadata

     def add_offsets(
         self,
         df: dask.dataframe.DataFrame,
+        offsets: Dict[str, Any] = None,
         constant: float = None,
         flip_delay_axis: bool = None,
         columns: Union[str, Sequence[str]] = None,
-        weights: Union[float, Sequence[float]] = None,
+        weights: Union[float, Sequence[float]] = 1.0,
         preserve_mean: Union[bool, Sequence[bool]] = False,
         reductions: Union[str, Sequence[str]] = None,
         delay_column: str = None,
+        verbose: bool = True,
     ) -> Tuple[dask.dataframe.DataFrame, dict]:
         """Apply an offset to the delay column based on a constant or other columns.

         Args:
             df (Union[pd.DataFrame, dask.dataframe.DataFrame]): Dataframe to use.
+            offsets (Dict, optional): Dictionary of delay offset parameters.
             constant (float, optional): The constant to shift the delay axis by.
             flip_delay_axis (bool, optional): Whether to flip the time axis.
                 Defaults to False.
             columns (Union[str, Sequence[str]]): Name of the column(s) to apply the shift from.
@@ -211,27 +233,93 @@ def add_offsets(
                 of dask.dataframe.Series. For example "mean". In this case the function is applied
                 to the column to generate a single value for the whole dataset. If None, the shift
                 is applied per-dataframe-row. Defaults to None. Currently only "mean" is supported.
+            delay_column (str, optional): Name of the column containing the delay values.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to True.

         Returns:
             dask.dataframe.DataFrame: Dataframe with the shifted delay axis.
             dict: Metadata dictionary.
         """
+        if offsets is None:
+            offsets = deepcopy(self.offsets)
+
         if delay_column is None:
             delay_column = self.delay_column
+
         metadata: Dict[str, Any] = {
             "applied": True,
         }

-        if columns is None and constant is None:
-            # load from config
-            # pylint: disable=duplicate-code
+        if columns is not None or constant is not None or flip_delay_axis:
+            # pylint:disable=duplicate-code
+            # use passed parameters, overwrite config
+            offsets = {}
+            offsets["creation_date"] = datetime.now().timestamp()
+            # column-based offsets
+            if columns is not None:
+                if weights is None:
+                    weights = 1
+                if isinstance(weights, (int, float, np.integer, np.floating)):
+                    weights = [weights]
+                if len(weights) == 1:
+                    weights = [weights[0]] * len(columns)
+                if not isinstance(weights, Sequence):
+                    raise TypeError(
+                        f"Invalid type for weights: {type(weights)}. Must be a number or sequence",
+                    )
+                if not all(isinstance(s, (int, float, np.integer, np.floating)) for s in weights):
+                    raise TypeError(
+                        f"Invalid type for weights: {type(weights)}. Must be a number or sequence",
+                    )
+
+                if isinstance(columns, str):
+                    columns = [columns]
+                if isinstance(preserve_mean, bool):
+                    preserve_mean = [preserve_mean] * len(columns)
+                if not isinstance(reductions, Sequence):
+                    reductions = [reductions]
+                if len(reductions) == 1:
+                    reductions = [reductions[0]] * len(columns)
+
+                # store in offsets dictionary
+                for col, weight, pmean, red in zip(columns, weights, preserve_mean, reductions):
+                    offsets[col] = {
+                        "weight": weight,
+                        "preserve_mean": pmean,
+                        "reduction": red,
+                    }
+
+            # constant offset
+            if isinstance(constant, (int, float, np.integer, np.floating)):
+                offsets["constant"] = constant
+            elif constant is not None:
+                raise TypeError(f"Invalid type for constant: {type(constant)}")
+            # flip the time direction
+            if flip_delay_axis:
+                offsets["flip_delay_axis"] = flip_delay_axis
+
+        elif "creation_date" in offsets and verbose:
+            datestring = datetime.fromtimestamp(offsets["creation_date"]).strftime(
+                "%m/%d/%Y, %H:%M:%S",
+            )
+            print(f"Using delay offset parameters generated on {datestring}")
+
+        if len(offsets) > 0:
+            # unpack dictionary
             columns = []
             weights = []
             preserve_mean = []
             reductions = []
-            for k, v in self.offsets.items():
+            if verbose:
+                print("Delay offset parameters:")
+            for k, v in offsets.items():
+                if k == "creation_date":
+                    continue
                 if k == "constant":
                     constant = v
+                    if verbose:
+                        print(f"  Constant: {constant} ")
                 elif k == "flip_delay_axis":
                     fda = str(v)
                     if fda.lower() in ["true", "1"]:
@@ -242,77 +330,47 @@ def add_offsets(
                         raise ValueError(
                             f"Invalid value for flip_delay_axis in config: {flip_delay_axis}.",
                         )
+                    if verbose:
+                        print(f"  Flip delay axis: {flip_delay_axis} ")
                 else:
                     columns.append(k)
                     try:
-                        weights.append(v["weight"])
-                    except KeyError as exc:
-                        raise KeyError(f"Missing weight for offset column {k} in config.") from exc
-                    preserve_mean.append(v.get("preserve_mean", False))
-                    reductions.append(v.get("reduction", None))
-
-        # apply offset
-        if columns is not None:
-            # use passed parameters
-            if isinstance(weights, (int, float, np.integer, np.floating)):
-                weights = [weights]
-            elif not isinstance(weights, Sequence):
-                raise TypeError(
-                    f"Invalid type for weights: {type(weights)}. Must be a number or sequence",
+                        weight = v["weight"]
+                    except KeyError:
+                        weight = 1
+                    weights.append(weight)
+                    pm = v.get("preserve_mean", False)
+                    preserve_mean.append(pm)
+                    red = v.get("reduction", None)
+                    reductions.append(red)
+                    if verbose:
+                        print(
+                            f"  Column[{k}]: Weight={weight}, Preserve Mean: {pm}, ",
+                            f"Reductions: {red}.",
+                        )
+
+            if len(columns) > 0:
+                df = dfops.offset_by_other_columns(
+                    df=df,
+                    target_column=delay_column,
+                    offset_columns=columns,
+                    weights=weights,
+                    preserve_mean=preserve_mean,
+                    reductions=reductions,
                 )
-            if not all(isinstance(s, (int, float, np.integer, np.floating)) for s in weights):
-                raise TypeError(
-                    f"Invalid type for weights: {type(weights)}. Must be a number or sequence",
+
+            if constant:
+                df[delay_column] = df.map_partitions(
+                    lambda x: x[delay_column] + constant,
+                    meta=(delay_column, np.float64),
                 )
-            df = dfops.offset_by_other_columns(
-                df=df,
-                target_column=delay_column,
-                offset_columns=columns,
-                weights=weights,
-                preserve_mean=preserve_mean,
-                reductions=reductions,
-            )
-            metadata["delay_column"] = delay_column
-            metadata["columns"] = columns
-            metadata["weights"] = weights
-            metadata["preserve_mean"] = preserve_mean
-            metadata["reductions"] = reductions
-            # pylint: disable=duplicate-code
-            if not isinstance(columns, Sequence):
-                columns = [columns]
-            if not isinstance(weights, Sequence):
-                weights = [weights]
-            if isinstance(preserve_mean, bool):
-                preserve_mean = [preserve_mean] * len(columns)
-            if not isinstance(reductions, Sequence):
-                reductions = [reductions]
-            if len(reductions) == 1:
-                reductions = [reductions[0]] * len(columns)
-
-            for col, weight, pmean, red in zip(columns, weights, preserve_mean, reductions):
-                self.offsets[col] = {
-                    "weight": weight,
-                    "preserve_mean": pmean,
-                    "reduction": red,
-                }
-
-        # apply constant
-        if isinstance(constant, (int, float, np.integer, np.floating)):
-            df[delay_column] = df.map_partitions(
-                # flip sign if binding energy scale
-                lambda x: x[delay_column] + constant,
-                meta=(delay_column, np.float64),
-            )
-            metadata["constant"] = constant
-            self.offsets["constant"] = constant
-        elif constant is not None:
-            raise TypeError(f"Invalid type for constant: {type(constant)}")
-        # flip the time direction
-        if flip_delay_axis:
-            df[delay_column] = -df[delay_column]
-            metadata["flip_delay_axis"] = True
-            self.offsets["flip_delay_axis"] = True
+            if flip_delay_axis:
+                df[delay_column] = -df[delay_column]
+
+            self.offsets = offsets
+            metadata["offsets"] = offsets

         return df, metadata
@@ -326,13 +384,13 @@ def extract_delay_stage_parameters(
    Read delay stage ranges from hdf5 file

    Parameters:
-        file: filename
-        p1_key: hdf5 path to the start of the scan range
-        p2_key: hdf5 path to the end of the scan range
-        t0_key: hdf5 path to the t0 value
+        file (str): filename
+        p1_key (str): hdf5 path to the start of the scan range
+        p2_key (str): hdf5 path to the end of the scan range
+        t0_key (str): hdf5 path to the t0 value

    Returns:
-        (p1_value, p2_value, t0_value)
+        tuple: (p1_value, p2_value, t0_value)
    """
    with h5py.File(file, "r") as file_handle:
        values = []
@@ -353,14 +411,11 @@ def mm_to_ps(
    (double pass).

    Args:
-        delay_mm (Union[float, Sequence[float]]):
-            Delay stage position in mm
-        time0_mm (_type_):
-            Delay stage position of pump-probe overlap in mm
+        delay_mm (Union[float, Sequence[float]]): Delay stage position in mm
+        time0_mm (float): Delay stage position of pump-probe overlap in mm

    Returns:
-        Union[float, Sequence[float]]:
-            Relative delay in picoseconds
+        Union[float, Sequence[float]]: Relative delay in picoseconds
    """
    delay_ps = (delay_mm - time0_mm) / 0.15
    return delay_ps
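Note on the pattern introduced above: each calibrator now resolves its parameters the same way — any explicitly passed parameter starts a fresh parameter dictionary stamped with a creation_date timestamp, while a call without arguments falls back to the stored (class or config) dictionary and reports when it was generated. A standalone Python sketch of this reuse-or-regenerate logic (hypothetical helper, not part of the diff):

    from copy import deepcopy
    from datetime import datetime

    def resolve_params(stored: dict, verbose: bool = True, **passed) -> dict:
        """Illustrative reuse-or-regenerate helper (not part of the diff)."""
        if any(value is not None for value in passed.values()):
            # Explicit parameters invalidate the stored set and re-stamp it.
            params = {key: value for key, value in passed.items() if value is not None}
            params["creation_date"] = datetime.now().timestamp()
        else:
            # Otherwise the stored set is reused and its age is reported.
            params = deepcopy(stored)
            if "creation_date" in params and verbose:
                datestring = datetime.fromtimestamp(params["creation_date"]).strftime(
                    "%m/%d/%Y, %H:%M:%S",
                )
                print(f"Using parameters generated on {datestring}")
        return params

    # resolve_params({"adc_range": (650, 6900), "creation_date": 1700000000.0})
    # reuses the stored values; resolve_params({}, adc_range=(650, 6900))
    # builds and stamps a fresh set.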
diff --git a/sed/calibrator/energy.py b/sed/calibrator/energy.py
index c8853053..22f91a18 100644
--- a/sed/calibrator/energy.py
+++ b/sed/calibrator/energy.py
@@ -4,6 +4,7 @@
 import itertools as it
 import warnings as wn
 from copy import deepcopy
+from datetime import datetime
 from functools import partial
 from typing import Any
 from typing import cast
@@ -94,7 +95,7 @@ def __init__(
         self.featranges: List[Tuple] = []  # Value ranges for feature detection
         self.peaks: np.ndarray = np.asarray([])

-        self.calibration: Dict[Any, Any] = {}
+        self.calibration: Dict[str, Any] = self._config["energy"].get("calibration", {})

         self.tof_column = self._config["dataframe"]["tof_column"]
         self.tof_ns_column = self._config["dataframe"].get("tof_ns_column", None)
@@ -114,7 +115,7 @@ def __init__(
         self.sector_delays = self._config["dataframe"].get("sector_delays", None)
         self.sector_id_column = self._config["dataframe"].get("sector_id_column", None)
         self.offsets: Dict[str, Any] = self._config["energy"].get("offsets", {})
-        self.correction: Dict[Any, Any] = {}
+        self.correction: Dict[str, Any] = self._config["energy"].get("correction", {})

     @property
     def ntraces(self) -> int:
@@ -519,6 +520,7 @@ def calibrate(
         landmarks: np.ndarray = None,
         biases: np.ndarray = None,
         t: np.ndarray = None,
+        verbose: bool = True,
         **kwds,
     ) -> dict:
         """Calculate the functional mapping between time-of-flight and the energy
@@ -543,6 +545,8 @@ def calibrate(
                 calibration. Defaults to self.peaks.
             biases (np.ndarray, optional): Bias values. Defaults to self.biases.
             t (np.ndarray, optional): TOF values. Defaults to self.tof.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to True.
             **kwds: keyword arguments.
                 See available keywords for ``poly_energy_calibration()`` and
                 ``fit_energy_calibration()``
@@ -582,6 +586,7 @@ def calibrate(
                 ref_id=ref_id,
                 t=t,
                 energy_scale=energy_scale,
+                verbose=verbose,
                 **kwds,
             )
         elif method in ("lstsq", "lsqr"):
@@ -598,6 +603,7 @@ def calibrate(
         else:
             raise NotImplementedError()

+        self.calibration["creation_date"] = datetime.now().timestamp()
         return self.calibration

     def view(  # pylint: disable=dangerous-default-value
@@ -771,32 +777,13 @@ def view(  # pylint: disable=dangerous-default-value
             pbk.show(fig)

-    def get_current_calibration(self) -> dict:
-        """Return the current calibration dictionary.
-
-        If none is present, return the one from the config. If none is present there,
-        return an empty dictionary.
-
-        Returns:
-            dict: Calibration dictionary.
-        """
-        if self.calibration:
-            calibration = deepcopy(self.calibration)
-        else:
-            calibration = deepcopy(
-                self._config["energy"].get(
-                    "calibration",
-                    {},
-                ),
-            )
-        return calibration
-
     def append_energy_axis(
         self,
         df: Union[pd.DataFrame, dask.dataframe.DataFrame],
         tof_column: str = None,
         energy_column: str = None,
         calibration: dict = None,
+        verbose: bool = True,
         **kwds,
     ) -> Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]:
         """Calculate and append the energy axis to the events dataframe.
@@ -811,6 +798,8 @@ def append_energy_axis(
             calibration (dict, optional): Calibration dictionary. If provided,
                 overrides calibration from class or config.
                 Defaults to self.calibration or config["energy"]["calibration"].
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to True.
             **kwds: additional keyword arguments for the energy conversion. They are
                 added to the calibration dictionary.
@@ -834,11 +823,20 @@ def append_energy_axis(
         binwidth = kwds.pop("binwidth", self.binwidth)
         binning = kwds.pop("binning", self.binning)

+        # pylint: disable=duplicate-code
         if calibration is None:
-            calibration = self.get_current_calibration()
+            calibration = deepcopy(self.calibration)
+
+        if len(kwds) > 0:
+            for key, value in kwds.items():
+                calibration[key] = value
+            calibration["creation_date"] = datetime.now().timestamp()

-        for key, value in kwds.items():
-            calibration[key] = value
+        elif "creation_date" in calibration and verbose:
+            datestring = datetime.fromtimestamp(calibration["creation_date"]).strftime(
+                "%m/%d/%Y, %H:%M:%S",
+            )
+            print(f"Using energy calibration parameters generated on {datestring}")

         # try to determine calibration type if not provided
         if "calib_type" not in calibration:
@@ -1021,10 +1019,7 @@ def adjust_energy_correction(
             matplotlib.use("module://ipympl.backend_nbagg")

         if correction is None:
-            if self.correction:
-                correction = deepcopy(self.correction)
-            else:
-                correction = deepcopy(self._config["energy"].get("correction", {}))
+            correction = deepcopy(self.correction)

         if correction_type is not None:
             correction["correction_type"] = correction_type
@@ -1151,6 +1146,7 @@ def common_apply_func(apply: bool):  # pylint: disable=unused-argument
             self.correction["amplitude"] = correction["amplitude"]
             self.correction["center"] = correction["center"]
             self.correction["correction_type"] = correction["correction_type"]
+            self.correction["creation_date"] = datetime.now().timestamp()
             amplitude_slider.close()
             x_center_slider.close()
             y_center_slider.close()
@@ -1320,6 +1316,7 @@ def apply_energy_correction(
         correction_type: str = None,
         amplitude: float = None,
         correction: dict = None,
+        verbose: bool = True,
         **kwds,
     ) -> Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]:
         """Apply correction to the time-of-flight (TOF) axis of single-event data.
@@ -1345,6 +1342,8 @@ def apply_energy_correction(
             correction (dict, optional): Correction dictionary containing paramters
                 for the correction. Defaults to self.correction or
                 config["energy"]["correction"].
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to True.
             **kwds: Additional parameters to use for the correction:

                 - **x_column** (str): Name of the x column.
@@ -1364,29 +1363,35 @@ def apply_energy_correction(
             and Energy correction metadata dictionary.
         """
         if correction is None:
-            if self.correction:
-                correction = deepcopy(self.correction)
-            else:
-                correction = deepcopy(self._config["energy"].get("correction", {}))
-
-        if correction_type is not None:
-            correction["correction_type"] = correction_type
-
-        if amplitude is not None:
-            correction["amplitude"] = amplitude
+            correction = deepcopy(self.correction)

         x_column = kwds.pop("x_column", self.x_column)
         y_column = kwds.pop("y_column", self.y_column)

-        for key, value in kwds.items():
-            correction[key] = value
-
         if tof_column is None:
             tof_column = self.tof_column

         if new_tof_column is None:
             new_tof_column = self.corrected_tof_column

+        if correction_type is not None or amplitude is not None or len(kwds) > 0:
+            if correction_type is not None:
+                correction["correction_type"] = correction_type
+
+            if amplitude is not None:
+                correction["amplitude"] = amplitude
+
+            for key, value in kwds.items():
+                correction[key] = value
+
+            correction["creation_date"] = datetime.now().timestamp()
+
+        elif "creation_date" in correction and verbose:
+            datestring = datetime.fromtimestamp(correction["creation_date"]).strftime(
+                "%m/%d/%Y, %H:%M:%S",
+            )
+            print(f"Using energy correction parameters generated on {datestring}")
+
         missing_keys = {"correction_type", "center", "amplitude"} - set(correction.keys())
         if missing_keys:
             raise ValueError(f"Required correction parameters '{missing_keys}' missing!")
@@ -1469,12 +1474,14 @@ def align_sector(x):
     def add_offsets(
         self,
         df: Union[pd.DataFrame, dask.dataframe.DataFrame] = None,
+        offsets: Dict[str, Any] = None,
         constant: float = None,
         columns: Union[str, Sequence[str]] = None,
         weights: Union[float, Sequence[float]] = None,
         preserve_mean: Union[bool, Sequence[bool]] = False,
         reductions: Union[str, Sequence[str]] = None,
         energy_column: str = None,
+        verbose: bool = True,
     ) -> Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]:
         """Apply an offset to the energy column by the values of the provided columns.
@@ -1482,10 +1489,9 @@ def add_offsets(
             config file. If parameters are passed, they are used to generate a new offset
             dictionary and the offset is applied using the
             ``dfops.apply_offset_from_columns()`` function.
-        # TODO: This funcion can still be improved and needs testsing

         Args:
             df (Union[pd.DataFrame, dask.dataframe.DataFrame]): Dataframe to use.
+            offsets (Dict, optional): Dictionary of energy offset parameters.
             constant (float, optional): The constant to shift the energy axis by.
             columns (Union[str, Sequence[str]]): Name of the column(s) to apply the shift from.
             weights (Union[float, Sequence[float]]): weights to apply to the columns.
@@ -1497,109 +1503,143 @@ def add_offsets(
                 to the column to generate a single value for the whole dataset. If None, the shift
                 is applied per-dataframe-row. Defaults to None. Currently only "mean" is supported.
             energy_column (str, optional): Name of the column containing the energy values.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to True.

         Returns:
             dask.dataframe.DataFrame: Dataframe with the new columns.
             dict: Metadata dictionary.
         """
+        if offsets is None:
+            offsets = deepcopy(self.offsets)
+
         if energy_column is None:
             energy_column = self.energy_column

-        # if no parameters are passed, use config
-        if columns is None and constant is None:
-            # load from config
+        metadata: Dict[str, Any] = {
+            "applied": True,
+        }
+
+        # flip sign for binding energy scale
+        energy_scale = self.calibration.get("energy_scale", None)
+        if energy_scale is None:
+            raise ValueError("Energy scale not set. Cannot interpret the sign of the offset.")
+        if energy_scale not in ["binding", "kinetic"]:
+            raise ValueError(f"Invalid energy scale: {energy_scale}")
+        scale_sign: Literal[-1, 1] = -1 if energy_scale == "binding" else 1
+
+        if columns is not None or constant is not None:
+            # pylint:disable=duplicate-code
+            # use passed parameters, overwrite config
+            offsets = {}
+            offsets["creation_date"] = datetime.now().timestamp()
+            # column-based offsets
+            if columns is not None:
+                if weights is None:
+                    weights = 1
+                if isinstance(weights, (int, float, np.integer, np.floating)):
+                    weights = [weights]
+                if len(weights) == 1:
+                    weights = [weights[0]] * len(columns)
+                if not isinstance(weights, Sequence):
+                    raise TypeError(f"Invalid type for weights: {type(weights)}")
+                if not all(isinstance(s, (int, float, np.integer, np.floating)) for s in weights):
+                    raise TypeError(f"Invalid type for weights: {type(weights)}")
+
+                if isinstance(columns, str):
+                    columns = [columns]
+                if isinstance(preserve_mean, bool):
+                    preserve_mean = [preserve_mean] * len(columns)
+                if not isinstance(reductions, Sequence):
+                    reductions = [reductions]
+                if len(reductions) == 1:
+                    reductions = [reductions[0]] * len(columns)
+
+                # store in offsets dictionary
+                for col, weight, pmean, red in zip(columns, weights, preserve_mean, reductions):
+                    offsets[col] = {
+                        "weight": weight,
+                        "preserve_mean": pmean,
+                        "reduction": red,
+                    }
+
+            # constant offset
+            if isinstance(constant, (int, float, np.integer, np.floating)):
+                offsets["constant"] = constant
+            elif constant is not None:
+                raise TypeError(f"Invalid type for constant: {type(constant)}")
+
+        elif "creation_date" in offsets and verbose:
+            datestring = datetime.fromtimestamp(offsets["creation_date"]).strftime(
+                "%m/%d/%Y, %H:%M:%S",
+            )
+            print(f"Using energy offset parameters generated on {datestring}")
+
+        if len(offsets) > 0:
+            # unpack dictionary
+            # pylint: disable=duplicate-code
             columns = []
             weights = []
             preserve_mean = []
             reductions = []
-            for k, v in self.offsets.items():
+            if verbose:
+                print("Energy offset parameters:")
+            for k, v in offsets.items():
+                if k == "creation_date":
+                    continue
                 if k == "constant":
-                    constant = v
+                    # flip sign if binding energy scale
+                    constant = v * scale_sign
+                    if verbose:
+                        print(f"  Constant: {constant} ")
                 else:
                     columns.append(k)
                     try:
-                        weights.append(v["weight"])
-                    except KeyError as exc:
-                        raise KeyError(f"Missing weight for offset column {k} in config.") from exc
+                        weight = v["weight"]
+                    except KeyError:
+                        weight = 1
+                    if not isinstance(weight, (int, float, np.integer, np.floating)):
+                        raise TypeError(f"Invalid type for weight of column {k}: {type(weight)}")
+                    # flip sign if binding energy scale
+                    weight = weight * scale_sign
+                    weights.append(weight)
                     pm = v.get("preserve_mean", False)
                     if str(pm).lower() in ["false", "0", "no"]:
                         pm = False
                     elif str(pm).lower() in ["true", "1", "yes"]:
                         pm = True
                     preserve_mean.append(pm)
-                    rd = v.get("reduction", None)
-                    if str(rd).lower() == "none":
-                        rd = None
-                    reductions.append(rd)
-
-        # flip sign for binding energy scale
-        energy_scale = self.get_current_calibration().get("energy_scale", None)
-        if energy_scale is None:
-            raise ValueError("Energy scale not set. Cannot interpret the sign of the offset.")
-        if energy_scale not in ["binding", "kinetic"]:
-            raise ValueError(f"Invalid energy scale: {energy_scale}")
-        scale_sign: Literal[-1, 1] = -1 if energy_scale == "binding" else 1
-        # initialize metadata container
-        metadata: Dict[str, Any] = {
-            "applied": True,
-        }
-        # apply offset
-        if columns is not None:
-            # use passed parameters
-            if isinstance(weights, int):
-                weights = [weights]
-            elif not isinstance(weights, Sequence):
-                raise TypeError(f"Invalid type for weights: {type(weights)}")
-            if not all(isinstance(s, int) for s in weights):
-                raise TypeError(f"Invalid type for weights: {type(weights)}")
-            # flip weights if binding energy scale
-            weights = [s * scale_sign for s in weights]
-
-            df = dfops.offset_by_other_columns(
-                df=df,
-                target_column=energy_column,
-                offset_columns=columns,
-                weights=weights,
-                preserve_mean=preserve_mean,
-                reductions=reductions,
-                inplace=True,
-            )
-            metadata["energy_column"] = energy_column
-            metadata["columns"] = columns
-            metadata["weights"] = weights
-            metadata["preserve_mean"] = preserve_mean
-            metadata["reductions"] = reductions
-
-            # overwrite the current offset dictionary with the parameters used
-            if not isinstance(columns, Sequence):
-                columns = [columns]
-            if not isinstance(weights, Sequence):
-                weights = [weights]
-            if isinstance(preserve_mean, bool):
-                preserve_mean = [preserve_mean] * len(columns)
-            if not isinstance(reductions, Sequence):
-                reductions = [reductions]
-            if len(reductions) == 1:
-                reductions = [reductions[0]] * len(columns)
-
-            for col, weight, pmean, red in zip(columns, weights, preserve_mean, reductions):
-                self.offsets[col] = {
-                    "weight": weight,
-                    "preserve_mean": pmean,
-                    "reduction": red,
-                }
+                    red = v.get("reduction", None)
+                    if str(red).lower() in ["none", "null"]:
+                        red = None
+                    reductions.append(red)
+                    if verbose:
+                        print(
+                            f"  Column[{k}]: Weight={weight}, Preserve Mean: {pm}, ",
+                            f"Reductions: {red}.",
+                        )
+
+            if len(columns) > 0:
+                df = dfops.offset_by_other_columns(
+                    df=df,
+                    target_column=energy_column,
+                    offset_columns=columns,
+                    weights=weights,
+                    preserve_mean=preserve_mean,
+                    reductions=reductions,
+                )

             # apply constant
-        if isinstance(constant, (int, float, np.integer, np.floating)):
-            df[energy_column] = df.map_partitions(
-                # flip sign if binding energy scale
-                lambda x: x[energy_column] + constant * scale_sign,
-                meta=(energy_column, np.float64),
-            )
-            metadata["constant"] = constant
-            self.offsets["constant"] = constant
-        elif constant is not None:
-            raise TypeError(f"Invalid type for constant: {type(constant)}")
+            if constant:
+                if not isinstance(constant, (int, float, np.integer, np.floating)):
+                    raise TypeError(f"Invalid type for constant: {type(constant)}")
+                df[energy_column] = df.map_partitions(
+                    lambda x: x[energy_column] + constant,
+                    meta=(energy_column, np.float64),
+                )
+
+            self.offsets = offsets
+            metadata["offsets"] = offsets

         return df, metadata
@@ -2054,6 +2094,7 @@ def fit_energy_calibation(
    ref_energy: float = None,
    t: Union[List[float], np.ndarray] = None,
    energy_scale: str = "kinetic",
+    verbose: bool = True,
    **kwds,
 ) -> dict:
    """Energy calibration by nonlinear least squares fitting of spectral landmarks on
@@ -2076,12 +2117,16 @@ def fit_energy_calibation(
            - **'kinetic'**: increasing energy with decreasing TOF.
            - **'binding'**: increasing energy with increasing TOF.

-        t0 (float, optional): constrains and initial values for the fit parameter t0, corresponding
-            to the time of flight offset. Defaults to 1e-6.
-        E0 (float, optional): constrains and initial values for the fit parameter E0, corresponding
-            to the energy offset. Defaults to min(vals).
-        d (float, optional): constrains and initial values for the fit parameter d, corresponding
-            to the drift distance. Defaults to 1.
+        verbose (bool, optional): Option to print out diagnostic information.
+            Defaults to True.
+        **kwds: keyword arguments:
+
+            - **t0** (float): constrains and initial values for the fit parameter t0,
+              corresponding to the time of flight offset. Defaults to 1e-6.
+            - **E0** (float): constrains and initial values for the fit parameter E0,
+              corresponding to the energy offset. Defaults to min(vals).
+            - **d** (float): constrains and initial values for the fit parameter d,
+              corresponding to the drift distance. Defaults to 1.

    Returns:
        dict: A dictionary of fitting parameters including the following,
@@ -2147,7 +2192,8 @@ def residual(pars, time, data, binwidth, binning, energy_scale):
        fcn_args=(pos, vals, binwidth, binning, energy_scale),
    )
    result = fit.leastsq()
-    report_fit(result)
+    if verbose:
+        report_fit(result)

    # Construct the calibrating function
    pfunc = partial(
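The rewritten EnergyCalibrator.add_offsets above moves the binding/kinetic handling up front: a single scale_sign derived from the calibration's energy_scale is folded into both the per-column weights and the constant. A toy Python sketch of that sign convention (illustrative only, not part of the diff):

    import numpy as np

    def offset_energy(energy: np.ndarray, offset: float, energy_scale: str) -> np.ndarray:
        """Toy illustration of the scale_sign convention (not part of the diff)."""
        if energy_scale not in ("binding", "kinetic"):
            raise ValueError(f"Invalid energy scale: {energy_scale}")
        # On a binding-energy scale the axis direction is inverted, so the
        # offset sign is flipped before it is applied.
        scale_sign = -1 if energy_scale == "binding" else 1
        return energy + scale_sign * offset

    # offset_energy(np.array([1.0, 2.0]), 0.5, "binding") -> array([0.5, 1.5])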
""" - if features is None: - # loading config defauls - try: - features = np.asarray( - self._config["momentum"]["correction"]["feature_points"], - ) - rotsym = self._config["momentum"]["correction"]["rotation_symmetry"] - include_center = self._config["momentum"]["correction"]["include_center"] - if not include_center and len(features) > rotsym: - features = features[:rotsym, :] - except KeyError as exc: - raise ValueError( - "No valid landmarks defined, and no defaults found in configuration!", - ) from exc - self.rotsym = int(rotsym) self.rotsym_angle = int(360 / self.rotsym) self.arot = np.array([0] + [self.rotsym_angle] * (self.rotsym - 1)) @@ -650,13 +635,46 @@ def spline_warp_estimate( if self.pouter_ord is None: if self.pouter is not None: self.pouter_ord = po.pointset_order(self.pouter) + self.correction["creation_date"] = datetime.now().timestamp() else: - print("No landmarks defined, using config defaults.") - self.add_features() + try: + features = np.asarray( + self.correction["feature_points"], + ) + rotsym = self.correction["rotation_symmetry"] + include_center = self.correction["include_center"] + if not include_center and len(features) > rotsym: + features = features[:rotsym, :] + + if verbose: + if "creation_date" in self.correction: + datestring = datetime.fromtimestamp( + self.correction["creation_date"], + ).strftime( + "%m/%d/%Y, %H:%M:%S", + ) + print( + "No landmarks defined, using momentum correction parameters " + f"generated on {datestring}", + ) + else: + print( + "No landmarks defined, using momentum correction parameters " + "from config.", + ) + except KeyError as exc: + raise ValueError( + "No valid landmarks defined, and no landmarks found in configuration!", + ) from exc + + self.add_features(features=features, rotsym=rotsym, include_center=include_center) + + else: + self.correction["creation_date"] = datetime.now().timestamp() if use_center is None: try: - use_center = self._config["momentum"]["correction"]["use_center"] + use_center = self.correction["use_center"] except KeyError: use_center = True self.use_center = use_center @@ -714,13 +732,19 @@ def spline_warp_estimate( self.rdeform_field_bkp = self.rdeform_field self.cdeform_field_bkp = self.cdeform_field - self.correction["applied"] = True - self.correction["pouter"] = self.pouter_ord - self.correction["pcent"] = np.asarray(self.pcent) - self.correction["prefs"] = self.prefs - self.correction["ptargs"] = self.ptargs - self.correction["rotsym"] = self.rotsym + self.correction["outer_points"] = self.pouter_ord + self.correction["center_point"] = np.asarray(self.pcent) + self.correction["reference_points"] = self.prefs + self.correction["target_points"] = self.ptargs + self.correction["rotation_symmetry"] = self.rotsym self.correction["use_center"] = self.use_center + self.correction["include_center"] = self.include_center + if self.include_center: + self.correction["feature_points"] = np.concatenate( + (self.pouter_ord, np.asarray([self.pcent])), + ) + else: + self.correction["feature_points"] = self.pouter_ord if self.slice is not None: self.slice_corrected = corrected_image @@ -969,27 +993,19 @@ def coordinate_transform( def pose_adjustment( self, - scale: float = 1, - xtrans: float = 0, - ytrans: float = 0, - angle: float = 0, + transformations: Dict[str, Any] = None, apply: bool = False, reset: bool = True, verbose: bool = True, + **kwds, ): """Interactive panel to adjust transformations that are applied to the image. 
Applies first a scaling, next a x/y translation, and last a rotation around the center of the image (pixel 256/256). Args: - scale (float, optional): - Initial value of the scaling slider. Defaults to 1. - xtrans (float, optional): - Initial value of the xtrans slider. Defaults to 0. - ytrans (float, optional): - Initial value of the ytrans slider. Defaults to 0. - angle (float, optional): - Initial value of the angle slider. Defaults to 0. + transformations (dict, optional): Dictionary with transformations. + Defaults to self.transformations or config["momentum"]["transformtions"]. apply (bool, optional): Option to directly apply the provided transformations. Defaults to False. @@ -997,9 +1013,22 @@ def pose_adjustment( Option to reset the correction before transformation. Defaults to True. verbose (bool, optional): Option to report the performed transformations. Defaults to True. + **kwds: Keyword parameters defining defaults for the transformations: + + - **scale** (float): Initial value of the scaling slider. + - **xtrans** (float): Initial value of the xtrans slider. + - **ytrans** (float): Initial value of the ytrans slider. + - **angle** (float): Initial value of the angle slider. """ matplotlib.use("module://ipympl.backend_nbagg") - source_image = self.slice_corrected + if self.slice_corrected is None or not self.slice_corrected.any(): + if self.slice is None or not self.slice.any(): + self.slice = np.zeros(self._config["momentum"]["bins"][0:2]) + source_image = self.slice + plot = False + else: + source_image = self.slice_corrected + plot = True transformed_image = source_image @@ -1010,25 +1039,41 @@ def pose_adjustment( else: self.reset_deformation() - fig, ax = plt.subplots(1, 1) - img = ax.imshow(transformed_image.T, origin="lower", cmap="terrain_r") center = self._config["momentum"]["center_pixel"] - ax.axvline(x=center[0]) - ax.axhline(y=center[1]) + if plot: + fig, ax = plt.subplots(1, 1) + img = ax.imshow(transformed_image.T, origin="lower", cmap="terrain_r") + ax.axvline(x=center[0]) + ax.axhline(y=center[1]) + + if transformations is None: + transformations = deepcopy(self.transformations) + + if len(kwds) > 0: + for key, value in kwds.items(): + transformations[key] = value + + elif "creation_date" in transformations and verbose: + datestring = datetime.fromtimestamp(transformations["creation_date"]).strftime( + "%m/%d/%Y, %H:%M:%S", + ) + print(f"Using transformation parameters generated on {datestring}") def update(scale: float, xtrans: float, ytrans: float, angle: float): transformed_image = source_image if scale != 1: - self.transformations["scale"] = scale + transformations["scale"] = scale transformed_image = self.coordinate_transform( image=transformed_image, transform_type="scaling", xscale=scale, yscale=scale, ) + if xtrans != 0: + transformations["xtrans"] = xtrans + if ytrans != 0: + transformations["ytrans"] = ytrans if xtrans != 0 or ytrans != 0: - self.transformations["xtrans"] = xtrans - self.transformations["ytrans"] = ytrans transformed_image = self.coordinate_transform( image=transformed_image, transform_type="translation", @@ -1036,41 +1081,52 @@ def update(scale: float, xtrans: float, ytrans: float, angle: float): ytrans=ytrans, ) if angle != 0: - self.transformations["angle"] = angle + transformations["angle"] = angle transformed_image = self.coordinate_transform( image=transformed_image, transform_type="rotation", angle=angle, center=center, ) - img.set_data(transformed_image.T) - axmin = np.min(transformed_image, axis=(0, 1)) - axmax = 
np.max(transformed_image, axis=(0, 1)) - if axmin < axmax: - img.set_clim(axmin, axmax) - fig.canvas.draw_idle() - - update(scale=scale, xtrans=xtrans, ytrans=ytrans, angle=angle) + if plot: + img.set_data(transformed_image.T) + axmin = np.min(transformed_image, axis=(0, 1)) + axmax = np.max(transformed_image, axis=(0, 1)) + if axmin < axmax: + img.set_clim(axmin, axmax) + fig.canvas.draw_idle() + + update( + scale=transformations.get("scale", 1), + xtrans=transformations.get("xtrans", 0), + ytrans=transformations.get("ytrans", 0), + angle=transformations.get("angle", 0), + ) scale_slider = ipw.FloatSlider( - value=scale, + value=transformations.get("scale", 1), min=0.8, max=1.2, step=0.01, ) xtrans_slider = ipw.FloatSlider( - value=xtrans, + value=transformations.get("xtrans", 0), min=-200, max=200, step=1, ) ytrans_slider = ipw.FloatSlider( - value=ytrans, + value=transformations.get("ytrans", 0), min=-200, max=200, step=1, ) - angle_slider = ipw.FloatSlider(value=angle, min=-180, max=180, step=1) + angle_slider = ipw.FloatSlider( + value=transformations.get("angle", 0), + min=-180, + max=180, + step=1, + ) results_box = ipw.Output() ipw.interact( update, @@ -1081,60 +1137,64 @@ def update(scale: float, xtrans: float, ytrans: float, angle: float): ) def apply_func(apply: bool): # pylint: disable=unused-argument - if self.transformations.get("scale", 1) != 1: + if transformations.get("scale", 1) != 1: self.coordinate_transform( transform_type="scaling", - xscale=self.transformations["scale"], - yscale=self.transformations["scale"], + xscale=transformations["scale"], + yscale=transformations["scale"], keep=True, ) if verbose: with results_box: - print(f"Applied scaling with scale={self.transformations['scale']}.") - if ( - self.transformations.get("xtrans", 0) != 0 - or self.transformations.get("ytrans", 0) != 0 - ): + print(f"Applied scaling with scale={transformations['scale']}.") + if transformations.get("xtrans", 0) != 0 or transformations.get("ytrans", 0) != 0: self.coordinate_transform( transform_type="translation", - xtrans=self.transformations["xtrans"], - ytrans=self.transformations["ytrans"], + xtrans=transformations.get("xtrans", 0), + ytrans=transformations.get("ytrans", 0), keep=True, ) if verbose: with results_box: print( - f"Applied translation with (xtrans={self.transformations['xtrans']},", - f"ytrans={self.transformations['ytrans']}).", + f"Applied translation with (xtrans={transformations.get('xtrans', 0)},", + f"ytrans={transformations.get('ytrans', 0)}).", ) - if self.transformations.get("angle", 0) != 0: + if transformations.get("angle", 0) != 0: self.coordinate_transform( transform_type="rotation", - angle=self.transformations["angle"], + angle=transformations["angle"], center=center, keep=True, ) if verbose: with results_box: - print(f"Applied rotation with angle={self.transformations['angle']}.") + print(f"Applied rotation with angle={transformations['angle']}.") display(results_box) - img.set_data(self.slice_transformed.T) - axmin = np.min(self.slice_transformed, axis=(0, 1)) - axmax = np.max(self.slice_transformed, axis=(0, 1)) - if axmin < axmax: - img.set_clim(axmin, axmax) - fig.canvas.draw_idle() - - plt.figure() - subs = 20 - plt.title("Deformation field") - plt.scatter( - self.rdeform_field[::subs, ::subs].ravel(), - self.cdeform_field[::subs, ::subs].ravel(), - c="b", - ) + if plot: + img.set_data(self.slice_transformed.T) + axmin = np.min(self.slice_transformed, axis=(0, 1)) + axmax = np.max(self.slice_transformed, axis=(0, 1)) + if axmin < axmax: + 
img.set_clim(axmin, axmax) + fig.canvas.draw_idle() + + if transformations != self.transformations: + transformations["creation_date"] = datetime.now().timestamp() + self.transformations = transformations + + if verbose: + plt.figure() + subs = 20 + plt.title("Deformation field") + plt.scatter( + self.rdeform_field[::subs, ::subs].ravel(), + self.cdeform_field[::subs, ::subs].ravel(), + c="b", + ) + plt.show() scale_slider.close() xtrans_slider.close() ytrans_slider.close() @@ -1145,7 +1205,8 @@ def apply_func(apply: bool): # pylint: disable=unused-argument display(apply_button) apply_button.on_click(apply_func) - plt.show() + if plot: + plt.show() if apply: apply_func(True) @@ -1559,6 +1620,7 @@ def calibrate( # Assemble into return dictionary self.calibration = {} + self.calibration["creation_date"] = datetime.now().timestamp() self.calibration["kx_axis"] = k_row self.calibration["ky_axis"] = k_col self.calibration["grid"] = (k_rowgrid, k_colgrid) @@ -1585,6 +1647,7 @@ def apply_corrections( y_column: str = None, new_x_column: str = None, new_y_column: str = None, + verbose: bool = True, **kwds, ) -> Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]: """Calculate and replace the X and Y values with their distortion-corrected @@ -1603,6 +1666,8 @@ def apply_corrections( new_y_column (str, optional): Label of the 'Y' column after momentum distortion correction. Defaults to config["momentum"]["corrected_y_column"]. + verbose (bool, optional): Option to report the used landmarks for correction. + Defaults to True. **kwds: Keyword arguments: - **dfield**: Inverse dfield @@ -1627,9 +1692,15 @@ def apply_corrections( if self.inverse_dfield is None or self.dfield_updated: if self.rdeform_field is None and self.cdeform_field is None: - # Apply defaults - self.add_features() - self.spline_warp_estimate() + if self.correction or self.transformations: + if self.correction: + # Generate spline warp from class features or config + self.spline_warp_estimate(verbose=verbose) + if self.transformations: + # Apply config pose adjustments + self.pose_adjustment() + else: + raise ValueError("No corrections or transformations defined!") self.inverse_dfield = generate_inverse_dfield( self.rdeform_field, @@ -1661,12 +1732,19 @@ def gather_correction_metadata(self) -> dict: dict: generated correction metadata dictionary. 
""" metadata: Dict[Any, Any] = {} - if self.correction["applied"]: + if len(self.correction) > 0: metadata["correction"] = self.correction + metadata["correction"]["applied"] = True metadata["correction"]["cdeform_field"] = self.cdeform_field metadata["correction"]["rdeform_field"] = self.rdeform_field - if self.adjust_params["applied"]: + try: + metadata["correction"]["creation_date"] = self.correction["creation_date"] + except KeyError: + pass + if len(self.adjust_params) > 0: metadata["registration"] = self.adjust_params + metadata["registration"]["creation_date"] = datetime.now().timestamp() + metadata["registration"]["applied"] = True metadata["registration"]["depends_on"] = ( "/entry/process/registration/tranformations/rot_z" if "angle" in metadata["registration"] and metadata["registration"]["angle"] @@ -1774,18 +1852,12 @@ def append_k_axis( # pylint: disable=duplicate-code if calibration is None: - if self.calibration: - calibration = deepcopy(self.calibration) - else: - calibration = deepcopy( - self._config["momentum"].get( - "calibration", - {}, - ), - ) + calibration = deepcopy(self.calibration) - for key, value in kwds.items(): - calibration[key] = value + if len(kwds) > 0: + for key, value in kwds.items(): + calibration[key] = value + calibration["creation_date"] = datetime.now().timestamp() try: (df[new_x_column], df[new_y_column]) = detector_coordiantes_2_k_koordinates( @@ -1822,6 +1894,10 @@ def gather_calibration_metadata(self, calibration: dict = None) -> dict: if calibration is None: calibration = self.calibration metadata: Dict[Any, Any] = {} + try: + metadata["creation_date"] = calibration["creation_date"] + except KeyError: + pass metadata["applied"] = True metadata["calibration"] = calibration # create empty calibrated axis entries, if they are not present. diff --git a/sed/core/processor.py b/sed/core/processor.py index 088b6b12..9bd86dec 100644 --- a/sed/core/processor.py +++ b/sed/core/processor.py @@ -2,6 +2,7 @@ """ import pathlib +from datetime import datetime from typing import Any from typing import cast from typing import Dict @@ -55,8 +56,12 @@ class SedProcessor: the config. Defaults to None. folder (str, optional): Folder containing files to pass to the loader defined in the config. Defaults to None. + runs (Sequence[str], optional): List of run identifiers to pass to the loader + defined in the config. Defaults to None. collect_metadata (bool): Option to collect metadata from files. Defaults to False. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"] or False. **kwds: Keyword arguments passed to the reader. """ @@ -69,7 +74,7 @@ def __init__( folder: str = None, runs: Sequence[str] = None, collect_metadata: bool = False, - verbose: bool = False, + verbose: bool = None, **kwds, ): """Processor class of sed. Contains wrapper functions defining a work flow @@ -87,8 +92,10 @@ def __init__( defined in the config. Defaults to None. runs (Sequence[str], optional): List of run identifiers to pass to the loader defined in the config. Defaults to None. - collect_metadata (bool): Option to collect metadata from files. + collect_metadata (bool, optional): Option to collect metadata from files. Defaults to False. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"] or False. **kwds: Keyword arguments passed to parse_config and to the reader. 
""" config_kwds = { @@ -102,7 +109,10 @@ def __init__( num_cores = N_CPU - 1 self._config["binning"]["num_cores"] = num_cores - self.verbose = verbose + if verbose is None: + self.verbose = self._config["core"].get("verbose", False) + else: + self.verbose = verbose self._dataframe: Union[pd.DataFrame, ddf.DataFrame] = None self._timed_dataframe: Union[pd.DataFrame, ddf.DataFrame] = None @@ -351,6 +361,8 @@ def load( loader. Defaults to None. folder (str, optional): Folder path to pass to the loader. Defaults to None. + collect_metadata (bool, optional): Option for collecting metadata in the reader. + **kwds: Keyword parameters passed to the reader. Raises: ValueError: Raised if no valid input is provided. @@ -516,8 +528,10 @@ def define_features( Defaults to False. include_center (bool, optional): Option to include a point at the center in the feature list. Defaults to True. - ***kwds: Keyword arguments for MomentumCorrector.feature_extract() and - MomentumCorrector.feature_select() + apply (bool, optional): Option to directly apply the values and select the + slice. Defaults to False. + **kwds: Keyword arguments for ``MomentumCorrector.feature_extract()`` and + ``MomentumCorrector.feature_select()``. """ if auto_detect: # automatic feature selection sigma = kwds.pop("sigma", self._config["momentum"]["sigma"]) @@ -548,6 +562,7 @@ def define_features( def generate_splinewarp( self, use_center: bool = None, + verbose: bool = None, **kwds, ): """3. Step of the distortion correction workflow: Generate the correction @@ -556,11 +571,16 @@ def generate_splinewarp( Args: use_center (bool, optional): Option to use the position of the center point in the correction. Default is read from config, or set to True. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"]. **kwds: Keyword arguments for MomentumCorrector.spline_warp_estimate(). 
""" - self.mc.spline_warp_estimate(use_center=use_center, **kwds) + if verbose is None: + verbose = self.verbose - if self.mc.slice is not None: + self.mc.spline_warp_estimate(use_center=use_center, verbose=verbose, **kwds) + + if self.mc.slice is not None and verbose: print("Original slice with reference features") self.mc.view(annotated=True, backend="bokeh", crosshair=True) @@ -597,40 +617,44 @@ def save_splinewarp( """ if filename is None: filename = "sed_config.yaml" - points = [] - if self.mc.pouter_ord is not None: # if there is any calibration info - try: - for point in self.mc.pouter_ord: - points.append([float(i) for i in point]) - if self.mc.include_center: - points.append([float(i) for i in self.mc.pcent]) - except AttributeError as exc: - raise AttributeError( - "Momentum correction parameters not found, need to generate parameters first!", - ) from exc - config = { - "momentum": { - "correction": { - "rotation_symmetry": self.mc.rotsym, - "feature_points": points, - "include_center": self.mc.include_center, - "use_center": self.mc.use_center, - }, - }, - } - save_config(config, filename, overwrite) + if len(self.mc.correction) == 0: + raise ValueError("No momentum correction parameters to save!") + correction = {} + for key, value in self.mc.correction.items(): + if key in ["reference_points", "target_points", "cdeform_field", "rdeform_field"]: + continue + if key in ["use_center", "rotation_symmetry"]: + correction[key] = value + elif key == "center_point": + correction[key] = [float(i) for i in value] + elif key in ["outer_points", "feature_points"]: + correction[key] = [] + for point in value: + correction[key].append([float(i) for i in point]) + else: + correction[key] = float(value) + + if "creation_date" not in correction: + correction["creation_date"] = datetime.now().timestamp() + + config = { + "momentum": { + "correction": correction, + }, + } + save_config(config, filename, overwrite) + print(f'Saved momentum correction parameters to "{filename}".') # 4. Pose corrections. Provide interactive interface for correcting # scaling, shift and rotation def pose_adjustment( self, - scale: float = 1, - xtrans: float = 0, - ytrans: float = 0, - angle: float = 0, + transformations: Dict[str, Any] = None, apply: bool = False, use_correction: bool = True, reset: bool = True, + verbose: bool = None, + **kwds, ): """3. step of the distortion correction workflow: Generate an interactive panel to adjust affine transformations that are applied to the image. Applies first @@ -638,27 +662,30 @@ def pose_adjustment( the image. Args: - scale (float, optional): Initial value of the scaling slider. - Defaults to 1. - xtrans (float, optional): Initial value of the xtrans slider. - Defaults to 0. - ytrans (float, optional): Initial value of the ytrans slider. - Defaults to 0. - angle (float, optional): Initial value of the angle slider. - Defaults to 0. + transformations (dict, optional): Dictionary with transformations. + Defaults to self.transformations or config["momentum"]["transformtions"]. apply (bool, optional): Option to directly apply the provided transformations. Defaults to False. use_correction (bool, option): Whether to use the spline warp correction or not. Defaults to True. - reset (bool, optional): - Option to reset the correction before transformation. Defaults to True. + reset (bool, optional): Option to reset the correction before transformation. + Defaults to True. + verbose (bool, optional): Option to print out diagnostic information. 
+ Defaults to config["core"]["verbose"]. + **kwds: Keyword parameters defining defaults for the transformations: + + - **scale** (float): Initial value of the scaling slider. + - **xtrans** (float): Initial value of the xtrans slider. + - **ytrans** (float): Initial value of the ytrans slider. + - **angle** (float): Initial value of the angle slider. """ + if verbose is None: + verbose = self.verbose + # Generate homomorphy as default if no distortion correction has been applied if self.mc.slice_corrected is None: if self.mc.slice is None: - raise ValueError( - "No slice for corrections and transformations loaded!", - ) + self.mc.slice = np.zeros(self._config["momentum"]["bins"][0:2]) self.mc.slice_corrected = self.mc.slice if not use_correction: @@ -666,59 +693,116 @@ def pose_adjustment( if self.mc.cdeform_field is None or self.mc.rdeform_field is None: # Generate distortion correction from config values - self.mc.add_features() - self.mc.spline_warp_estimate() + self.mc.spline_warp_estimate(verbose=verbose) self.mc.pose_adjustment( - scale=scale, - xtrans=xtrans, - ytrans=ytrans, - angle=angle, + transformations=transformations, apply=apply, reset=reset, + verbose=verbose, + **kwds, ) + # 4a. Save pose adjustment parameters to config file. + def save_transformations( + self, + filename: str = None, + overwrite: bool = False, + ): + """Save the pose adjustment parameters to the folder config file. + + Args: + filename (str, optional): Filename of the config dictionary to save to. + Defaults to "sed_config.yaml" in the current folder. + overwrite (bool, optional): Option to overwrite the present dictionary. + Defaults to False. + """ + if filename is None: + filename = "sed_config.yaml" + if len(self.mc.transformations) == 0: + raise ValueError("No momentum transformation parameters to save!") + transformations = {} + for key, value in self.mc.transformations.items(): + transformations[key] = float(value) + + if "creation_date" not in transformations: + transformations["creation_date"] = datetime.now().timestamp() + + config = { + "momentum": { + "transformations": transformations, + }, + } + save_config(config, filename, overwrite) + print(f'Saved momentum transformation parameters to "{filename}".') + # 5. Apply the momentum correction to the dataframe def apply_momentum_correction( self, preview: bool = False, + verbose: bool = None, + **kwds, ): """Applies the distortion correction and pose adjustment (optional) to the dataframe. Args: - rdeform_field (np.ndarray, optional): Row deformation field. - Defaults to None. - cdeform_field (np.ndarray, optional): Column deformation field. - Defaults to None. - inv_dfield (np.ndarray, optional): Inverse deformation field. - Defaults to None. - preview (bool): Option to preview the first elements of the data frame. + preview (bool, optional): Option to preview the first elements of the data frame. + Defaults to False. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"]. + **kwds: Keyword parameters for ``MomentumCorrector.apply_correction``: + + - **rdeform_field** (np.ndarray, optional): Row deformation field. + - **cdeform_field** (np.ndarray, optional): Column deformation field. + - **inv_dfield** (np.ndarray, optional): Inverse deformation field. 
+ """ + if verbose is None: + verbose = self.verbose + + x_column = self._config["dataframe"]["x_column"] + y_column = self._config["dataframe"]["y_column"] + if self._dataframe is not None: - print("Adding corrected X/Y columns to dataframe:") - self._dataframe, metadata = self.mc.apply_corrections( + if verbose: + print("Adding corrected X/Y columns to dataframe:") + df, metadata = self.mc.apply_corrections( df=self._dataframe, + verbose=verbose, + **kwds, ) - if self._timed_dataframe is not None: - if ( - self._config["dataframe"]["x_column"] in self._timed_dataframe.columns - and self._config["dataframe"]["y_column"] in self._timed_dataframe.columns - ): - self._timed_dataframe, _ = self.mc.apply_corrections( - self._timed_dataframe, - ) + if ( + self._timed_dataframe is not None + and x_column in self._timed_dataframe.columns + and y_column in self._timed_dataframe.columns + ): + tdf, _ = self.mc.apply_corrections( + self._timed_dataframe, + verbose=False, + **kwds, + ) + # Add Metadata self._attributes.add( metadata, "momentum_correction", duplicate_policy="merge", ) - if preview: - print(self._dataframe.head(10)) - else: - if self.verbose: - print(self._dataframe) + self._dataframe = df + if ( + self._timed_dataframe is not None + and x_column in self._timed_dataframe.columns + and y_column in self._timed_dataframe.columns + ): + self._timed_dataframe = tdf + else: + raise ValueError("No dataframe loaded!") + if preview: + print(self._dataframe.head(10)) + else: + if self.verbose: + print(self._dataframe) # Momentum calibration work flow # 1. Calculate momentum calibration @@ -789,23 +873,17 @@ def save_momentum_calibration( """ if filename is None: filename = "sed_config.yaml" + if len(self.mc.calibration) == 0: + raise ValueError("No momentum calibration parameters to save!") calibration = {} - try: - for key in [ - "kx_scale", - "ky_scale", - "x_center", - "y_center", - "rstart", - "cstart", - "rstep", - "cstep", - ]: - calibration[key] = float(self.mc.calibration[key]) - except KeyError as exc: - raise KeyError( - "Momentum calibration parameters not found, need to generate parameters first!", - ) from exc + for key, value in self.mc.calibration.items(): + if key in ["kx_axis", "ky_axis", "grid", "extent"]: + continue + + calibration[key] = float(value) + + if "creation_date" not in calibration: + calibration["creation_date"] = datetime.now().timestamp() config = {"momentum": {"calibration": calibration}} save_config(config, filename, overwrite) @@ -816,6 +894,8 @@ def apply_momentum_calibration( self, calibration: dict = None, preview: bool = False, + verbose: bool = None, + **kwds, ): """2. step of the momentum calibration work flow: Apply the momentum calibration stored in the class to the dataframe. If corrected X/Y axis exist, @@ -824,23 +904,36 @@ def apply_momentum_calibration( Args: calibration (dict, optional): Optional dictionary with calibration data to use. Defaults to None. - preview (bool): Option to preview the first elements of the data frame. + preview (bool, optional): Option to preview the first elements of the data frame. + Defaults to False. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"]. + **kwds: Keyword args passed to ``DelayCalibrator.append_delay_axis``. 
""" + if verbose is None: + verbose = self.verbose + + x_column = self._config["dataframe"]["x_column"] + y_column = self._config["dataframe"]["y_column"] + if self._dataframe is not None: - print("Adding kx/ky columns to dataframe:") - self._dataframe, metadata = self.mc.append_k_axis( + if verbose: + print("Adding kx/ky columns to dataframe:") + df, metadata = self.mc.append_k_axis( df=self._dataframe, calibration=calibration, + **kwds, ) - if self._timed_dataframe is not None: - if ( - self._config["dataframe"]["x_column"] in self._timed_dataframe.columns - and self._config["dataframe"]["y_column"] in self._timed_dataframe.columns - ): - self._timed_dataframe, _ = self.mc.append_k_axis( - df=self._timed_dataframe, - calibration=calibration, - ) + if ( + self._timed_dataframe is not None + and x_column in self._timed_dataframe.columns + and y_column in self._timed_dataframe.columns + ): + tdf, _ = self.mc.append_k_axis( + df=self._timed_dataframe, + calibration=calibration, + **kwds, + ) # Add Metadata self._attributes.add( @@ -848,11 +941,20 @@ def apply_momentum_calibration( "momentum_calibration", duplicate_policy="merge", ) - if preview: - print(self._dataframe.head(10)) - else: - if self.verbose: - print(self._dataframe) + self._dataframe = df + if ( + self._timed_dataframe is not None + and x_column in self._timed_dataframe.columns + and y_column in self._timed_dataframe.columns + ): + self._timed_dataframe = tdf + else: + raise ValueError("No dataframe loaded!") + if preview: + print(self._dataframe.head(10)) + else: + if self.verbose: + print(self._dataframe) # Energy correction workflow # 1. Adjust the energy correction parameters @@ -884,6 +986,7 @@ def adjust_energy_correction( correction. Defaults to config["energy"]["correction"]["center"]. apply (bool, optional): Option to directly apply the provided or default correction parameters. Defaults to False. + **kwds: Keyword parameters passed to ``EnergyCalibrator.adjust_energy_correction()``. """ if self._pre_binned is None: print( @@ -916,19 +1019,19 @@ def save_energy_correction( """ if filename is None: filename = "sed_config.yaml" + if len(self.ec.correction) == 0: + raise ValueError("No energy correction parameters to save!") correction = {} - try: - for key, val in self.ec.correction.items(): - if key == "correction_type": - correction[key] = val - elif key == "center": - correction[key] = [float(i) for i in val] - else: - correction[key] = float(val) - except AttributeError as exc: - raise AttributeError( - "Energy correction parameters not found, need to generate parameters first!", - ) from exc + for key, val in self.ec.correction.items(): + if key == "correction_type": + correction[key] = val + elif key == "center": + correction[key] = [float(i) for i in val] + else: + correction[key] = float(val) + + if "creation_date" not in correction: + correction["creation_date"] = datetime.now().timestamp() config = {"energy": {"correction": correction}} save_config(config, filename, overwrite) @@ -939,6 +1042,7 @@ def apply_energy_correction( self, correction: dict = None, preview: bool = False, + verbose: bool = None, **kwds, ): """2. step of the energy correction workflow: Apply the enery correction @@ -947,38 +1051,50 @@ def apply_energy_correction( Args: correction (dict, optional): Dictionary containing the correction parameters. Defaults to config["energy"]["calibration"]. - preview (bool): Option to preview the first elements of the data frame. 
- **kwds: - Keyword args passed to ``EnergyCalibrator.apply_energy_correction``. - preview (bool): Option to preview the first elements of the data frame. + preview (bool, optional): Option to preview the first elements of the data frame. + Defaults to False. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"]. **kwds: - Keyword args passed to ``EnergyCalibrator.apply_energy_correction``. + Keyword args passed to ``EnergyCalibrator.apply_energy_correction()``. """ + if verbose is None: + verbose = self.verbose + + tof_column = self._config["dataframe"]["tof_column"] + if self._dataframe is not None: - print("Applying energy correction to dataframe...") - self._dataframe, metadata = self.ec.apply_energy_correction( + if verbose: + print("Applying energy correction to dataframe...") + df, metadata = self.ec.apply_energy_correction( df=self._dataframe, correction=correction, + verbose=verbose, **kwds, ) - if self._timed_dataframe is not None: - if self._config["dataframe"]["tof_column"] in self._timed_dataframe.columns: - self._timed_dataframe, _ = self.ec.apply_energy_correction( - df=self._timed_dataframe, - correction=correction, - **kwds, - ) + if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns: + tdf, _ = self.ec.apply_energy_correction( + df=self._timed_dataframe, + correction=correction, + verbose=False, + **kwds, + ) # Add Metadata self._attributes.add( metadata, "energy_correction", ) - if preview: - print(self._dataframe.head(10)) - else: - if self.verbose: - print(self._dataframe) + self._dataframe = df + if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns: + self._timed_dataframe = tdf + else: + raise ValueError("No dataframe loaded!") + if preview: + print(self._dataframe.head(10)) + else: + if verbose: + print(self._dataframe) # Energy calibrator workflow # 1. Load and normalize data @@ -1095,7 +1211,7 @@ def find_bias_peaks( Args: ranges (Union[List[Tuple], Tuple]): Tuple of TOF values indicating a range. Alternatively, a list of ranges for all traces can be given. - refid (int, optional): The id of the trace the range refers to. + ref_id (int, optional): The id of the trace the range refers to. Defaults to 0. infer_others (bool, optional): Whether to determine the range for the other traces. Defaults to True. @@ -1154,6 +1270,7 @@ def calibrate_energy_axis( ref_energy: float, method: str = None, energy_scale: str = None, + verbose: bool = None, **kwds, ): """3. Step of the energy calibration workflow: Calculate the calibration @@ -1178,7 +1295,13 @@ def calibrate_energy_axis( - **'binding'**: increasing energy with increasing TOF. Defaults to config["energy"]["energy_scale"] + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"]. + **kwds**: Keyword parameters passed to ``EnergyCalibrator.calibrate()``. 
""" + if verbose is None: + verbose = self.verbose + if method is None: method = self._config["energy"]["calibration_method"] @@ -1190,45 +1313,47 @@ def calibrate_energy_axis( ref_energy=ref_energy, method=method, energy_scale=energy_scale, + verbose=verbose, **kwds, ) - print("Quality of Calibration:") - self.ec.view( - traces=self.ec.traces_normed, - xaxis=self.ec.calibration["axis"], - align=True, - energy_scale=energy_scale, - backend="bokeh", - ) - print("E/TOF relationship:") - self.ec.view( - traces=self.ec.calibration["axis"][None, :], - xaxis=self.ec.tof, - backend="matplotlib", - show_legend=False, - ) - if energy_scale == "kinetic": - plt.scatter( - self.ec.peaks[:, 0], - -(self.ec.biases - self.ec.biases[ref_id]) + ref_energy, - s=50, - c="k", - ) - elif energy_scale == "binding": - plt.scatter( - self.ec.peaks[:, 0], - self.ec.biases - self.ec.biases[ref_id] + ref_energy, - s=50, - c="k", + if verbose: + print("Quality of Calibration:") + self.ec.view( + traces=self.ec.traces_normed, + xaxis=self.ec.calibration["axis"], + align=True, + energy_scale=energy_scale, + backend="bokeh", ) - else: - raise ValueError( - 'energy_scale needs to be either "binding" or "kinetic"', - f", got {energy_scale}.", + print("E/TOF relationship:") + self.ec.view( + traces=self.ec.calibration["axis"][None, :], + xaxis=self.ec.tof, + backend="matplotlib", + show_legend=False, ) - plt.xlabel("Time-of-flight", fontsize=15) - plt.ylabel("Energy (eV)", fontsize=15) - plt.show() + if energy_scale == "kinetic": + plt.scatter( + self.ec.peaks[:, 0], + -(self.ec.biases - self.ec.biases[ref_id]) + ref_energy, + s=50, + c="k", + ) + elif energy_scale == "binding": + plt.scatter( + self.ec.peaks[:, 0], + self.ec.biases - self.ec.biases[ref_id] + ref_energy, + s=50, + c="k", + ) + else: + raise ValueError( + 'energy_scale needs to be either "binding" or "kinetic"', + f", got {energy_scale}.", + ) + plt.xlabel("Time-of-flight", fontsize=15) + plt.ylabel("Energy (eV)", fontsize=15) + plt.show() # 3a. Save energy calibration parameters to config file. def save_energy_calibration( @@ -1246,21 +1371,22 @@ def save_energy_calibration( """ if filename is None: filename = "sed_config.yaml" + if len(self.ec.calibration) == 0: + raise ValueError("No energy calibration parameters to save!") calibration = {} - try: - for key, value in self.ec.calibration.items(): - if key in ["axis", "refid", "Tmat", "bvec"]: - continue - if key == "energy_scale": - calibration[key] = value - elif key == "coeffs": - calibration[key] = [float(i) for i in value] - else: - calibration[key] = float(value) - except AttributeError as exc: - raise AttributeError( - "Energy calibration parameters not found, need to generate parameters first!", - ) from exc + for key, value in self.ec.calibration.items(): + if key in ["axis", "refid", "Tmat", "bvec"]: + continue + if key == "energy_scale": + calibration[key] = value + elif key == "coeffs": + calibration[key] = [float(i) for i in value] + else: + calibration[key] = float(value) + + if "creation_date" not in calibration: + calibration["creation_date"] = datetime.now().timestamp() + config = {"energy": {"calibration": calibration}} save_config(config, filename, overwrite) print(f'Saved energy calibration parameters to "{filename}".') @@ -1270,6 +1396,7 @@ def append_energy_axis( self, calibration: dict = None, preview: bool = False, + verbose: bool = None, **kwds, ): """4. 
step of the energy calibration workflow: Apply the calibration function @@ -1282,23 +1409,32 @@ def append_energy_axis( parameters. Overrides calibration from class or config. Defaults to None. preview (bool): Option to preview the first elements of the data frame. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"]. **kwds: - Keyword args passed to ``EnergyCalibrator.append_energy_axis``. + Keyword args passed to ``EnergyCalibrator.append_energy_axis()``. """ + if verbose is None: + verbose = self.verbose + + tof_column = self._config["dataframe"]["tof_column"] + if self._dataframe is not None: - print("Adding energy column to dataframe:") - self._dataframe, metadata = self.ec.append_energy_axis( + if verbose: + print("Adding energy column to dataframe:") + df, metadata = self.ec.append_energy_axis( df=self._dataframe, calibration=calibration, + verbose=verbose, **kwds, ) - if self._timed_dataframe is not None: - if self._config["dataframe"]["tof_column"] in self._timed_dataframe.columns: - self._timed_dataframe, _ = self.ec.append_energy_axis( - df=self._timed_dataframe, - calibration=calibration, - **kwds, - ) + if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns: + tdf, _ = self.ec.append_energy_axis( + df=self._timed_dataframe, + calibration=calibration, + verbose=False, + **kwds, + ) # Add Metadata self._attributes.add( @@ -1306,11 +1442,17 @@ def append_energy_axis( "energy_calibration", duplicate_policy="merge", ) - if preview: - print(self._dataframe.head(10)) - else: - if self.verbose: - print(self._dataframe) + self._dataframe = df + if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns: + self._timed_dataframe = tdf + + else: + raise ValueError("No dataframe loaded!") + if preview: + print(self._dataframe.head(10)) + else: + if verbose: + print(self._dataframe) def add_energy_offset( self, @@ -1319,6 +1461,8 @@ def add_energy_offset( weights: Union[float, Sequence[float]] = None, reductions: Union[str, Sequence[str]] = None, preserve_mean: Union[bool, Sequence[bool]] = None, + preview: bool = False, + verbose: bool = None, ) -> None: """Shift the energy axis of the dataframe by a given amount. @@ -1333,18 +1477,26 @@ def add_energy_offset( of dask.dataframe.Series. For example "mean". In this case the function is applied to the column to generate a single value for the whole dataset. If None, the shift is applied per-dataframe-row. Defaults to None. Currently only "mean" is supported. + preview (bool, optional): Option to preview the first elements of the data frame. + Defaults to False. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"]. Raises: ValueError: If the energy column is not in the dataframe. """ - print("Adding energy offset to dataframe:") + if verbose is None: + verbose = self.verbose + energy_column = self._config["dataframe"]["energy_column"] + if energy_column not in self._dataframe.columns: + raise ValueError( + f"Energy column {energy_column} not found in dataframe! " + "Run `append_energy_axis()` first.", + ) if self.dataframe is not None: - if energy_column not in self._dataframe.columns: - raise ValueError( - f"Energy column {energy_column} not found in dataframe! 
" - "Run `append energy axis` first.", - ) + if verbose: + print("Adding energy offset to dataframe:") df, metadata = self.ec.add_offsets( df=self._dataframe, constant=constant, @@ -1353,18 +1505,19 @@ def add_energy_offset( weights=weights, reductions=reductions, preserve_mean=preserve_mean, + verbose=verbose, ) - if self._timed_dataframe is not None: - if energy_column in self._timed_dataframe.columns: - self._timed_dataframe, _ = self.ec.add_offsets( - df=self._timed_dataframe, - constant=constant, - columns=columns, - energy_column=energy_column, - weights=weights, - reductions=reductions, - preserve_mean=preserve_mean, - ) + if self._timed_dataframe is not None and energy_column in self._timed_dataframe.columns: + tdf, _ = self.ec.add_offsets( + df=self._timed_dataframe, + constant=constant, + columns=columns, + energy_column=energy_column, + weights=weights, + reductions=reductions, + preserve_mean=preserve_mean, + ) + self._attributes.add( metadata, "add_energy_offset", @@ -1373,8 +1526,14 @@ def add_energy_offset( duplicate_policy="append", ) self._dataframe = df + if self._timed_dataframe is not None and energy_column in self._timed_dataframe.columns: + self._timed_dataframe = tdf else: raise ValueError("No dataframe loaded!") + if preview: + print(self._dataframe.head(10)) + elif verbose: + print(self._dataframe) def save_energy_offset( self, @@ -1393,13 +1552,19 @@ def save_energy_offset( filename = "sed_config.yaml" if len(self.ec.offsets) == 0: raise ValueError("No energy offset parameters to save!") + + if "creation_date" not in self.ec.offsets.keys(): + self.ec.offsets["creation_date"] = datetime.now().timestamp() + config = {"energy": {"offsets": self.ec.offsets}} save_config(config, filename, overwrite) print(f'Saved energy offset parameters to "{filename}".') def append_tof_ns_axis( self, - **kwargs, + preview: bool = False, + verbose: bool = None, + **kwds, ): """Convert time-of-flight channel steps to nanoseconds. @@ -1407,56 +1572,104 @@ def append_tof_ns_axis( tof_ns_column (str, optional): Name of the generated column containing the time-of-flight in nanosecond. Defaults to config["dataframe"]["tof_ns_column"]. - kwargs: additional arguments are passed to ``energy.tof_step_to_ns``. + preview (bool, optional): Option to preview the first elements of the data frame. + Defaults to False. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"]. + **kwds: additional arguments are passed to ``EnergyCalibrator.tof_step_to_ns()``. 
""" + if verbose is None: + verbose = self.verbose + + tof_column = self._config["dataframe"]["tof_column"] + if self._dataframe is not None: - print("Adding time-of-flight column in nanoseconds to dataframe:") + if verbose: + print("Adding time-of-flight column in nanoseconds to dataframe:") # TODO assert order of execution through metadata - self._dataframe, metadata = self.ec.append_tof_ns_axis( + df, metadata = self.ec.append_tof_ns_axis( df=self._dataframe, - **kwargs, + **kwds, ) - if self._timed_dataframe is not None: - if self._config["dataframe"]["tof_column"] in self._timed_dataframe.columns: - self._timed_dataframe, _ = self.ec.append_tof_ns_axis( - df=self._timed_dataframe, - **kwargs, - ) + if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns: + tdf, _ = self.ec.append_tof_ns_axis( + df=self._timed_dataframe, + **kwds, + ) + self._attributes.add( metadata, "tof_ns_conversion", - duplicate_policy="append", + duplicate_policy="overwrite", ) + self._dataframe = df + if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns: + self._timed_dataframe = tdf + else: + raise ValueError("No dataframe loaded!") + if preview: + print(self._dataframe.head(10)) + else: + if verbose: + print(self._dataframe) - def align_dld_sectors(self, sector_delays: np.ndarray = None, **kwargs): + def align_dld_sectors( + self, + sector_delays: np.ndarray = None, + preview: bool = False, + verbose: bool = None, + **kwds, + ): """Align the 8s sectors of the HEXTOF endstation. Args: sector_delays (np.ndarray, optional): Array containing the sector delays. Defaults to config["dataframe"]["sector_delays"]. + preview (bool, optional): Option to preview the first elements of the data frame. + Defaults to False. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"]. + **kwds: additional arguments are passed to ``EnergyCalibrator.align_dld_sectors()``. """ + if verbose is None: + verbose = self.verbose + + tof_column = self._config["dataframe"]["tof_column"] + if self._dataframe is not None: - print("Aligning 8s sectors of dataframe") + if verbose: + print("Aligning 8s sectors of dataframe") # TODO assert order of execution through metadata - self._dataframe, metadata = self.ec.align_dld_sectors( + + df, metadata = self.ec.align_dld_sectors( df=self._dataframe, sector_delays=sector_delays, - **kwargs, + **kwds, ) - if self._timed_dataframe is not None: - if self._config["dataframe"]["tof_column"] in self._timed_dataframe.columns: - self._timed_dataframe, _ = self.ec.align_dld_sectors( - df=self._timed_dataframe, - sector_delays=sector_delays, - **kwargs, - ) + if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns: + tdf, _ = self.ec.align_dld_sectors( + df=self._timed_dataframe, + sector_delays=sector_delays, + **kwds, + ) + self._attributes.add( metadata, "dld_sector_alignment", duplicate_policy="raise", ) + self._dataframe = df + if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns: + self._timed_dataframe = tdf + else: + raise ValueError("No dataframe loaded!") + if preview: + print(self._dataframe.head(10)) + else: + if verbose: + print(self._dataframe) # Delay calibration function def calibrate_delay_axis( @@ -1464,6 +1677,7 @@ def calibrate_delay_axis( delay_range: Tuple[float, float] = None, datafile: str = None, preview: bool = False, + verbose: bool = None, **kwds, ): """Append delay column to dataframe. 
Either provide delay ranges, or read @@ -1474,27 +1688,25 @@ picoseconds. Defaults to None. datafile (str, optional): The file from which to read the delay ranges. Defaults to None. - preview (bool): Option to preview the first elements of the data frame. + preview (bool, optional): Option to preview the first elements of the data frame. + Defaults to False. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"]. **kwds: Keyword args passed to ``DelayCalibrator.append_delay_axis``. """ + if verbose is None: + verbose = self.verbose + + adc_column = self._config["dataframe"]["adc_column"] + if adc_column not in self._dataframe.columns: + raise ValueError(f"ADC column {adc_column} not found in dataframe, cannot calibrate!") + if self._dataframe is not None: - print("Adding delay column to dataframe:") + if verbose: + print("Adding delay column to dataframe:") - if delay_range is not None: - self._dataframe, metadata = self.dc.append_delay_axis( - self._dataframe, - delay_range=delay_range, - **kwds, - ) - if self._timed_dataframe is not None: - if self._config["dataframe"]["adc_column"] in self._timed_dataframe.columns: - self._timed_dataframe, _ = self.dc.append_delay_axis( - self._timed_dataframe, - delay_range=delay_range, - **kwds, - ) - else: - if datafile is None: + if datafile is None: + if len(self.dc.calibration) == 0: try: datafile = self._files[0] except IndexError: @@ -1504,30 +1716,38 @@ ) raise - self._dataframe, metadata = self.dc.append_delay_axis( - self._dataframe, + df, metadata = self.dc.append_delay_axis( + self._dataframe, + delay_range=delay_range, + datafile=datafile, + verbose=verbose, + **kwds, + ) + if self._timed_dataframe is not None and adc_column in self._timed_dataframe.columns: + tdf, _ = self.dc.append_delay_axis( + self._timed_dataframe, + delay_range=delay_range, datafile=datafile, + verbose=False, **kwds, ) - if self._timed_dataframe is not None: - if self._config["dataframe"]["adc_column"] in self._timed_dataframe.columns: - self._timed_dataframe, _ = self.dc.append_delay_axis( - self._timed_dataframe, - datafile=datafile, - **kwds, - ) # Add Metadata self._attributes.add( metadata, "delay_calibration", - duplicate_policy="merge", + duplicate_policy="overwrite", ) - if preview: - print(self._dataframe.head(10)) - else: - if self.verbose: - print(self._dataframe) + self._dataframe = df + if self._timed_dataframe is not None and adc_column in self._timed_dataframe.columns: + self._timed_dataframe = tdf + else: + raise ValueError("No dataframe loaded!") + if preview: + print(self._dataframe.head(10)) + else: + if verbose: + print(self._dataframe) def save_delay_calibration( self, @@ -1545,9 +1765,23 @@ if filename is None: filename = "sed_config.yaml" + if len(self.dc.calibration) == 0: + raise ValueError("No delay calibration parameters to save!") + calibration = {} + for key, value in self.dc.calibration.items(): + if key == "datafile": + calibration[key] = value + elif key in ["adc_range", "delay_range", "delay_range_mm"]: + calibration[key] = [float(i) for i in value] + else: + calibration[key] = float(value) + + if "creation_date" not in calibration: + calibration["creation_date"] = datetime.now().timestamp() + config = { "delay": { - "calibration": self.dc.calibration, + "calibration": calibration, }, } save_config(config, filename, overwrite) @@ -1557,14 +1791,17 @@ def add_delay_offset( self, constant: float = None,
flip_delay_axis: bool = None, columns: Union[str, Sequence[str]] = None, - weights: Union[float, Sequence[float]] = None, + weights: Union[float, Sequence[float]] = 1.0, reductions: Union[str, Sequence[str]] = None, - preserve_mean: Union[bool, Sequence[bool]] = None, + preserve_mean: Union[bool, Sequence[bool]] = False, + preview: bool = False, + verbose: bool = None, ) -> None: """Shift the delay axis of the dataframe by a constant or other columns. Args: constant (float, optional): The constant to shift the delay axis by. + flip_delay_axis (bool, optional): Option to reverse the direction of the delay axis. columns (Union[str, Sequence[str]]): Name of the column(s) to apply the shift from. weights (Union[float, Sequence[float]]): weights to apply to the columns. Can also be used to flip the sign (e.g. -1). Defaults to 1. @@ -1574,16 +1811,24 @@ of dask.dataframe.Series. For example "mean". In this case the function is applied to the column to generate a single value for the whole dataset. If None, the shift is applied per-dataframe-row. Defaults to None. Currently only "mean" is supported. + preview (bool, optional): Option to preview the first elements of the data frame. + Defaults to False. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"]. - Returns: - None + Raises: + ValueError: If the delay column is not in the dataframe. """ - print("Adding delay offset to dataframe:") + if verbose is None: + verbose = self.verbose + delay_column = self._config["dataframe"]["delay_column"] if delay_column not in self._dataframe.columns: raise ValueError(f"Delay column {delay_column} not found in dataframe! ") if self.dataframe is not None: + if verbose: + print("Adding delay offset to dataframe:") df, metadata = self.dc.add_offsets( df=self._dataframe, constant=constant, @@ -1593,9 +1838,9 @@ weights=weights, reductions=reductions, preserve_mean=preserve_mean, + verbose=verbose, ) - if self._timed_dataframe is not None: - if delay_column in self._timed_dataframe.columns: + if self._timed_dataframe is not None and delay_column in self._timed_dataframe.columns: tdf, _ = self.dc.add_offsets( df=self._timed_dataframe, constant=constant, @@ -1605,10 +1850,12 @@ weights=weights, reductions=reductions, preserve_mean=preserve_mean, + verbose=False, ) + self._attributes.add( metadata, - "add_delay_offset", + "delay_offset", duplicate_policy="append", ) self._dataframe = df @@ -1616,6 +1863,11 @@ self._timed_dataframe = tdf else: raise ValueError("No dataframe loaded!") + if preview: + print(self._dataframe.head(10)) + else: + if verbose: + print(self._dataframe) def save_delay_offsets( self, @@ -1634,6 +1886,10 @@ filename = "sed_config.yaml" if len(self.dc.offsets) == 0: raise ValueError("No delay offset parameters to save!") + + if "creation_date" not in self.dc.offsets.keys(): + self.dc.offsets["creation_date"] = datetime.now().timestamp() + config = { "delay": { "offsets": self.dc.offsets, @@ -1656,8 +1912,9 @@ def save_workflow_params( Defaults to False. """ for method in [ - self.save_momentum_calibration, self.save_splinewarp, + self.save_transformations, + self.save_momentum_calibration, self.save_energy_correction, self.save_energy_calibration, self.save_energy_offset, @@ -1684,7 +1941,7 @@ def add_jitter( jittering noise. If one number is given, the same is used for all axes.
For uniform noise (default) it will cover the interval [-amp, +amp]. Defaults to config["dataframe"]["jitter_amps"]. - **kwds: additional keyword arguments passed to apply_jitter + **kwds: additional keyword arguments passed to ``apply_jitter``. """ if cols is None: cols = self._config["dataframe"]["jitter_cols"] @@ -1717,6 +1974,7 @@ metadata = [] for col in cols: metadata.append(col) + # TODO: allow only appending if columns are not jittered yet self._attributes.add(metadata, "jittering", duplicate_policy="append") def add_time_stamped_data( @@ -1739,7 +1997,7 @@ If omitted, data are retrieved from the epics archiver. archiver_channel (str, optional): EPICS archiver channel from which to retrieve data. Either this or data and time_stamps have to be present. - **kwds: additional keyword arguments passed to add_time_stamped_data + **kwds: additional keyword arguments passed to ``add_time_stamped_data``. """ time_stamp_column = kwds.pop( "time_stamp_column", diff --git a/sed/loader/mpes/loader.py b/sed/loader/mpes/loader.py index 16df4710..11c7b9b4 100644 --- a/sed/loader/mpes/loader.py +++ b/sed/loader/mpes/loader.py @@ -840,7 +840,7 @@ def gather_metadata( ].keys() lens_volts = np.array( - [metadata["file"][f"KTOF:Lens:{lens}:V"] for lens in lens_list], + [metadata["file"].get(f"KTOF:Lens:{lens}:V", np.NaN) for lens in lens_list], ) for mode, value in self._config["metadata"]["lens_mode_config"].items(): lens_volts_config = np.array([value[k] for k in lens_list]) diff --git a/tests/calibrator/test_delay.py b/tests/calibrator/test_delay.py index 3d386967..6a848e83 100644 --- a/tests/calibrator/test_delay.py +++ b/tests/calibrator/test_delay.py @@ -96,7 +96,12 @@ def test_delay_parameters_from_delay_range_mm() -> None: dc = DelayCalibrator(config=config) with pytest.raises(NotImplementedError): dc.append_delay_axis(df, delay_range_mm=(1, 15)) - df, metadata = dc.append_delay_axis(df, delay_range_mm=(1, 15), time0=1) + df, metadata = dc.append_delay_axis( + df, + delay_range_mm=(1, 15), + time0=1, + adc_range=config["delay"]["adc_range"], + ) assert "delay" in df.columns assert "delay_range" in metadata["calibration"] assert "adc_range" in metadata["calibration"] @@ -171,7 +176,7 @@ def test_add_offset_from_args(df=test_dataframe) -> None: cfg_ = cfg.copy() cfg_.pop("delay") config = parse_config( - config=cfg, + config=cfg_, folder_config={}, user_config={}, system_config={}, @@ -182,11 +187,33 @@ constant=1, flip_delay_axis=True, columns="bam", - weights=0.001, ) assert "delay" in df.columns assert "bam" in dc.offsets.keys() expected = -np.array( - delay_stage_vals + bam_vals * 0.001 + 1, + delay_stage_vals + bam_vals * 1 + 1, ) np.testing.assert_allclose(expected, df["delay"]) + + +def test_add_offset_from_dict(df=test_dataframe) -> None: + """test that the timing offset is applied correctly from a provided offsets dict""" + cfg_ = cfg.copy() + offsets = cfg["delay"]["offsets"] # type:ignore + offsets["bam"].pop("weight") + offsets["flip_delay_axis"] = False + cfg_.pop("delay") + config = parse_config( + config=cfg_, + folder_config={}, + user_config={}, + system_config={}, ) + + expected = np.asarray(delay_stage_vals + bam_vals * 1 + 1) + + dc = DelayCalibrator(config=config) + df, _ = dc.add_offsets(df.copy(), offsets=offsets) + assert "delay" in df.columns + assert "bam" in dc.offsets.keys() np.testing.assert_allclose(expected, df["delay"]) diff --git a/tests/calibrator/test_energy.py
b/tests/calibrator/test_energy.py index ed5a40bf..80b8dd81 100644 --- a/tests/calibrator/test_energy.py +++ b/tests/calibrator/test_energy.py @@ -8,6 +8,7 @@ from importlib.util import find_spec from typing import Any from typing import Dict +from typing import Literal import dask.dataframe import numpy as np @@ -565,13 +566,18 @@ def test_apply_energy_correction_raises(correction_type: str) -> None: assert config["dataframe"]["corrected_tof_column"] in df.columns -def test_add_offsets_functionality() -> None: +@pytest.mark.parametrize( + "energy_scale", + ["kinetic", "binding"], +) +def test_add_offsets_functionality(energy_scale: str) -> None: """test the add_offsets function""" + scale_sign: Literal[-1, 1] = -1 if energy_scale == "binding" else 1 config = parse_config( config={ "energy": { "calibration": { - "energy_scale": "kinetic", + "energy_scale": energy_scale, }, "offsets": { "constant": 1, @@ -610,13 +616,14 @@ def test_add_offsets_functionality() -> None: loader=get_loader("flash", config=config), ) res, meta = ec.add_offsets(t_df) - exp_vals = df["energy"].copy() + 1 - exp_vals += df["off1"] - df["off1"].mean() - exp_vals -= df["off2"] - exp_vals += df["off3"].mean() + exp_vals = df["energy"].copy() + 1 * scale_sign + exp_vals += (df["off1"] - df["off1"].mean()) * scale_sign + exp_vals -= df["off2"] * scale_sign + exp_vals += df["off3"].mean() * scale_sign np.testing.assert_allclose(res["energy"].values, exp_vals.values) - exp_meta = params.copy() + exp_meta: Dict[str, Any] = {} exp_meta["applied"] = True + exp_meta["offsets"] = ec.offsets assert meta == exp_meta # test with explicit params ec = EnergyCalibrator( @@ -624,12 +631,26 @@ def test_add_offsets_functionality() -> None: loader=get_loader("flash", config=config), ) t_df = dask.dataframe.from_pandas(df.copy(), npartitions=2) - res, meta = ec.add_offsets(t_df, **params) # type: ignore[arg-type] # pylint disable=unexpected-keyword-arg + res, meta = ec.add_offsets(t_df, **params) # type: ignore np.testing.assert_allclose(res["energy"].values, exp_vals.values) - params["applied"] = True - assert meta == params - - # test with different energy scale + exp_meta = {} + exp_meta["applied"] = True + exp_meta["offsets"] = ec.offsets + assert meta == exp_meta + # test with minimal parameters + ec = EnergyCalibrator( + config=config, + loader=get_loader("flash", config=config), + ) + t_df = dask.dataframe.from_pandas(df.copy(), npartitions=2) + res, meta = ec.add_offsets(t_df, weights=-1, columns="off1") + res, meta = ec.add_offsets(res, columns="off1") + exp_vals = df["energy"].copy() + np.testing.assert_allclose(res["energy"].values, exp_vals.values) + exp_meta = {} + exp_meta["applied"] = True + exp_meta["offsets"] = ec.offsets + assert meta == exp_meta def test_add_offset_raises() -> None: @@ -657,13 +678,6 @@ def test_add_offset_raises() -> None: }, ) t_df = dask.dataframe.from_pandas(df.copy(), npartitions=2) - # no sign in config - with pytest.raises(KeyError): - cfg = deepcopy(cfg_dict) - cfg["energy"]["offsets"]["off1"].pop("weight") - config = parse_config(config=cfg, folder_config={}, user_config={}, system_config={}) - ec = EnergyCalibrator(config=cfg, loader=get_loader("flash", config=config)) - _ = ec.add_offsets(t_df) # no energy scale with pytest.raises(ValueError): diff --git a/tests/calibrator/test_momentum.py b/tests/calibrator/test_momentum.py index c4bd7a99..722aada2 100644 --- a/tests/calibrator/test_momentum.py +++ b/tests/calibrator/test_momentum.py @@ -162,7 +162,7 @@ def test_apply_correction() -> None: 
assert "Xm" in df.columns assert "Ym" in df.columns assert metadata["correction"]["applied"] is True - np.testing.assert_equal(metadata["correction"]["prefs"], features) + np.testing.assert_equal(metadata["correction"]["reference_points"], features) assert metadata["correction"]["cdeform_field"].shape == momentum_map.shape assert metadata["correction"]["rdeform_field"].shape == momentum_map.shape @@ -378,3 +378,19 @@ def test_momentum_calibration_two_points() -> None: metadata["calibration"][key], value, ) + # Test with passing calibration parameters + calibration = mc.calibration.copy() + calibration.pop("creation_date") + df, _, _ = get_loader(loader_name="mpes", config=config).read_dataframe( + folders=df_folder, + collect_metadata=False, + ) + mc = MomentumCorrector(config=config) + df, metadata = mc.append_k_axis(df, **calibration) + assert "kx" in df.columns + assert "ky" in df.columns + for key, value in mc.calibration.items(): + np.testing.assert_equal( + metadata["calibration"][key], + value, + ) diff --git a/tests/test_processor.py b/tests/test_processor.py index 0e4a3161..3d39f6e5 100644 --- a/tests/test_processor.py +++ b/tests/test_processor.py @@ -8,7 +8,6 @@ from importlib.util import find_spec from pathlib import Path from typing import Any -from typing import cast from typing import Dict from typing import List from typing import Tuple @@ -20,7 +19,6 @@ from sed import SedProcessor from sed.core.config import parse_config -from sed.loader.flash.loader import FlashLoader from sed.loader.loader_interface import get_loader # pylint: disable=duplicate-code @@ -61,6 +59,7 @@ def test_processor_from_dataframe() -> None: folder_config={}, user_config={}, system_config={}, + verbose=True, ) for column in dataframe.columns: assert (dataframe[column].compute() == processor.dataframe[column].compute()).all() @@ -80,6 +79,7 @@ def test_processor_from_files() -> None: folder_config={}, user_config={}, system_config={}, + verbose=True, ) for column in dataframe.columns: assert (dataframe[column].compute() == processor.dataframe[column].compute()).all() @@ -99,6 +99,7 @@ def test_processor_from_folders() -> None: folder_config={}, user_config={}, system_config={}, + verbose=True, ) for column in dataframe.columns: assert (dataframe[column].compute() == processor.dataframe[column].compute()).all() @@ -119,6 +120,7 @@ def test_processor_from_runs() -> None: folder_config={}, user_config={}, system_config={}, + verbose=True, ) assert processor.loader.runs == runs for column in dataframe.columns: @@ -141,6 +143,7 @@ def test_additional_parameter_to_loader() -> None: folder_config={}, user_config={}, system_config={}, + verbose=True, ) assert processor.files[0].find("json") > -1 @@ -153,6 +156,7 @@ def test_repr() -> None: folder_config={}, user_config={}, system_config={}, + verbose=True, ) processor_str = str(processor) assert processor_str.find("No Data loaded") > 0 @@ -172,6 +176,7 @@ def test_attributes_setters() -> None: folder_config={}, user_config={}, system_config={}, + verbose=True, ) processor.load(files=files, metadata={"test": {"key1": "value1"}}) dataframe = processor.dataframe @@ -201,6 +206,7 @@ def test_copy_tool() -> None: folder_config={}, user_config={}, system_config={}, + verbose=True, ) assert processor.use_copy_tool is False config = { @@ -217,6 +223,7 @@ def test_copy_tool() -> None: folder_config={}, user_config={}, system_config={}, + verbose=True, ) assert processor.use_copy_tool is True processor.load(files=files) @@ -264,18 +271,14 @@ def test_copy_tool() -> 
None: ) def test_momentum_correction_workflow(features: np.ndarray) -> None: """Test for the momentum correction workflow""" - config = parse_config( - config={"core": {"loader": "mpes"}}, - folder_config={}, - user_config={}, - system_config={}, - ) + config = {"core": {"loader": "mpes"}} processor = SedProcessor( folder=df_folder, config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) processor.bin_and_load_momentum_calibration(apply=True) assert processor.mc.slice is not None @@ -308,6 +311,7 @@ def test_momentum_correction_workflow(features: np.ndarray) -> None: folder_config=f"sed_config_momentum_correction{len(features)}.yaml", user_config={}, system_config={}, + verbose=True, ) processor.generate_splinewarp() assert len(processor.mc.pouter_ord) == rotsym @@ -319,35 +323,59 @@ def test_momentum_correction_workflow(features: np.ndarray) -> None: def test_pose_adjustment() -> None: """Test for the pose correction and application of momentum correction workflow""" - config = parse_config( - config={"core": {"loader": "mpes"}}, + config = {"core": {"loader": "mpes"}} + processor = SedProcessor( + folder=df_folder, + config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) + # pose adjustment w/o loaded image + processor.pose_adjustment(**adjust_params, use_correction=False, apply=True) # type: ignore + + processor.bin_and_load_momentum_calibration(apply=True) + # test pose adjustment + processor.pose_adjustment(**adjust_params, use_correction=False, apply=True) # type: ignore + processor = SedProcessor( folder=df_folder, config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) with pytest.raises(ValueError): - processor.pose_adjustment(**adjust_params, use_correction=False, apply=True) # type: ignore[arg-type] - + processor.apply_momentum_correction() processor.bin_and_load_momentum_calibration(apply=True) - # test pose adjustment - processor.pose_adjustment(**adjust_params, use_correction=False, apply=True) # type: ignore[arg-type] + processor.define_features( + features=feature7, + rotation_symmetry=6, + include_center=True, + apply=True, + ) + processor.generate_splinewarp(use_center=True) + processor.pose_adjustment(**adjust_params, apply=True) # type: ignore[arg-type] + processor.apply_momentum_correction() + assert "Xm" in processor.dataframe.columns + assert "Ym" in processor.dataframe.columns + +def test_pose_adjustment_save_load() -> None: + """Test for the saving and loading of pose correction and application of momentum correction + workflow""" + config = {"core": {"loader": "mpes"}} processor = SedProcessor( folder=df_folder, config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) - with pytest.raises(ValueError): - processor.apply_momentum_correction() + # pose adjustment w/o loaded image processor.bin_and_load_momentum_calibration(apply=True) processor.define_features( features=feature7, @@ -356,10 +384,22 @@ def test_pose_adjustment() -> None: apply=True, ) processor.generate_splinewarp(use_center=True) + processor.save_splinewarp(filename="sed_config_pose_adjustments.yaml") processor.pose_adjustment(**adjust_params, apply=True) # type: ignore[arg-type] + processor.save_transformations(filename="sed_config_pose_adjustments.yaml") + processor = SedProcessor( + folder=df_folder, + config=config, + folder_config="sed_config_pose_adjustments.yaml", + user_config={}, + system_config={}, + verbose=True, + ) processor.apply_momentum_correction() assert "Xm" in 
processor.dataframe.columns assert "Ym" in processor.dataframe.columns + assert "momentum_correction" in processor.attributes + os.remove("sed_config_pose_adjustments.yaml") point_a = [308, 345] @@ -369,18 +409,14 @@ def test_pose_adjustment() -> None: def test_momentum_calibration_workflow() -> None: """Test the calibration of the momentum axes""" - config = parse_config( - config={"core": {"loader": "mpes"}}, - folder_config={}, - user_config={}, - system_config={}, - ) + config = {"core": {"loader": "mpes"}} processor = SedProcessor( folder=df_folder, config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) with pytest.raises(ValueError): processor.apply_momentum_calibration() @@ -409,6 +445,7 @@ def test_momentum_calibration_workflow() -> None: folder_config="sed_config_momentum_calibration.yaml", user_config={}, system_config={}, + verbose=True, ) processor.apply_momentum_calibration() assert ( @@ -422,18 +459,14 @@ def test_momentum_calibration_workflow() -> None: def test_energy_correction() -> None: """Test energy correction workflow.""" - config = parse_config( - config={"core": {"loader": "mpes"}}, - folder_config={}, - user_config={}, - system_config={}, - ) + config = {"core": {"loader": "mpes"}} processor = SedProcessor( folder=df_folder, config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) with pytest.raises(ValueError): processor.apply_energy_correction() @@ -454,6 +487,7 @@ def test_energy_correction() -> None: config=config, user_config={}, system_config={}, + verbose=True, ) processor.adjust_energy_correction(apply=True) assert processor.ec.correction["correction_type"] == "Lorentzian" @@ -481,20 +515,16 @@ def test_energy_calibration_workflow(energy_scale: str, calibration_method: str) Args: energy_scale (str): Energy scale - calibration_method (str): _description_ + calibration_method (str): calibration method to use """ - config = parse_config( - config={"core": {"loader": "mpes"}}, - folder_config={}, - user_config={}, - system_config={}, - ) + config = {"core": {"loader": "mpes"}} processor = SedProcessor( folder=df_folder, config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) with pytest.raises(ValueError): processor.load_bias_series() @@ -573,10 +603,11 @@ def test_energy_calibration_workflow(energy_scale: str, calibration_method: str) folder_config=f"sed_config_energy_calibration_{energy_scale}-{calibration_method}.yaml", user_config={}, system_config={}, + verbose=True, ) with pytest.raises(ValueError): processor.add_energy_offset(constant=1) - processor.append_energy_axis(preview=True) + processor.append_energy_axis(preview=False) assert "energy" in processor.dataframe.columns assert processor.attributes["energy_calibration"]["calibration"]["energy_scale"] == energy_scale os.remove(f"sed_config_energy_calibration_{energy_scale}-{calibration_method}.yaml") @@ -589,7 +620,15 @@ def test_energy_calibration_workflow(energy_scale: str, calibration_method: str) def test_align_dld_sectors() -> None: """Test alignment of DLD sectors for flash detector""" - config = df_folder + "../flash/config.yaml" + config = parse_config( + df_folder + "../flash/config.yaml", + folder_config={}, + user_config={}, + system_config={}, + ) + config["core"]["paths"]["data_parquet_dir"] = ( + config["core"]["paths"]["data_parquet_dir"] + "_align_dld_sectors" + ) processor = SedProcessor( folder=df_folder + "../flash/", config=config, @@ -597,6 +636,7 @@ def test_align_dld_sectors() -> None: 
folder_config={}, user_config={}, system_config={}, + verbose=True, ) assert "dldTimeSteps" in processor.dataframe.columns assert "dldSectorID" in processor.dataframe.columns @@ -629,19 +669,21 @@ np.testing.assert_allclose(tof_ref_array, tof_aligned_array + sector_delays[:, np.newaxis]) # cleanup flash intermediaries - _, parquet_data_dir = cast(FlashLoader, processor.loader).initialize_paths() + parquet_data_dir = config["core"]["paths"]["data_parquet_dir"] for file in os.listdir(Path(parquet_data_dir, "buffer")): os.remove(Path(parquet_data_dir, "buffer", file)) def test_append_tof_ns_axis() -> None: """Test the append_tof_ns_axis function""" + config = {"core": {"loader": "mpes"}} processor = SedProcessor( folder=df_folder, - config={"core": {"loader": "mpes"}}, + config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) processor.append_tof_ns_axis() assert processor.config["dataframe"]["tof_ns_column"] in processor.dataframe @@ -649,24 +691,30 @@ def test_delay_calibration_workflow() -> None: """Test the delay calibration workflow""" - config = parse_config( - config={"core": {"loader": "mpes"}}, + config = {"core": {"loader": "mpes"}} + processor = SedProcessor( + folder=df_folder, + config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) + delay_range = (-500, 1500) + processor.calibrate_delay_axis(delay_range=delay_range, preview=False) + # read from datafile processor = SedProcessor( folder=df_folder, config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) - delay_range = (-500, 1500) - processor.calibrate_delay_axis(delay_range=delay_range, preview=False) - # read from datafile + with pytest.raises(ValueError): + processor.add_delay_offset(constant=1) with pytest.raises(NotImplementedError): - processor.calibrate_delay_axis(preview=True) + processor.calibrate_delay_axis() processor.calibrate_delay_axis( p1_key="@trARPES:DelayStage:p1", p2_key="@trARPES:DelayStage:p2", @@ -674,22 +722,46 @@ t0_key="@trARPES:DelayStage:t0", preview=True, ) assert "delay" in processor.dataframe.columns + creation_date_calibration = processor.dc.calibration["creation_date"] + expected = -1 * ( + processor.dataframe["delay"].compute() + 1 + processor.dataframe["ADC"].compute() + ) + processor.add_delay_offset(constant=1, columns="ADC", flip_delay_axis=True) + creation_date_offsets = processor.dc.offsets["creation_date"] + np.testing.assert_allclose(expected, processor.dataframe["delay"].compute()) + # test saving and loading + processor.save_delay_calibration(filename="sed_config_delay_calibration.yaml") + processor.save_delay_offsets(filename="sed_config_delay_calibration.yaml") + processor = SedProcessor( + folder=df_folder + "../mpes/", + config=config, + folder_config="sed_config_delay_calibration.yaml", + user_config={}, + system_config={}, + verbose=True, + ) + processor.calibrate_delay_axis() + assert "delay" in processor.dataframe.columns + assert ( + processor.attributes["delay_calibration"]["calibration"]["creation_date"] + == creation_date_calibration + ) + processor.add_delay_offset(preview=True) + assert processor.attributes["delay_offset"]["offsets"]["creation_date"] == creation_date_offsets + np.testing.assert_allclose(expected, processor.dataframe["delay"].compute()) + os.remove("sed_config_delay_calibration.yaml") def test_filter_column() -> None: """Test the filter_column function""" - config = parse_config( -
config={"core": {"loader": "mpes"}}, - folder_config={}, - user_config={}, - system_config={}, - ) + config = {"core": {"loader": "mpes"}} processor = SedProcessor( folder=df_folder, config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) low, high = np.quantile(processor.dataframe["X"].compute(), [0.1, 0.9]) processor.filter_column("X", low, high) @@ -703,18 +775,14 @@ def test_filter_column() -> None: def test_add_jitter() -> None: """Test the jittering function""" - config = parse_config( - config={"core": {"loader": "mpes"}}, - folder_config={}, - user_config={}, - system_config={}, - ) + config = {"core": {"loader": "mpes"}} processor = SedProcessor( folder=df_folder, config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) res1 = processor.dataframe["X"].compute() res1a = processor.dataframe["ADC"].compute() @@ -737,6 +805,7 @@ def test_add_time_stamped_data() -> None: user_config={}, system_config={}, time_stamps=True, + verbose=True, ) df_ts = processor.dataframe.timeStamps.compute().values data = np.linspace(0, 1, 20) @@ -757,18 +826,14 @@ def test_add_time_stamped_data() -> None: def test_event_histogram() -> None: """Test histogram plotting function""" - config = parse_config( - config={"core": {"loader": "mpes"}}, - folder_config={}, - user_config={}, - system_config={}, - ) + config = {"core": {"loader": "mpes"}} processor = SedProcessor( folder=df_folder, config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) processor.view_event_histogram(dfpid=0) with pytest.raises(ValueError): @@ -777,18 +842,14 @@ def test_event_histogram() -> None: def test_compute() -> None: """Test binning of final result""" - config = parse_config( - config={"core": {"loader": "mpes"}}, - folder_config={}, - user_config={}, - system_config={}, - ) + config = {"core": {"loader": "mpes"}} processor = SedProcessor( folder=df_folder, config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) bins = [10, 10, 10, 10] axes = ["X", "Y", "t", "ADC"] @@ -800,18 +861,14 @@ def test_compute() -> None: def test_compute_with_filter() -> None: """Test binning of final result using filters""" - config = parse_config( - config={"core": {"loader": "mpes"}}, - folder_config={}, - user_config={}, - system_config={}, - ) + config = {"core": {"loader": "mpes"}} processor = SedProcessor( folder=df_folder, config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) bins = [10, 10, 10, 10] axes = ["X", "Y", "t", "ADC"] @@ -857,18 +914,14 @@ def test_compute_with_filter() -> None: def test_compute_with_normalization() -> None: """Test binning of final result with histogram normalization""" - config = parse_config( - config={"core": {"loader": "mpes"}}, - folder_config={}, - user_config={}, - system_config={}, - ) + config = {"core": {"loader": "mpes"}} processor = SedProcessor( folder=df_folder, config=config, folder_config={}, user_config={}, system_config={}, + verbose=True, ) bins = [10, 10, 10, 5] axes = ["X", "Y", "t", "ADC"] @@ -902,12 +955,7 @@ def test_compute_with_normalization() -> None: def test_get_normalization_histogram() -> None: """Test the generation function for the normalization histogram""" - config = parse_config( - config={"core": {"loader": "mpes"}, "dataframe": {"time_stamp_alias": "timeStamps"}}, - folder_config={}, - user_config={}, - system_config={}, - ) + config = {"core": {"loader": "mpes"}, "dataframe": {"time_stamp_alias": "timeStamps"}} processor = 
SedProcessor( folder=df_folder, config=config, @@ -915,6 +963,7 @@ def test_get_normalization_histogram() -> None: user_config={}, system_config={}, time_stamps=True, + verbose=True, ) bins = [10, 10, 10, 5] axes = ["X", "Y", "t", "ADC"] @@ -980,6 +1029,7 @@ def test_save() -> None: system_config={}, metadata=metadata, collect_metadata=True, + verbose=True, ) processor.apply_momentum_calibration() processor.append_energy_axis() diff --git a/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data.ipynb b/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data.ipynb index 6c39b602..386e278a 100644 --- a/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data.ipynb +++ b/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data.ipynb @@ -70,36 +70,36 @@ { "cell_type": "code", "execution_count": null, - "id": "0a0d336b", + "id": "50d0a3b3", "metadata": {}, "outputs": [], "source": [ - "# Plot of the count rate through the scan\n", - "rate, secs = sp.loader.get_count_rate(range(100))\n", - "plt.plot(secs, rate)" + "# Apply jittering to X, Y, t, ADC columns.\n", + "# Columns are defined in the config, or can be provided as list.\n", + "sp.add_jitter()" ] }, { "cell_type": "code", "execution_count": null, - "id": "dfb42777", + "id": "0a0d336b", "metadata": {}, "outputs": [], "source": [ - "# The time elapsed in the scan\n", - "sp.loader.get_elapsed_time()" + "# Plot of the count rate through the scan\n", + "rate, secs = sp.loader.get_count_rate(range(100))\n", + "plt.plot(secs, rate)" ] }, { "cell_type": "code", "execution_count": null, - "id": "85ac3c83", + "id": "dfb42777", "metadata": {}, "outputs": [], "source": [ - "# Apply jittering to X, Y, t, ADC columns.\n", - "# Columns are defined in the config, or can be provided as list.\n", - "sp.add_jitter()" + "# The time elapsed in the scan\n", + "sp.loader.get_elapsed_time()" ] }, { @@ -642,7 +642,7 @@ "interpreter": { "hash": "728003ee06929e5fa5ff815d1b96bf487266025e4b7440930c6bf4536d02d243" }, - "kernelspec": { + "kernelspec": { "display_name": "sed_poetry", "language": "python", "name": "python3"