From da0416f8c56d9dfce7e7f1d449aa342b3c9dda56 Mon Sep 17 00:00:00 2001
From: github-actions
Date: Fri, 25 Oct 2024 11:04:32 +0000
Subject: [PATCH] Update documentation

---
 sed/2.1.0/_modules/index.html | 510 +++
 sed/2.1.0/_modules/sed/binning/binning.html | 1026 ++++++
 sed/2.1.0/_modules/sed/binning/numba_bin.html | 744 ++++
 sed/2.1.0/_modules/sed/binning/utils.html | 666 ++++
 sed/2.1.0/_modules/sed/calibrator/delay.html | 910 +++++
 sed/2.1.0/_modules/sed/calibrator/energy.html | 2910 ++++++++++++++++
 .../_modules/sed/calibrator/momentum.html | 2658 ++++++++++++++
 sed/2.1.0/_modules/sed/core/config.html | 721 ++++
 sed/2.1.0/_modules/sed/core/dfops.html | 932 +++++
 sed/2.1.0/_modules/sed/core/metadata.html | 613 ++++
 sed/2.1.0/_modules/sed/core/processor.html | 3038 +++++++++++++++++
 sed/2.1.0/_modules/sed/dataset/dataset.html | 905 +++++
 sed/2.1.0/_modules/sed/diagnostics.html | 650 ++++
 sed/2.1.0/_modules/sed/io/hdf5.html | 679 ++++
 sed/2.1.0/_modules/sed/io/nexus.html | 536 +++
 sed/2.1.0/_modules/sed/io/tiff.html | 709 ++++
 .../_modules/sed/loader/base/loader.html | 673 ++++
 .../_modules/sed/loader/flash/loader.html | 1424 ++++++++
 .../_modules/sed/loader/flash/metadata.html | 615 ++++
 .../_modules/sed/loader/generic/loader.html | 649 ++++
 .../_modules/sed/loader/loader_interface.html | 553 +++
 sed/2.1.0/_modules/sed/loader/mirrorutil.html | 850 +++++
 .../_modules/sed/loader/mpes/loader.html | 1454 ++++++++
 sed/2.1.0/_modules/sed/loader/sxp/loader.html | 1473 ++++++++
 sed/2.1.0/_modules/sed/loader/utils.html | 686 ++++
 sed/2.1.0/_sources/index.md.txt | 54 +
 sed/2.1.0/_sources/misc/contributing.rst.txt | 132 +
 sed/2.1.0/_sources/misc/contribution.md.txt | 8 +
 sed/2.1.0/_sources/misc/maintain.rst.txt | 156 +
 sed/2.1.0/_sources/sed/api.rst.txt | 17 +
 sed/2.1.0/_sources/sed/binning.rst.txt | 21 +
 sed/2.1.0/_sources/sed/calibrator.rst.txt | 20 +
 sed/2.1.0/_sources/sed/config.rst.txt | 8 +
 sed/2.1.0/_sources/sed/core.rst.txt | 5 +
 sed/2.1.0/_sources/sed/dataset.rst.txt | 328 ++
 sed/2.1.0/_sources/sed/dfops.rst.txt | 5 +
 sed/2.1.0/_sources/sed/diagnostic.rst.txt | 5 +
 sed/2.1.0/_sources/sed/io.rst.txt | 5 +
 sed/2.1.0/_sources/sed/loader.rst.txt | 56 +
 sed/2.1.0/_sources/sed/metadata.rst.txt | 5 +
 .../user_guide/advanced_topics.md.txt | 6 +
 sed/2.1.0/_sources/user_guide/config.md.txt | 29 +
 sed/2.1.0/_sources/user_guide/index.md.txt | 29 +
 .../_sources/user_guide/installation.md.txt | 74 +
 sed/2.1.0/_sources/workflows/index.md.txt | 11 +
 sed/2.1.0/_static/basic.css | 921 +++++
 sed/2.1.0/_static/doctools.js | 156 +
 sed/2.1.0/_static/documentation_options.js | 14 +
 sed/2.1.0/_static/file.png | Bin 0 -> 286 bytes
 sed/2.1.0/_static/language_data.js | 199 ++
 sed/2.1.0/_static/minus.png | Bin 0 -> 90 bytes
 .../_static/nbsphinx-broken-thumbnail.svg | 9 +
 sed/2.1.0/_static/nbsphinx-code-cells.css | 259 ++
 sed/2.1.0/_static/nbsphinx-gallery.css | 31 +
 sed/2.1.0/_static/nbsphinx-no-thumbnail.svg | 9 +
 sed/2.1.0/_static/plus.png | Bin 0 -> 90 bytes
 sed/2.1.0/_static/pygments.css | 152 +
 sed/2.1.0/_static/scripts/bootstrap.js | 3 +
 .../_static/scripts/bootstrap.js.LICENSE.txt | 5 +
 sed/2.1.0/_static/scripts/bootstrap.js.map | 1 +
 sed/2.1.0/_static/scripts/fontawesome.js | 3 +
 .../scripts/fontawesome.js.LICENSE.txt | 5 +
 sed/2.1.0/_static/scripts/fontawesome.js.map | 1 +
 .../_static/scripts/pydata-sphinx-theme.js | 2 +
 .../scripts/pydata-sphinx-theme.js.map | 1 +
 sed/2.1.0/_static/searchtools.js | 566 +++
 sed/2.1.0/_static/sphinx_highlight.js | 144 +
 .../_static/styles/pydata-sphinx-theme.css | 32 +
 .../styles/pydata-sphinx-theme.css.map | 1 +
 sed/2.1.0/_static/styles/theme.css | 2 +
 .../fontawesome/webfonts/fa-brands-400.ttf | Bin 0 -> 209128 bytes
 .../fontawesome/webfonts/fa-brands-400.woff2 | Bin 0 -> 117852 bytes
 .../fontawesome/webfonts/fa-regular-400.ttf | Bin 0 -> 67860 bytes
 .../fontawesome/webfonts/fa-regular-400.woff2 | Bin 0 -> 25392 bytes
 .../fontawesome/webfonts/fa-solid-900.ttf | Bin 0 -> 420332 bytes
 .../fontawesome/webfonts/fa-solid-900.woff2 | Bin 0 -> 156400 bytes
 sed/2.1.0/_static/webpack-macros.html | 24 +
 sed/2.1.0/genindex.html | 1458 ++++++++
 sed/2.1.0/index.html | 537 +++
 sed/2.1.0/misc/contributing.html | 687 ++++
 sed/2.1.0/misc/contribution.html | 555 +++
 sed/2.1.0/misc/maintain.html | 707 ++++
 sed/2.1.0/objects.inv | Bin 0 -> 7538 bytes
 sed/2.1.0/py-modindex.html | 592 ++++
 sed/2.1.0/search.html | 490 +++
 sed/2.1.0/searchindex.js | 1 +
 sed/2.1.0/sed/api.html | 561 +++
 sed/2.1.0/sed/binning.html | 939 +++++
 sed/2.1.0/sed/calibrator.html | 2545 ++++++++++++++
 sed/2.1.0/sed/config.html | 675 ++++
 sed/2.1.0/sed/core.html | 1722 ++++++++++
 sed/2.1.0/sed/dataset.html | 993 ++++++
 sed/2.1.0/sed/dfops.html | 782 +++++
 sed/2.1.0/sed/diagnostic.html | 611 ++++
 sed/2.1.0/sed/io.html | 691 ++++
 sed/2.1.0/sed/loader.html | 2842 +++++++++++++++
 sed/2.1.0/sed/metadata.html | 639 ++++
 sed/2.1.0/user_guide/advanced_topics.html | 544 +++
 sed/2.1.0/user_guide/config.html | 1212 +++++++
 sed/2.1.0/user_guide/index.html | 579 ++++
 sed/2.1.0/user_guide/installation.html | 633 ++++
 sed/2.1.0/workflows/index.html | 538 +++
 sed/stable | 2 +-
 103 files changed, 51632 insertions(+), 1 deletion(-)
 create mode 100644 sed/2.1.0/_modules/index.html
 create mode 100644 sed/2.1.0/_modules/sed/binning/binning.html
 create mode 100644 sed/2.1.0/_modules/sed/binning/numba_bin.html
 create mode 100644 sed/2.1.0/_modules/sed/binning/utils.html
 create mode 100644 sed/2.1.0/_modules/sed/calibrator/delay.html
 create mode 100644 sed/2.1.0/_modules/sed/calibrator/energy.html
 create mode 100644 sed/2.1.0/_modules/sed/calibrator/momentum.html
 create mode 100644 sed/2.1.0/_modules/sed/core/config.html
 create mode 100644 sed/2.1.0/_modules/sed/core/dfops.html
 create mode 100644 sed/2.1.0/_modules/sed/core/metadata.html
 create mode 100644 sed/2.1.0/_modules/sed/core/processor.html
 create mode 100644 sed/2.1.0/_modules/sed/dataset/dataset.html
 create mode 100644 sed/2.1.0/_modules/sed/diagnostics.html
 create mode 100644 sed/2.1.0/_modules/sed/io/hdf5.html
 create mode 100644 sed/2.1.0/_modules/sed/io/nexus.html
 create mode 100644 sed/2.1.0/_modules/sed/io/tiff.html
 create mode 100644 sed/2.1.0/_modules/sed/loader/base/loader.html
 create mode 100644 sed/2.1.0/_modules/sed/loader/flash/loader.html
 create mode 100644 sed/2.1.0/_modules/sed/loader/flash/metadata.html
 create mode 100644 sed/2.1.0/_modules/sed/loader/generic/loader.html
 create mode 100644 sed/2.1.0/_modules/sed/loader/loader_interface.html
 create mode 100644 sed/2.1.0/_modules/sed/loader/mirrorutil.html
 create mode 100644 sed/2.1.0/_modules/sed/loader/mpes/loader.html
 create mode 100644 sed/2.1.0/_modules/sed/loader/sxp/loader.html
 create mode 100644 sed/2.1.0/_modules/sed/loader/utils.html
 create mode 100644 sed/2.1.0/_sources/index.md.txt
 create mode 100644 sed/2.1.0/_sources/misc/contributing.rst.txt
 create mode 100644 sed/2.1.0/_sources/misc/contribution.md.txt
 create mode 100644 sed/2.1.0/_sources/misc/maintain.rst.txt
 create mode 100644 sed/2.1.0/_sources/sed/api.rst.txt
 create mode 100644 sed/2.1.0/_sources/sed/binning.rst.txt
 create mode 100644 sed/2.1.0/_sources/sed/calibrator.rst.txt
 create mode 100644 sed/2.1.0/_sources/sed/config.rst.txt
 create mode 100644 sed/2.1.0/_sources/sed/core.rst.txt
 create mode 100644 sed/2.1.0/_sources/sed/dataset.rst.txt
 create mode 100644 sed/2.1.0/_sources/sed/dfops.rst.txt
 create mode 100644 sed/2.1.0/_sources/sed/diagnostic.rst.txt
 create mode 100644 sed/2.1.0/_sources/sed/io.rst.txt
 create mode 100644 sed/2.1.0/_sources/sed/loader.rst.txt
 create mode 100644 sed/2.1.0/_sources/sed/metadata.rst.txt
 create mode 100644 sed/2.1.0/_sources/user_guide/advanced_topics.md.txt
 create mode 100644 sed/2.1.0/_sources/user_guide/config.md.txt
 create mode 100644 sed/2.1.0/_sources/user_guide/index.md.txt
 create mode 100644 sed/2.1.0/_sources/user_guide/installation.md.txt
 create mode 100644 sed/2.1.0/_sources/workflows/index.md.txt
 create mode 100644 sed/2.1.0/_static/basic.css
 create mode 100644 sed/2.1.0/_static/doctools.js
 create mode 100644 sed/2.1.0/_static/documentation_options.js
 create mode 100644 sed/2.1.0/_static/file.png
 create mode 100644 sed/2.1.0/_static/language_data.js
 create mode 100644 sed/2.1.0/_static/minus.png
 create mode 100644 sed/2.1.0/_static/nbsphinx-broken-thumbnail.svg
 create mode 100644 sed/2.1.0/_static/nbsphinx-code-cells.css
 create mode 100644 sed/2.1.0/_static/nbsphinx-gallery.css
 create mode 100644 sed/2.1.0/_static/nbsphinx-no-thumbnail.svg
 create mode 100644 sed/2.1.0/_static/plus.png
 create mode 100644 sed/2.1.0/_static/pygments.css
 create mode 100644 sed/2.1.0/_static/scripts/bootstrap.js
 create mode 100644 sed/2.1.0/_static/scripts/bootstrap.js.LICENSE.txt
 create mode 100644 sed/2.1.0/_static/scripts/bootstrap.js.map
 create mode 100644 sed/2.1.0/_static/scripts/fontawesome.js
 create mode 100644 sed/2.1.0/_static/scripts/fontawesome.js.LICENSE.txt
 create mode 100644 sed/2.1.0/_static/scripts/fontawesome.js.map
 create mode 100644 sed/2.1.0/_static/scripts/pydata-sphinx-theme.js
 create mode 100644 sed/2.1.0/_static/scripts/pydata-sphinx-theme.js.map
 create mode 100644 sed/2.1.0/_static/searchtools.js
 create mode 100644 sed/2.1.0/_static/sphinx_highlight.js
 create mode 100644 sed/2.1.0/_static/styles/pydata-sphinx-theme.css
 create mode 100644 sed/2.1.0/_static/styles/pydata-sphinx-theme.css.map
 create mode 100644 sed/2.1.0/_static/styles/theme.css
 create mode 100644 sed/2.1.0/_static/vendor/fontawesome/webfonts/fa-brands-400.ttf
 create mode 100644 sed/2.1.0/_static/vendor/fontawesome/webfonts/fa-brands-400.woff2
 create mode 100644 sed/2.1.0/_static/vendor/fontawesome/webfonts/fa-regular-400.ttf
 create mode 100644 sed/2.1.0/_static/vendor/fontawesome/webfonts/fa-regular-400.woff2
 create mode 100644 sed/2.1.0/_static/vendor/fontawesome/webfonts/fa-solid-900.ttf
 create mode 100644 sed/2.1.0/_static/vendor/fontawesome/webfonts/fa-solid-900.woff2
 create mode 100644 sed/2.1.0/_static/webpack-macros.html
 create mode 100644 sed/2.1.0/genindex.html
 create mode 100644 sed/2.1.0/index.html
 create mode 100644 sed/2.1.0/misc/contributing.html
 create mode 100644 sed/2.1.0/misc/contribution.html
 create mode 100644 sed/2.1.0/misc/maintain.html
 create mode 100644 sed/2.1.0/objects.inv
 create mode 100644 sed/2.1.0/py-modindex.html
 create mode 100644 sed/2.1.0/search.html
 create mode 100644 sed/2.1.0/searchindex.js
 create mode 100644 sed/2.1.0/sed/api.html
 create mode 100644 sed/2.1.0/sed/binning.html
 create mode 100644 sed/2.1.0/sed/calibrator.html
 create mode 100644 sed/2.1.0/sed/config.html
 create mode 100644 sed/2.1.0/sed/core.html
 create mode 100644 sed/2.1.0/sed/dataset.html
 create mode 100644 sed/2.1.0/sed/dfops.html
 create mode 100644 sed/2.1.0/sed/diagnostic.html
 create mode 100644 sed/2.1.0/sed/io.html
 create mode 100644 sed/2.1.0/sed/loader.html
 create mode 100644 sed/2.1.0/sed/metadata.html
 create mode 100644 sed/2.1.0/user_guide/advanced_topics.html
 create mode 100644 sed/2.1.0/user_guide/config.html
 create mode 100644 sed/2.1.0/user_guide/index.html
 create mode 100644 sed/2.1.0/user_guide/installation.html
 create mode 100644 sed/2.1.0/workflows/index.html

diff --git a/sed/2.1.0/_modules/index.html b/sed/2.1.0/_modules/index.html
new file mode 100644
index 0000000..6894892
--- /dev/null
+++ b/sed/2.1.0/_modules/index.html
@@ -0,0 +1,510 @@
+[Sphinx theme scaffold and navigation for "Overview: module code — SED 0.2.1 documentation"; markup stripped]
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/binning/binning.html b/sed/2.1.0/_modules/sed/binning/binning.html
new file mode 100644
index 0000000..89ced35
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/binning/binning.html
@@ -0,0 +1,1026 @@
+[Sphinx theme scaffold and navigation for "sed.binning.binning — SED 0.2.1 documentation"; markup stripped]
Source code for sed.binning.binning

+"""This module contains the binning functions of the sed.binning module
+
+"""
+import gc
+from functools import reduce
+from typing import cast
+from typing import List
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+import dask.dataframe
+import numpy as np
+import pandas as pd
+import psutil
+import xarray as xr
+from threadpoolctl import threadpool_limits
+from tqdm.auto import tqdm
+
+from .numba_bin import numba_histogramdd
+from .utils import _arraysum
+from .utils import bin_centers_to_bin_edges
+from .utils import simplify_binning_arguments
+
+N_CPU = psutil.cpu_count()
+
+
+
+def bin_partition(
+    part: Union[dask.dataframe.DataFrame, pd.DataFrame],
+    bins: Union[
+        int,
+        dict,
+        Sequence[int],
+        Sequence[np.ndarray],
+        Sequence[tuple],
+    ] = 100,
+    axes: Sequence[str] = None,
+    ranges: Sequence[Tuple[float, float]] = None,
+    hist_mode: str = "numba",
+    jitter: Union[list, dict] = None,
+    return_edges: bool = False,
+    skip_test: bool = False,
+) -> Union[np.ndarray, Tuple[np.ndarray, list]]:
+    """Compute the n-dimensional histogram of a single dataframe partition.
+
+    Args:
+        part (Union[dask.dataframe.DataFrame, pd.DataFrame]): dataframe on which
+            to perform the histogram. Usually a partition of a dask DataFrame.
+        bins (int, dict, Sequence[int], Sequence[np.ndarray], Sequence[tuple], optional):
+            Definition of the bins. Can be any of the following cases:
+
+            - an integer describing the number of bins for all dimensions. This
+              requires "ranges" to be defined as well.
+            - a sequence containing one entry of the following types for each
+              dimension:
+
+              - an integer describing the number of bins. This requires "ranges"
+                to be defined as well.
+              - a np.ndarray defining the bin centers
+              - a tuple of 3 numbers describing start, end and step of the binning
+                range.
+
+            - a dictionary made of the axes as keys and any of the above as
+              values.
+
+            The last option takes priority over the axes and range arguments.
+            Defaults to 100.
+        axes (Sequence[str], optional): Sequence containing the names of
+            the axes (columns) on which to calculate the histogram. The order will
+            be the order of the dimensions in the resulting array. Not required if
+            bins are provided as a dictionary containing the axis names.
+            Defaults to None.
+        ranges (Sequence[Tuple[float, float]], optional): Sequence of tuples
+            containing the start and end point of the binning range. Required if
+            bins are given as int or Sequence[int]. Defaults to None.
+        hist_mode (str, optional): Histogram calculation method.
+
+            - "numpy": use ``numpy.histogramdd``,
+            - "numba": use a numba-powered similar method.
+
+            Defaults to "numba".
+        jitter (Union[list, dict], optional): a list of the axes on which to apply
+            jittering. To specify the jitter amplitude or method (normal or uniform
+            noise) a dictionary can be passed. This should look like
+            jitter={'axis':{'amplitude':0.5,'mode':'uniform'}}.
+            This example also shows the default behavior, in case None is
+            passed in the dictionary, or jitter is a list of strings.
+            Warning: this is not the most performant approach. Applying jitter
+            to the dataframe before calling the binning is much faster.
+            Defaults to None.
+        return_edges (bool, optional): If True, returns a list of D arrays
+            describing the bin edges for each dimension, similar to the
+            behavior of ``np.histogramdd``. Defaults to False.
+        skip_test (bool, optional): Turns off input check and data transformation.
+            Defaults to False as it is intended for internal use only.
+            Warning: setting this True might make error tracking difficult.
+
+    Raises:
+        ValueError: When the method requested is not available.
+        AttributeError: if bins, axes and ranges are not congruent in dimensionality.
+        KeyError: when the columns along which to compute the histogram are not
+            present in the dataframe.
+
+    Returns:
+        Union[np.ndarray, Tuple[np.ndarray, list]]: 2-element tuple returned only
+        when return_edges is True. Otherwise only hist is returned.
+
+        - **hist**: The result of the n-dimensional binning
+        - **edges**: A list of D arrays describing the bin edges for each dimension.
+    """
+    if not skip_test:
+        bins, axes, ranges = simplify_binning_arguments(bins, axes, ranges)
+    else:
+        if not isinstance(bins, list) or not (
+            all(isinstance(x, (int, np.int64)) for x in bins)
+            or all(isinstance(x, np.ndarray) for x in bins)
+        ):
+            raise TypeError(
+                "bins needs to be of type 'List[int] or List[np.ndarray]' if tests are skipped!",
+            )
+        if not (isinstance(axes, list)) or not all(isinstance(axis, str) for axis in axes):
+            raise TypeError(
+                "axes needs to be of type 'List[str]' if tests are skipped!",
+            )
+        bins = cast(Union[List[int], List[np.ndarray]], bins)
+        axes = cast(List[str], axes)
+        ranges = cast(List[Tuple[float, float]], ranges)
+
+    # convert bin centers to bin edges:
+    if all(isinstance(x, np.ndarray) for x in bins):
+        bins = cast(List[np.ndarray], bins)
+        for i, bin_centers in enumerate(bins):
+            bins[i] = bin_centers_to_bin_edges(bin_centers)
+    else:
+        bins = cast(List[int], bins)
+        # shift ranges by half a bin size to align the bin centers to the given ranges,
+        # as the histogram functions interpret the ranges as limits for the edges.
+        for i, nbins in enumerate(bins):
+            halfbinsize = (ranges[i][1] - ranges[i][0]) / (nbins) / 2
+            ranges[i] = (
+                ranges[i][0] - halfbinsize,
+                ranges[i][1] - halfbinsize,
+            )
+
+    # Locate columns for binning operation
+    col_id = [part.columns.get_loc(axis) for axis in axes]
+
+    if jitter is not None:
+        sel_part = part[axes].copy()
+
+        if isinstance(jitter, Sequence):
+            jitter = {k: None for k in jitter}
+        for col, jpars in jitter.items():
+            if col in axes:
+                if jpars is None:
+                    jpars = {}
+                amp = jpars.get("amplitude", 0.5)
+                mode = jpars.get("mode", "uniform")
+                ax_index = axes.index(col)
+                _bin = bins[ax_index]
+                if isinstance(_bin, (int, np.int64)):
+                    rng = ranges[ax_index]
+                    binsize = abs(rng[1] - rng[0]) / _bin
+                else:
+                    binsize = abs(_bin[0] - _bin[1])
+                    assert np.allclose(
+                        binsize,
+                        abs(_bin[-3] - _bin[-2]),
+                    ), f"bins along {col} are not uniform. Cannot apply jitter."
+                apply_jitter_on_column(sel_part, amp * binsize, col, mode)
+        vals = sel_part.values
+    else:
+        vals = part.iloc[:, col_id].values
+    if vals.dtype == "object":
+        raise ValueError(
+            "Binning requires all binned dataframe columns to be of numeric type. "
+            "Encountered data types were "
+            f"{[part.columns[id] + ': ' + str(part.iloc[:, id].dtype) for id in col_id]}. "
+            "Please make sure all axes data are of numeric type.",
+        )
+    if hist_mode == "numba":
+        hist_partition, edges = numba_histogramdd(
+            vals,
+            bins=bins,
+            ranges=ranges,
+        )
+    elif hist_mode == "numpy":
+        hist_partition, edges = np.histogramdd(
+            vals,
+            bins=bins,
+            range=ranges,
+        )
+    else:
+        raise ValueError(
+            f"No binning method {hist_mode} available. Please choose between numba and numpy.",
+        )
+
+    if return_edges:
+        return hist_partition, edges
+
+    return hist_partition
+
+
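For orientation, bin_partition can be exercised on a plain in-memory pandas DataFrame outside of any dask context. A minimal sketch (not part of the module source; the column names and bin settings are made up, and the module's dependencies such as numba are assumed installed):

import numpy as np
import pandas as pd

# Hypothetical single partition with two numeric columns
part = pd.DataFrame(
    {
        "X": np.random.uniform(0, 100, 10_000),
        "Y": np.random.uniform(0, 100, 10_000),
    },
)

# 2D histogram with 50 bins per axis over explicit ranges
hist, edges = bin_partition(
    part,
    bins=[50, 50],
    axes=["X", "Y"],
    ranges=[(0.0, 100.0), (0.0, 100.0)],
    return_edges=True,
)
print(hist.shape)  # (50, 50)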
+def bin_dataframe(
+    df: dask.dataframe.DataFrame,
+    bins: Union[
+        int,
+        dict,
+        Sequence[int],
+        Sequence[np.ndarray],
+        Sequence[tuple],
+    ] = 100,
+    axes: Sequence[str] = None,
+    ranges: Sequence[Tuple[float, float]] = None,
+    hist_mode: str = "numba",
+    mode: str = "fast",
+    jitter: Union[list, dict] = None,
+    pbar: bool = True,
+    n_cores: int = N_CPU - 1,
+    threads_per_worker: int = 4,
+    threadpool_api: str = "blas",
+    return_partitions: bool = False,
+    **kwds,
+) -> xr.DataArray:
+    """Computes the n-dimensional histogram on columns of a dataframe,
+    parallelized.
+
+    Args:
+        df (dask.dataframe.DataFrame): a dask.DataFrame on which to perform the
+            histogram.
+        bins (int, dict, Sequence[int], Sequence[np.ndarray], Sequence[tuple], optional):
+            Definition of the bins. Can be any of the following cases:
+
+            - an integer describing the number of bins for all dimensions. This
+              requires "ranges" to be defined as well.
+            - a sequence containing one entry of the following types for each
+              dimension:
+
+              - an integer describing the number of bins. This requires "ranges"
+                to be defined as well.
+              - a np.ndarray defining the bin centers
+              - a tuple of 3 numbers describing start, end and step of the binning
+                range.
+
+            - a dictionary made of the axes as keys and any of the above as
+              values.
+
+            The last option takes priority over the axes and range arguments.
+            Defaults to 100.
+        axes (Sequence[str], optional): Sequence containing the names of
+            the axes (columns) on which to calculate the histogram. The order will
+            be the order of the dimensions in the resulting array. Not required if
+            bins are provided as a dictionary containing the axis names.
+            Defaults to None.
+        ranges (Sequence[Tuple[float, float]], optional): Sequence of tuples
+            containing the start and end point of the binning range. Required if
+            bins are given as int or Sequence[int]. Defaults to None.
+        hist_mode (str, optional): Histogram calculation method.
+
+            - "numpy": use ``numpy.histogramdd``,
+            - "numba": use a numba-powered similar method.
+
+            Defaults to "numba".
+        mode (str, optional): Defines how the results from each partition are
+            combined.
+
+            - 'fast': Uses parallelized recombination of results.
+            - 'lean': Store all partition results in a list, and recombine at the
+              end.
+            - 'legacy': Single-core recombination of partition results.
+
+            Defaults to "fast".
+        jitter (Union[list, dict], optional): a list of the axes on which to apply
+            jittering. To specify the jitter amplitude or method (normal or uniform
+            noise) a dictionary can be passed. This should look like
+            jitter={'axis':{'amplitude':0.5,'mode':'uniform'}}.
+            This example also shows the default behavior, in case None is
+            passed in the dictionary, or jitter is a list of strings.
+            Warning: this is not the most performant approach. Applying jitter
+            to the dataframe before calling the binning is much faster.
+            Defaults to None.
+        pbar (bool, optional): Option to show the tqdm progress bar. Defaults to True.
+        n_cores (int, optional): Number of CPU cores to use for parallelization.
+            Defaults to all but one of the available cores (N_CPU - 1).
+        threads_per_worker (int, optional): Limit the number of threads that
+            multiprocessing can spawn. Defaults to 4.
+        threadpool_api (str, optional): The API to use for multiprocessing.
+            Defaults to "blas".
+        return_partitions (bool, optional): Option to return a hypercube of dimension
+            n+1, where the last dimension corresponds to the dataframe partitions.
+            Defaults to False.
+        **kwds: Keyword arguments passed to ``dask.compute()``
+
+    Raises:
+        Warning: Warns if there are unimplemented features the user is trying to use.
+        ValueError: Raised when there is a mismatch in dimensions between the
+            binning parameters.
+
+    Returns:
+        xr.DataArray: The result of the n-dimensional binning represented in an
+        xarray object, combining the data with the axes (bin centers).
+    """
+    bins, axes, ranges = simplify_binning_arguments(bins, axes, ranges)
+
+    # create the coordinate axes for the xarray output
+    # if provided as array, they are interpreted as bin centers
+    if isinstance(bins[0], np.ndarray):
+        bins = cast(List[np.ndarray], bins)
+        coords = dict(zip(axes, bins))
+    elif ranges is None:
+        raise ValueError(
+            "bins is not an array and range is none. this shouldn't happen.",
+        )
+    else:
+        bins = cast(List[int], bins)
+        coords = {
+            ax: np.linspace(r[0], r[1], n, endpoint=False) for ax, r, n in zip(axes, ranges, bins)
+        }
+
+    full_shape = tuple(axis.size for axis in coords.values())
+
+    full_result = np.zeros(full_shape)
+    partition_results = []  # Partition-level results
+
+    # limit multithreading in worker threads
+    with threadpool_limits(limits=threads_per_worker, user_api=threadpool_api):
+        # Main loop for binning
+        for i in tqdm(range(0, df.npartitions, n_cores), disable=not pbar):
+            core_tasks = []  # Core-level jobs
+            for j in range(0, n_cores):
+                partition_index = i + j
+                if partition_index >= df.npartitions:
+                    break
+
+                df_partition = df.get_partition(
+                    partition_index,
+                )  # Obtain dataframe partition
+                core_tasks.append(
+                    dask.delayed(bin_partition)(
+                        df_partition,
+                        bins=bins,
+                        axes=axes,
+                        ranges=ranges,
+                        hist_mode=hist_mode,
+                        jitter=jitter,
+                        skip_test=True,
+                        return_edges=False,
+                    ),
+                )
+
+            if len(core_tasks) > 0:
+                core_results = dask.compute(*core_tasks, **kwds)
+
+                if return_partitions:
+                    for core_result in core_results:
+                        partition_results.append(core_result)
+                    del core_results
+
+                elif mode == "legacy":
+                    # Combine all core results for a dataframe partition
+                    partition_result = np.zeros_like(core_results[0])
+                    for core_result in core_results:
+                        partition_result += core_result
+
+                    partition_results.append(partition_result)
+                    # del partition_result
+
+                elif mode == "lean":
+                    # Combine all core results for a dataframe partition
+                    partition_result = reduce(_arraysum, core_results)
+                    full_result += partition_result
+                    del partition_result
+                    del core_results
+
+                elif mode == "fast":
+                    combine_tasks = []
+                    for j in range(0, n_cores):
+                        combine_parts = []
+                        # split results along the first dimension among worker
+                        # threads
+                        for core_result in core_results:
+                            combine_parts.append(
+                                core_result[
+                                    int(j * full_shape[0] / n_cores) : int(
+                                        (j + 1) * full_shape[0] / n_cores,
+                                    ),
+                                    ...,
+                                ],
+                            )
+                        combine_tasks.append(
+                            dask.delayed(reduce)(_arraysum, combine_parts),
+                        )
+                    combine_results = dask.compute(*combine_tasks, **kwds)
+                    # Directly fill into target array. This is much faster than
+                    # the (not so parallel) reduce/concatenation used before,
+                    # and uses less memory.
+                    for j in range(0, n_cores):
+                        full_result[
+                            int(j * full_shape[0] / n_cores) : int(
+                                (j + 1) * full_shape[0] / n_cores,
+                            ),
+                            ...,
+                        ] += combine_results[j]
+                    del combine_parts
+                    del combine_tasks
+                    del combine_results
+                    del core_results
+                else:
+                    raise ValueError(f"Could not interpret mode {mode}")
+
+            del core_tasks
+
+    if return_partitions:
+        coords = {**coords, **{"df_part": np.arange(df.npartitions)}}
+        dims = list(axes)
+        dims.append("df_part")
+        data_array = xr.DataArray(
+            data=np.stack(partition_results, axis=-1).astype("float32"),
+            coords=coords,
+            dims=dims,
+        )
+
+    else:
+        if mode == "legacy":
+            # still need to combine all partition results
+            full_result = np.zeros_like(partition_results[0])
+            for partition_result in partition_results:
+                full_result += np.nan_to_num(partition_result)
+
+        data_array = xr.DataArray(
+            data=full_result.astype("float32"),
+            coords=coords,
+            dims=list(axes),
+        )
+
+    gc.collect()
+    return data_array
+
+
+def normalization_histogram_from_timestamps(
+    df: dask.dataframe.DataFrame,
+    axis: str,
+    bin_centers: np.ndarray,
+    time_stamp_column: str,
+) -> xr.DataArray:
+    """Get a normalization histogram from the time stamps column in the dataframe.
+
+    Args:
+        df (dask.dataframe.DataFrame): a dask.DataFrame on which to perform the
+            histogram.
+        axis (str): The axis (dataframe column) on which to calculate the
+            normalization histogram.
+        bin_centers (np.ndarray): Bin centers used for binning of the axis.
+        time_stamp_column (str): Dataframe column containing the time stamps.
+
+    Returns:
+        xr.DataArray: Calculated normalization histogram.
+    """
+    time_per_electron = df[time_stamp_column].diff()
+
+    bins = df[axis].map_partitions(
+        pd.cut,
+        bins=bin_centers_to_bin_edges(bin_centers),
+    )
+
+    histogram = time_per_electron.groupby([bins]).sum().compute().values
+
+    data_array = xr.DataArray(
+        data=histogram,
+        coords={axis: bin_centers},
+    )
+
+    return data_array
+
+
+def normalization_histogram_from_timed_dataframe(
+    df: dask.dataframe.DataFrame,
+    axis: str,
+    bin_centers: np.ndarray,
+    time_unit: float,
+) -> xr.DataArray:
+    """Get a normalization histogram from a timed dataframe.
+
+    Args:
+        df (dask.dataframe.DataFrame): a dask.DataFrame on which to perform the
+            histogram. Entries should be based on an equal time unit.
+        axis (str): The axis (dataframe column) on which to calculate the
+            normalization histogram.
+        bin_centers (np.ndarray): Bin centers used for binning of the axis.
+        time_unit (float): Time unit the data frame entries are based on.
+
+    Returns:
+        xr.DataArray: Calculated normalization histogram.
+    """
+    bins = df[axis].map_partitions(
+        pd.cut,
+        bins=bin_centers_to_bin_edges(bin_centers),
+    )
+
+    histogram = df[axis].groupby([bins]).count().compute().values * time_unit
+    # histogram = bin_dataframe(df, axes=[axis], bins=[bin_centers]) * time_unit
+
+    data_array = xr.DataArray(
+        data=histogram,
+        coords={axis: bin_centers},
+    )
+
+    return data_array
+
+
+def apply_jitter_on_column(
+    df: Union[dask.dataframe.core.DataFrame, pd.DataFrame],
+    amp: float,
+    col: str,
+    mode: str = "uniform",
+):
+    """Add jittering to the column of a dataframe.
+
+    Args:
+        df (Union[dask.dataframe.core.DataFrame, pd.DataFrame]): Dataframe to add
+            noise/jittering to.
+        amp (float): Amplitude scaling for the jittering noise.
+        col (str): Name of the column to add jittering to.
+        mode (str, optional): Choose between 'uniform' for uniformly
+            distributed noise, or 'normal' for noise with normal distribution.
+            For columns with digital values, one should choose 'uniform' as
+            well as an amplitude (amp) equal to the step size. Defaults to "uniform".
+    """
+    colsize = df[col].size
+    if mode == "uniform":
+        # Uniform jitter distribution
+        df[col] += amp * np.random.uniform(low=-1, high=1, size=colsize)
+    elif mode == "normal":
+        # Normal jitter distribution works better for non-linear
+        # transformations and jitter sizes that don't match the original bin
+        # sizes
+        df[col] += amp * np.random.standard_normal(size=colsize)
+
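As a usage sketch for the parallelized entry point (again not part of the module; the dataframe and names are made up), the dictionary form of bins bundles axes, ranges, and bin counts into a single argument, and jitter can be requested per axis:

import dask.dataframe
import numpy as np
import pandas as pd

pdf = pd.DataFrame(
    {
        "X": np.random.uniform(0, 100, 100_000),
        "Y": np.random.uniform(0, 100, 100_000),
        "t": np.random.uniform(0, 1000, 100_000),
    },
)
df = dask.dataframe.from_pandas(pdf, npartitions=4)

# axes as keys, (start, end, n_bins) tuples as values
res = bin_dataframe(
    df,
    bins={"X": (0.0, 100.0, 128), "Y": (0.0, 100.0, 128), "t": (0.0, 1000.0, 256)},
    jitter=["X", "Y"],
)
print(res.dims, res.shape)  # ('X', 'Y', 't') (128, 128, 256)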
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/binning/numba_bin.html b/sed/2.1.0/_modules/sed/binning/numba_bin.html
new file mode 100644
index 0000000..832f130
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/binning/numba_bin.html
@@ -0,0 +1,744 @@
+[Sphinx theme scaffold and navigation for "sed.binning.numba_bin — SED 0.2.1 documentation"; markup stripped]
Source code for sed.binning.numba_bin

+"""This file contains code for binning using numba precompiled code for the
+sed.binning module
+
+"""
+from typing import Any
+from typing import cast
+from typing import List
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+import numba
+import numpy as np
+
+
+@numba.jit(nogil=True, nopython=True)
+def _hist_from_bin_range(
+    sample: np.ndarray,
+    bins: Sequence[int],
+    ranges: np.ndarray,
+) -> np.ndarray:
+    """N dimensional binning function, pre-compiled by Numba for performance.
+    Behaves much like numpy.histogramdd, but calculates and returns unsigned 32
+    bit integers.
+
+    Args:
+        sample (np.ndarray): The data to be histogram'd with shape N,D.
+        bins (Sequence[int]): The number of bins for each dimension D.
+        ranges (np.ndarray): A sequence of length D, each an optional (lower,
+            upper) tuple giving the outer bin edges to be used if the edges are
+            not given explicitly in bins.
+
+    Raises:
+        ValueError: In case of dimension mismatch.
+
+    Returns:
+        np.ndarray: The computed histogram.
+    """
+    ndims = len(bins)
+    if sample.shape[1] != ndims:
+        raise ValueError(
+            "The dimension of bins is not equal to the dimension of the sample x",
+        )
+
+    hist = np.zeros(bins, np.uint32)
+    hist_flat = hist.ravel()
+    delta = np.zeros(ndims, np.float64)
+    strides = np.zeros(ndims, np.int64)
+
+    for i in range(ndims):
+        delta[i] = 1 / ((ranges[i, 1] - ranges[i, 0]) / bins[i])
+        strides[i] = hist.strides[i] // hist.itemsize
+
+    for t in range(sample.shape[0]):
+        is_inside = True
+        flatidx = 0
+        for i in range(ndims):
+            # strip off numerical rounding errors
+            j = round((sample[t, i] - ranges[i, 0]) * delta[i], 11)
+            # add counts on last edge
+            if j == bins[i]:
+                j = bins[i] - 1
+            is_inside = is_inside and (0 <= j < bins[i])
+            flatidx += int(j) * strides[i]
+            # don't check all axes if you already know you're out of the range
+            if not is_inside:
+                break
+        if is_inside:
+            hist_flat[flatidx] += int(is_inside)
+
+    return hist
+
+
+
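The inner loop above linearizes a D-dimensional bin index into a single offset into the flattened histogram, using the row-major strides of the output array expressed in elements rather than bytes. A plain-NumPy illustration of that equivalence (an editorial example, independent of the module):

import numpy as np

hist = np.zeros((4, 5, 6), np.uint32)
hist[2, 3, 4] = 7
# element strides of a C-ordered (4, 5, 6) array: [30, 6, 1]
strides = np.array(hist.strides) // hist.itemsize
flatidx = 2 * strides[0] + 3 * strides[1] + 4 * strides[2]
assert hist.ravel()[flatidx] == 7  # same element as hist[2, 3, 4]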
+@numba.jit(nogil=True, parallel=False, nopython=True)
+def binsearch(bins: np.ndarray, val: float) -> int:
+    """Bisection index search function.
+
+    Finds the index of the bin with the highest value below val, i.e. the left edge.
+    Returns -1 when the value is outside the bin range.
+
+    Args:
+        bins (np.ndarray): the array of bin edges to search in
+        val (float): value to search for
+
+    Returns:
+        int: index of the bin array, returns -1 when value is outside the bins range
+    """
+    if np.isnan(val):
+        return -1
+    low, high = 0, len(bins) - 1
+    mid = high // 2
+    if val == bins[high]:
+        return high - 1
+    if (val < bins[low]) | (val > bins[high]):
+        return -1
+
+    while True:
+        if val < bins[mid]:
+            high = mid
+        elif val < bins[mid + 1]:
+            return mid
+        else:
+            low = mid
+        mid = (low + high) // 2
+
+
+@numba.jit(nopython=True, nogil=True, parallel=False)
+def _hist_from_bins(
+    sample: np.ndarray,
+    bins: Sequence[np.ndarray],
+    shape: Tuple,
+) -> np.ndarray:
+    """Numba-powered binning method, similar to np.histogramdd.
+
+    Computes the histogram on pre-defined bins.
+
+    Args:
+        sample (np.ndarray): the array of shape (N,D) on which to compute the histogram
+        bins (Sequence[np.ndarray]): sequence of arrays defining the D bins on which
+            to compute the histogram, i.e. the desired output axes.
+        shape (Tuple): shape of the resulting array. Workaround for the fact that
+            numba does not allow creating tuples.
+
+    Returns:
+        hist: the computed n-dimensional histogram
+    """
+    ndims = len(bins)
+    if sample.shape[1] != ndims:
+        raise ValueError(
+            "The dimension of bins is not equal to the dimension of the sample x",
+        )
+    hist = np.zeros(shape, np.uint32)
+    hist_flat = hist.ravel()
+
+    strides = np.zeros(ndims, np.int64)
+
+    for i in range(ndims):
+        strides[i] = hist.strides[i] // hist.itemsize  # pylint: disable=E1136
+    for t in range(sample.shape[0]):
+        is_inside = True
+        flatidx = 0
+        for i in range(ndims):
+            j = binsearch(bins[i], sample[t, i])
+            # binsearch returns -1 when the value is outside the bin range
+            is_inside = is_inside and (j >= 0)
+            flatidx += int(j) * strides[i]
+            # don't check all axes if you already know you're out of the range
+            if not is_inside:
+                break
+        if is_inside:
+            hist_flat[flatidx] += int(is_inside)
+
+    return hist
+
+
+def numba_histogramdd(
+    sample: np.ndarray,
+    bins: Union[int, Sequence[int], Sequence[np.ndarray], np.ndarray],
+    ranges: Sequence = None,
+) -> Tuple[np.ndarray, List[np.ndarray]]:
+    """Multidimensional histogram function, powered by Numba.
+
+    Behaves overall much like numpy.histogramdd. Returns uint32 arrays.
+    This was chosen because it has a significant performance improvement over
+    uint64 for large binning volumes. Be aware that this can cause overflows
+    for very large sample sets exceeding 3E9 counts in a single bin. This
+    should never happen in a realistic photoemission experiment with useful bin
+    sizes.
+
+    Args:
+        sample (np.ndarray): The data to be histogrammed with shape N,D.
+        bins (Union[int, Sequence[int], Sequence[np.ndarray], np.ndarray]): The number
+            of bins for each dimension D, or a sequence of bin edges on which to
+            calculate the histogram.
+        ranges (Sequence, optional): The range(s) to use for binning when bins is a
+            sequence of integers or sequence of arrays. Defaults to None.
+
+    Raises:
+        ValueError: In case of dimension mismatch.
+        TypeError: Wrong type for bins.
+        ValueError: In case of wrong shape of bins.
+        RuntimeError: Internal shape error after binning.
+
+    Returns:
+        Tuple[np.ndarray, List[np.ndarray]]: 2-element tuple of the computed histogram
+        and a list of D arrays describing the bin edges for each dimension.
+
+        - **hist**: The computed histogram
+        - **edges**: A list of D arrays describing the bin edges for
+          each dimension.
+    """
+    try:
+        # Sample is an ND-array.
+        num_rows, num_cols = sample.shape  # pylint: disable=unused-variable
+    except (AttributeError, ValueError):
+        # Sample is a sequence of 1D arrays.
+        sample = np.atleast_2d(sample).T
+        num_rows, num_cols = sample.shape  # pylint: disable=unused-variable
+
+    if isinstance(bins, (int, np.int_)):  # bins provided as a single number
+        bins = num_cols * [bins]
+    num_bins = len(bins)  # Number of dimensions in bins
+
+    if num_bins != num_cols:  # check number of dimensions
+        raise ValueError(
+            "The dimension of bins must be equal to the dimension of the sample x.",
+        )
+
+    if not isinstance(bins[0], (int, np.int_, np.ndarray)):
+        raise TypeError(
+            f"bins must be int, np.ndarray or a sequence of the two. "
+            f"Found {type(bins[0])} instead",
+        )
+
+    # method == "array"
+    if isinstance(bins[0], np.ndarray):
+        bins = cast(List[np.ndarray], list(bins))
+        hist = _hist_from_bins(
+            sample,
+            tuple(bins),
+            tuple(b.size - 1 for b in bins),
+        )
+        return hist, bins
+
+    # method == "int"
+    assert isinstance(bins[0], (int, np.int_))
+    # normalize the range argument
+    if ranges is None:
+        raise ValueError(
+            "must define a value for ranges when bins is the number of bins",
+        )
+    if num_cols == 1 and isinstance(ranges[0], (int, float)):
+        ranges = (ranges,)
+    elif len(ranges) != num_cols:
+        raise ValueError(
+            "range argument must have one entry per dimension",
+        )
+
+    # ranges = np.asarray(ranges)
+    bins = tuple(bins)
+
+    # Create edge arrays
+    edges: List[Any] = []
+    nbin = np.empty(num_cols, int)
+
+    for i in range(num_cols):
+        edges.append(np.linspace(ranges[i][0], ranges[i][1], bins[i] + 1))
+
+        nbin[i] = len(edges[i]) + 1  # includes an outlier on each end
+
+    hist = _hist_from_bin_range(sample, bins, np.asarray(ranges))
+
+    if (hist.shape != nbin - 2).any():
+        raise RuntimeError("Internal Shape Error")
+
+    return hist, edges
+
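A quick way to sanity-check numba_histogramdd (a sketch, assuming numba is installed) is to compare it against numpy.histogramdd on random data; the two should agree apart from the uint32 output type and possible differences for values falling exactly on bin edges:

import numpy as np

sample = np.random.uniform(0, 1, size=(100_000, 2))
hist, edges = numba_histogramdd(sample, bins=[64, 64], ranges=[(0, 1), (0, 1)])
ref, _ = np.histogramdd(sample, bins=[64, 64], range=[(0, 1), (0, 1)])
print(np.array_equal(hist, ref.astype(np.uint32)))  # expected: True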
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/binning/utils.html b/sed/2.1.0/_modules/sed/binning/utils.html
new file mode 100644
index 0000000..b825b97
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/binning/utils.html
@@ -0,0 +1,666 @@
+[Sphinx theme scaffold and navigation for "sed.binning.utils — SED 0.2.1 documentation"; markup stripped]
Source code for sed.binning.utils

+"""This file contains helper functions for the sed.binning module
+
+"""
+from typing import cast
+from typing import List
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+import numpy as np
+
+
+def _arraysum(array_a, array_b):
+    """Calculate the sum of two arrays."""
+    return array_a + array_b
+
+
+
+def simplify_binning_arguments(
+    bins: Union[
+        int,
+        dict,
+        Sequence[int],
+        Sequence[np.ndarray],
+        Sequence[tuple],
+    ],
+    axes: Sequence[str] = None,
+    ranges: Sequence[Tuple[float, float]] = None,
+) -> Tuple[Union[List[int], List[np.ndarray]], List[str], List[Tuple[float, float]]]:
+    """Convert the flexible input for defining bins into a
+    simple "axes" "bins" "ranges" tuple.
+
+    This allows the binning functions defined here to mimic the input
+    flexibility of numpy.histogramdd.
+
+    Args:
+        bins (int, dict, Sequence[int], Sequence[np.ndarray], Sequence[tuple]):
+            Definition of the bins. Can be any of the following cases:
+
+            - an integer describing the number of bins for all dimensions. This
+              requires "ranges" to be defined as well.
+            - a sequence containing one entry of the following types for each
+              dimension:
+
+              - an integer describing the number of bins. This requires "ranges"
+                to be defined as well.
+              - a np.ndarray defining the bin centers
+              - a tuple of 3 numbers describing start, end and step of the binning
+                range.
+
+            - a dictionary made of the axes as keys and any of the above as
+              values.
+
+            The last option takes priority over the axes and range arguments.
+        axes (Sequence[str], optional): Sequence containing the names of
+            the axes (columns) on which to calculate the histogram. The order will
+            be the order of the dimensions in the resulting array. Not required if
+            bins are provided as a dictionary containing the axis names.
+            Defaults to None.
+        ranges (Sequence[Tuple[float, float]], optional): Sequence of tuples
+            containing the start and end point of the binning range. Required if
+            bins are given as int or Sequence[int]. Defaults to None.
+
+    Raises:
+        ValueError: Wrong shape of bins.
+        TypeError: Wrong type of bins.
+        AttributeError: Axes not defined.
+        AttributeError: Shape mismatch.
+
+    Returns:
+        Tuple[Union[List[int], List[np.ndarray]], List[str], List[Tuple[float, float]]]:
+        Tuple containing lists of bins, axes, and ranges.
+    """
+    # if bins is a dictionary: unravel to axes and bins
+    if isinstance(bins, dict):
+        axes = []
+        bins_ = []
+        for k, v in bins.items():
+            axes.append(k)
+            bins_.append(v)
+        bins = bins_
+
+    # if bins provided as single int, apply to all dimensions
+    if isinstance(bins, (int, np.int64)):
+        bins = [bins] * len(axes)
+
+    # Check that we have a sequence of bins now
+    if not isinstance(bins, Sequence):
+        raise TypeError(f"Cannot interpret bins of type {type(bins)}")
+
+    # check that we have axes
+    if axes is None:
+        raise AttributeError("Must define on which axes to bin")
+
+    # check that axes is a sequence
+    if not isinstance(axes, Sequence):
+        raise TypeError(f"Cannot interpret axes of type {type(axes)}")
+
+    # check that all elements of axes are str
+    if not all(isinstance(axis, str) for axis in axes):
+        raise TypeError("Axes has to contain only strings!")
+
+    # we got tuples as bins, expand to bins and ranges
+    if all(isinstance(x, tuple) for x in bins):
+        bins = cast(Sequence[tuple], bins)
+        assert len(bins[0]) == 3, "Tuples as bins need to have format (start, end, num_bins)."
+        ranges = []
+        bins_ = []
+        for tpl in bins:
+            assert isinstance(tpl, tuple)
+            ranges.append((tpl[0], tpl[1]))
+            bins_.append(tpl[2])
+        bins = bins_
+
+    # if bins are provided as int, check that ranges are present
+    if all(isinstance(x, (int, np.int64)) for x in bins):
+        bins = cast(List[int], list(bins))
+        if ranges is None:
+            raise AttributeError(
+                "Must provide a range if bins is an integer or list of integers",
+            )
+        if not isinstance(ranges, Sequence):
+            raise AttributeError(
+                f"Ranges must be a sequence, not {type(ranges)}.",
+            )
+
+    # otherwise, all bins should be of type np.ndarray here
+    elif all(isinstance(x, np.ndarray) for x in bins):
+        bins = cast(List[np.ndarray], list(bins))
+    else:
+        raise TypeError(f"Could not interpret bins of type {type(bins)}")
+
+    # check that number of bins and number of axes is the same.
+    if len(axes) != len(bins):
+        raise AttributeError(
+            "axes and bins must have the same number of elements",
+        )
+
+    return bins, list(axes), list(ranges) if ranges else None
+
+
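The accepted input styles thus normalize to the same triple. A small editorial sketch (not part of the module):

# dictionary form: axes as keys, (start, end, n_bins) tuples as values
b1, a1, r1 = simplify_binning_arguments(
    {"X": (0.0, 1.0, 100), "Y": (0.0, 2.0, 50)},
)
# explicit form: bin counts plus separate axes and ranges
b2, a2, r2 = simplify_binning_arguments(
    [100, 50],
    axes=["X", "Y"],
    ranges=[(0.0, 1.0), (0.0, 2.0)],
)
print(b1 == b2, a1 == a2, r1 == r2)  # True True True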
+def bin_edges_to_bin_centers(bin_edges: np.ndarray) -> np.ndarray:
+    """Converts a list of bin edges into corresponding bin centers
+
+    Args:
+        bin_edges: 1d array of bin edges
+
+    Returns:
+        bin_centers: 1d array of bin centers
+    """
+    bin_centers = (bin_edges[1:] + bin_edges[:-1]) / 2
+
+    return bin_centers
+
+
+def bin_centers_to_bin_edges(bin_centers: np.ndarray) -> np.ndarray:
+    """Converts a list of bin centers into corresponding bin edges
+
+    Args:
+        bin_centers: 1d array of bin centers
+
+    Returns:
+        bin_edges: 1d array of bin edges
+    """
+    bin_edges = (bin_centers[1:] + bin_centers[:-1]) / 2
+
+    bin_edges = np.insert(
+        bin_edges,
+        0,
+        bin_centers[0] - (bin_centers[1] - bin_centers[0]) / 2,
+    )
+    bin_edges = np.append(
+        bin_edges,
+        bin_centers[len(bin_centers) - 1]
+        + (bin_centers[len(bin_centers) - 1] - bin_centers[len(bin_centers) - 2]) / 2,
+    )
+
+    return bin_edges
+
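For uniformly spaced grids the two converters are inverses of each other, as a short example shows:

import numpy as np

centers = np.array([0.5, 1.5, 2.5, 3.5])
edges = bin_centers_to_bin_edges(centers)  # [0. 1. 2. 3. 4.]
print(np.allclose(bin_edges_to_bin_centers(edges), centers))  # True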
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/calibrator/delay.html b/sed/2.1.0/_modules/sed/calibrator/delay.html
new file mode 100644
index 0000000..7a50704
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/calibrator/delay.html
@@ -0,0 +1,910 @@
+[Sphinx theme scaffold and navigation for "sed.calibrator.delay — SED 0.2.1 documentation"; markup stripped]
Source code for sed.calibrator.delay

+"""sed.calibrator.delay module. Code for delay calibration.
+"""
+from copy import deepcopy
+from datetime import datetime
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+import dask.dataframe
+import h5py
+import numpy as np
+import pandas as pd
+
+from sed.core import dfops
+
+
+
+class DelayCalibrator:
+    """
+    Pump-Probe delay calibration methods.
+    Initialization of the DelayCalibrator class passes the config.
+
+    Args:
+        config (dict, optional): Config dictionary. Defaults to None.
+    """
+
+    def __init__(
+        self,
+        config: dict = None,
+    ) -> None:
+        """Initialization of the DelayCalibrator class passes the config.
+
+        Args:
+            config (dict, optional): Config dictionary. Defaults to None.
+        """
+        if config is not None:
+            self._config = config
+        else:
+            self._config = {}
+
+        self.adc_column: str = self._config["dataframe"].get("adc_column", None)
+        self.delay_column: str = self._config["dataframe"]["delay_column"]
+        self.corrected_delay_column = self._config["dataframe"].get(
+            "corrected_delay_column",
+            self.delay_column,
+        )
+        self.calibration: Dict[str, Any] = self._config["delay"].get("calibration", {})
+        self.offsets: Dict[str, Any] = self._config["delay"].get("offsets", {})
+
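Note that __init__ indexes config["dataframe"] and config["delay"] directly, so the calibrator needs at least those two sections to be present. A minimal, purely illustrative config sketch (the key values below are invented for the example, not defaults shipped with sed):

config = {
    "dataframe": {
        "adc_column": "ADC",
        "delay_column": "delay",
        "adc_binning": 2,
    },
    "delay": {
        "adc_range": [2600, 27600],
        # hypothetical hdf5 attribute paths; "@" selects file attributes
        "p1_key": "@delay_stage/p1",
        "p2_key": "@delay_stage/p2",
        "t0_key": "@delay_stage/t0",
    },
}
dc = DelayCalibrator(config=config)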
+    def append_delay_axis(
+        self,
+        df: Union[pd.DataFrame, dask.dataframe.DataFrame],
+        adc_column: str = None,
+        delay_column: str = None,
+        calibration: Dict[str, Any] = None,
+        adc_range: Union[Tuple, List, np.ndarray] = None,
+        delay_range: Union[Tuple, List, np.ndarray] = None,
+        time0: float = None,
+        delay_range_mm: Union[Tuple, List, np.ndarray] = None,
+        datafile: str = None,
+        p1_key: str = None,
+        p2_key: str = None,
+        t0_key: str = None,
+        verbose: bool = True,
+    ) -> Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]:
+        """Calculate and append the delay axis to the events dataframe, by converting
+        values from an analog-digital-converter (ADC).
+
+        Args:
+            df (Union[pd.DataFrame, dask.dataframe.DataFrame]): The dataframe where
+                to apply the delay calibration to.
+            adc_column (str, optional): Source column for delay calibration.
+                Defaults to config["dataframe"]["adc_column"].
+            delay_column (str, optional): Destination column for delay calibration.
+                Defaults to config["dataframe"]["delay_column"].
+            calibration (dict, optional): Calibration dictionary with parameters for
+                delay calibration.
+            adc_range (Union[Tuple, List, np.ndarray], optional): The range of used
+                ADC values. Defaults to config["delay"]["adc_range"].
+            delay_range (Union[Tuple, List, np.ndarray], optional): Range of scanned
+                delay values in ps. If omitted, the range is calculated from the
+                delay_range_mm and t0 values.
+            time0 (float, optional): Pump-Probe overlap value of the delay coordinate.
+                If omitted, it is searched for in the data files.
+            delay_range_mm (Union[Tuple, List, np.ndarray], optional): Range of scanned
+                delay stage in mm. If omitted, it is searched for in the data files.
+            datafile (str, optional): Datafile in which delay parameters are searched
+                for. Defaults to None.
+            p1_key (str, optional): hdf5 key for delay_range_mm start value.
+                Defaults to config["delay"]["p1_key"]
+            p2_key (str, optional): hdf5 key for delay_range_mm end value.
+                Defaults to config["delay"]["p2_key"]
+            t0_key (str, optional): hdf5 key for t0 value (mm).
+                Defaults to config["delay"]["t0_key"]
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to True.
+
+        Raises:
+            ValueError: Raised if delay parameters are not found in the file.
+            NotImplementedError: Raised if no sufficient information passed.
+
+        Returns:
+            Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]: dataframe
+            with added delay column and delay calibration metadata dictionary.
+        """
+        # pylint: disable=duplicate-code
+        if calibration is None:
+            calibration = deepcopy(self.calibration)
+
+        if (
+            adc_range is not None
+            or delay_range is not None
+            or time0 is not None
+            or delay_range_mm is not None
+            or datafile is not None
+        ):
+            calibration = {}
+            calibration["creation_date"] = datetime.now().timestamp()
+            if adc_range is not None:
+                calibration["adc_range"] = adc_range
+            if delay_range is not None:
+                calibration["delay_range"] = delay_range
+            if time0 is not None:
+                calibration["time0"] = time0
+            if delay_range_mm is not None:
+                calibration["delay_range_mm"] = delay_range_mm
+        else:
+            # report usage of loaded parameters
+            if "creation_date" in calibration and verbose:
+                datestring = datetime.fromtimestamp(calibration["creation_date"]).strftime(
+                    "%m/%d/%Y, %H:%M:%S",
+                )
+                print(f"Using delay calibration parameters generated on {datestring}")
+
+        if adc_column is None:
+            adc_column = self.adc_column
+        if delay_column is None:
+            delay_column = self.delay_column
+        if p1_key is None:
+            p1_key = self._config["delay"].get("p1_key", "")
+        if p2_key is None:
+            p2_key = self._config["delay"].get("p2_key", "")
+        if t0_key is None:
+            t0_key = self._config["delay"].get("t0_key", "")
+
+        if "adc_range" not in calibration.keys():
+            calibration["adc_range"] = np.asarray(
+                self._config["delay"]["adc_range"],
+            ) / 2 ** (self._config["dataframe"]["adc_binning"] - 1)
+
+        if "delay_range" not in calibration.keys():
+            if "delay_range_mm" not in calibration.keys() or "time0" not in calibration.keys():
+                if datafile is not None and p1_key and p2_key and t0_key:
+                    try:
+                        ret = extract_delay_stage_parameters(
+                            datafile,
+                            p1_key,
+                            p2_key,
+                            t0_key,
+                        )
+                    except KeyError as exc:
+                        raise ValueError(
+                            "Delay stage values not found in file",
+                        ) from exc
+                    calibration["datafile"] = datafile
+                    calibration["delay_range_mm"] = (ret[0], ret[1])
+                    calibration["time0"] = ret[2]
+                    if verbose:
+                        print(f"Extract delay range from file '{datafile}'.")
+                else:
+                    raise NotImplementedError(
+                        "Not enough parameters for delay calibration.",
+                    )
+
+            calibration["delay_range"] = np.asarray(
+                mm_to_ps(
+                    np.asarray(calibration["delay_range_mm"]),
+                    calibration["time0"],
+                ),
+            )
+            if verbose:
+                print(f"Converted delay_range (ps) = {calibration['delay_range']}")
+            calibration["creation_date"] = datetime.now().timestamp()
+
+        if "delay_range" in calibration.keys():
+            df[delay_column] = calibration["delay_range"][0] + (
+                df[adc_column] - calibration["adc_range"][0]
+            ) * (calibration["delay_range"][1] - calibration["delay_range"][0]) / (
+                calibration["adc_range"][1] - calibration["adc_range"][0]
+            )
+            self.calibration = deepcopy(calibration)
+            if verbose:
+                print(
+                    "Append delay axis using delay_range = "
+                    f"[{calibration['delay_range'][0]}, {calibration['delay_range'][1]}]"
+                    " and adc_range = "
+                    f"[{calibration['adc_range'][0]}, {calibration['adc_range'][1]}]",
+                )
+        else:
+            raise NotImplementedError
+
+        metadata = {"calibration": calibration}
+        return df, metadata
+
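The calibration applied above is a plain linear map between the ADC endpoints and the delay endpoints, delay = delay_range[0] + (adc - adc_range[0]) * (delay_range[1] - delay_range[0]) / (adc_range[1] - adc_range[0]), which can be checked with made-up numbers:

adc_range = (2600.0, 27600.0)
delay_range = (-5.0, 5.0)  # ps, illustrative

def adc_to_delay(adc: float) -> float:
    # linear interpolation between the two calibration endpoints
    return delay_range[0] + (adc - adc_range[0]) * (
        delay_range[1] - delay_range[0]
    ) / (adc_range[1] - adc_range[0])

print(adc_to_delay(2600.0))   # -5.0 (start of the ADC range)
print(adc_to_delay(15100.0))  # 0.0 (midpoint)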
+    def add_offsets(
+        self,
+        df: dask.dataframe.DataFrame,
+        offsets: Dict[str, Any] = None,
+        constant: float = None,
+        flip_delay_axis: bool = None,
+        columns: Union[str, Sequence[str]] = None,
+        weights: Union[float, Sequence[float]] = 1.0,
+        preserve_mean: Union[bool, Sequence[bool]] = False,
+        reductions: Union[str, Sequence[str]] = None,
+        delay_column: str = None,
+        verbose: bool = True,
+    ) -> Tuple[dask.dataframe.DataFrame, dict]:
+        """Apply an offset to the delay column based on a constant or other columns.
+
+        Args:
+            df (Union[pd.DataFrame, dask.dataframe.DataFrame]): Dataframe to use.
+            offsets (Dict, optional): Dictionary of delay offset parameters.
+            constant (float, optional): The constant to shift the delay axis by.
+            flip_delay_axis (bool, optional): Whether to flip the time axis.
+                Defaults to False.
+            columns (Union[str, Sequence[str]]): Name of the column(s) to apply the
+                shift from.
+            weights (Union[float, Sequence[float]]): weights to apply to the columns.
+                Can also be used to flip the sign (e.g. -1). Defaults to 1.
+            preserve_mean (bool): Whether to subtract the mean of the column before
+                applying the shift. Defaults to False.
+            reductions (str): The reduction to apply to the column. Should be an
+                available method of dask.dataframe.Series. For example "mean". In this
+                case the function is applied to the column to generate a single value
+                for the whole dataset. If None, the shift is applied per-dataframe-row.
+                Defaults to None. Currently only "mean" is supported.
+            delay_column (str, optional): Name of the column containing the delay values.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to True.
+
+        Returns:
+            dask.dataframe.DataFrame: Dataframe with the shifted delay axis.
+            dict: Metadata dictionary.
+        """
+        if offsets is None:
+            offsets = deepcopy(self.offsets)
+
+        if delay_column is None:
+            delay_column = self.delay_column
+
+        metadata: Dict[str, Any] = {
+            "applied": True,
+        }
+
+        if columns is not None or constant is not None or flip_delay_axis:
+            # pylint:disable=duplicate-code
+            # use passed parameters, overwrite config
+            offsets = {}
+            offsets["creation_date"] = datetime.now().timestamp()
+            # column-based offsets
+            if columns is not None:
+                if weights is None:
+                    weights = 1
+                if isinstance(weights, (int, float, np.integer, np.floating)):
+                    weights = [weights]
+                if len(weights) == 1:
+                    weights = [weights[0]] * len(columns)
+                if not isinstance(weights, Sequence):
+                    raise TypeError(
+                        f"Invalid type for weights: {type(weights)}. Must be a number or sequence",
+                    )
+                if not all(isinstance(s, (int, float, np.integer, np.floating)) for s in weights):
+                    raise TypeError(
+                        f"Invalid type for weights: {type(weights)}. Must be a number or sequence",
+                    )
+
+                if isinstance(columns, str):
+                    columns = [columns]
+                if isinstance(preserve_mean, bool):
+                    preserve_mean = [preserve_mean] * len(columns)
+                if not isinstance(reductions, Sequence):
+                    reductions = [reductions]
+                if len(reductions) == 1:
+                    reductions = [reductions[0]] * len(columns)
+
+                # store in offsets dictionary
+                for col, weight, pmean, red in zip(columns, weights, preserve_mean, reductions):
+                    offsets[col] = {
+                        "weight": weight,
+                        "preserve_mean": pmean,
+                        "reduction": red,
+                    }
+
+            # constant offset
+            if isinstance(constant, (int, float, np.integer, np.floating)):
+                offsets["constant"] = constant
+            elif constant is not None:
+                raise TypeError(f"Invalid type for constant: {type(constant)}")
+            # flip the time direction
+            if flip_delay_axis:
+                offsets["flip_delay_axis"] = flip_delay_axis
+
+        elif "creation_date" in offsets and verbose:
+            datestring = datetime.fromtimestamp(offsets["creation_date"]).strftime(
+                "%m/%d/%Y, %H:%M:%S",
+            )
+            print(f"Using delay offset parameters generated on {datestring}")
+
+        if len(offsets) > 0:
+            # unpack dictionary
+            columns = []
+            weights = []
+            preserve_mean = []
+            reductions = []
+            if verbose:
+                print("Delay offset parameters:")
+            for k, v in offsets.items():
+                if k == "creation_date":
+                    continue
+                if k == "constant":
+                    constant = v
+                    if verbose:
+                        print(f"   Constant: {constant} ")
+                elif k == "flip_delay_axis":
+                    fda = str(v)
+                    if fda.lower() in ["true", "1"]:
+                        flip_delay_axis = True
+                    elif fda.lower() in ["false", "0"]:
+                        flip_delay_axis = False
+                    else:
+                        raise ValueError(
+                            f"Invalid value for flip_delay_axis in config: {flip_delay_axis}.",
+                        )
+                    if verbose:
+                        print(f"   Flip delay axis: {flip_delay_axis} ")
+                else:
+                    columns.append(k)
+                    try:
+                        weight = v["weight"]
+                    except KeyError:
+                        weight = 1
+                    weights.append(weight)
+                    pm = v.get("preserve_mean", False)
+                    preserve_mean.append(pm)
+                    red = v.get("reduction", None)
+                    reductions.append(red)
+                    if verbose:
+                        print(
+                            f"   Column[{k}]: Weight={weight}, Preserve Mean: {pm}, ",
+                            f"Reductions: {red}.",
+                        )
+
+            if len(columns) > 0:
+                df = dfops.offset_by_other_columns(
+                    df=df,
+                    target_column=delay_column,
+                    offset_columns=columns,
+                    weights=weights,
+                    preserve_mean=preserve_mean,
+                    reductions=reductions,
+                )
+
+            if constant:
+                df[delay_column] = df.map_partitions(
+                    lambda x: x[delay_column] + constant,
+                    meta=(delay_column, np.float64),
+                )
+
+            if flip_delay_axis:
+                df[delay_column] = -df[delay_column]
+
+        self.offsets = offsets
+        metadata["offsets"] = offsets
+
+        return df, metadata
+
+
[docs]def extract_delay_stage_parameters(
+    file: str,
+    p1_key: str,
+    p2_key: str,
+    t0_key: str,
+) -> Tuple:
+    """Read delay stage ranges from an hdf5 file.
+
+    Args:
+        file (str): filename
+        p1_key (str): hdf5 path to the start of the scan range
+        p2_key (str): hdf5 path to the end of the scan range
+        t0_key (str): hdf5 path to the t0 value
+
+    Returns:
+        tuple: (p1_value, p2_value, t0_value)
+    """
+    with h5py.File(file, "r") as file_handle:
+        values = []
+        for key in [p1_key, p2_key, t0_key]:
+            if key[0] == "@":
+                values.append(file_handle.attrs[key[1:]])
+            else:
+                values.append(file_handle[key])
+
+    return tuple(values)
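An illustration of the key convention used above: a leading "@" reads an HDF5 file attribute, while any other string is treated as a dataset path. The file name and paths below are hypothetical.

p1, p2, t0 = extract_delay_stage_parameters(
    "run_0123.h5",                      # hypothetical file
    p1_key="/entry/scan/delay_start",   # dataset path
    p2_key="/entry/scan/delay_end",     # dataset path
    t0_key="@t0_position",              # "@" prefix -> read from file attributes
)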
+ + +
[docs]def mm_to_ps(
+    delay_mm: Union[float, np.ndarray],
+    time0_mm: float,
+) -> Union[float, np.ndarray]:
+    """Converts a delay stage position in mm into a relative delay in picoseconds
+    (double pass).
+
+    Args:
+        delay_mm (Union[float, np.ndarray]): Delay stage position in mm
+        time0_mm (float): Delay stage position of pump-probe overlap in mm
+
+    Returns:
+        Union[float, np.ndarray]: Relative delay in picoseconds
+    """
+    delay_ps = (delay_mm - time0_mm) / 0.15
+    return delay_ps
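A worked check of the conversion factor: light travels at c ≈ 0.2998 mm/ps, and the double pass doubles the optical path, so Δt = 2·Δx/c ≈ Δx/0.15 ps per mm of stage travel.

import numpy as np

# 0.15 mm past time zero -> 2 * 0.15 mm / 0.2998 mm/ps ≈ 1 ps
mm_to_ps(25.15, time0_mm=25.0)                    # -> ~1.0
mm_to_ps(np.array([24.85, 25.0, 25.15]), 25.0)    # -> array([-1., 0., 1.])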
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/calibrator/energy.html b/sed/2.1.0/_modules/sed/calibrator/energy.html
new file mode 100644
index 0000000..4ab330b
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/calibrator/energy.html
@@ -0,0 +1,2910 @@
+sed.calibrator.energy — SED 0.2.1 documentation

Source code for sed.calibrator.energy

+"""sed.calibrator.energy module. Code for energy calibration and
+correction. Mostly ported from https://github.com/mpes-kit/mpes.
+"""
+import itertools as it
+import warnings as wn
+from copy import deepcopy
+from datetime import datetime
+from functools import partial
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Literal
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+import bokeh.plotting as pbk
+import dask.dataframe
+import h5py
+import ipywidgets as ipw
+import matplotlib
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+import psutil
+import xarray as xr
+from bokeh.io import output_notebook
+from bokeh.palettes import Category10 as ColorCycle
+from fastdtw import fastdtw
+from IPython.display import display
+from lmfit import Minimizer
+from lmfit import Parameters
+from lmfit.printfuncs import report_fit
+from numpy.linalg import lstsq
+from scipy.signal import savgol_filter
+from scipy.sparse.linalg import lsqr
+
+from sed.binning import bin_dataframe
+from sed.core import dfops
+from sed.loader.base.loader import BaseLoader
+
+
+
[docs]class EnergyCalibrator: + """Electron binding energy calibration workflow. + + For the initialization of the EnergyCalibrator class an instance of a + loader is required. The data can be loaded using the optional arguments, + or using the load_data method or bin_data method. + + Args: + loader (BaseLoader): Instance of a loader, subclassed from BaseLoader. + biases (np.ndarray, optional): Bias voltages used. Defaults to None. + traces (np.ndarray, optional): TOF-Data traces corresponding to the bias + values. Defaults to None. + tof (np.ndarray, optional): TOF-values for the data traces. + Defaults to None. + config (dict, optional): Config dictionary. Defaults to None. + """ + + def __init__( + self, + loader: BaseLoader, + biases: np.ndarray = None, + traces: np.ndarray = None, + tof: np.ndarray = None, + config: dict = None, + ): + """For the initialization of the EnergyCalibrator class an instance of a + loader is required. The data can be loaded using the optional arguments, + or using the load_data method or bin_data method. + + Args: + loader (BaseLoader): Instance of a loader, subclassed from BaseLoader. + biases (np.ndarray, optional): Bias voltages used. Defaults to None. + traces (np.ndarray, optional): TOF-Data traces corresponding to the bias + values. Defaults to None. + tof (np.ndarray, optional): TOF-values for the data traces. + Defaults to None. + config (dict, optional): Config dictionary. Defaults to None. + """ + self.loader = loader + self.biases: np.ndarray = None + self.traces: np.ndarray = None + self.traces_normed: np.ndarray = None + self.tof: np.ndarray = None + + if traces is not None and tof is not None and biases is not None: + self.load_data(biases=biases, traces=traces, tof=tof) + + if config is None: + config = {} + + self._config = config + + self.featranges: List[Tuple] = [] # Value ranges for feature detection + self.peaks: np.ndarray = np.asarray([]) + self.calibration: Dict[str, Any] = self._config["energy"].get("calibration", {}) + + self.tof_column = self._config["dataframe"]["tof_column"] + self.tof_ns_column = self._config["dataframe"].get("tof_ns_column", None) + self.corrected_tof_column = self._config["dataframe"]["corrected_tof_column"] + self.energy_column = self._config["dataframe"]["energy_column"] + self.x_column = self._config["dataframe"]["x_column"] + self.y_column = self._config["dataframe"]["y_column"] + self.binwidth: float = self._config["dataframe"]["tof_binwidth"] + self.binning: int = self._config["dataframe"]["tof_binning"] + self.x_width = self._config["energy"]["x_width"] + self.y_width = self._config["energy"]["y_width"] + self.tof_width = np.asarray( + self._config["energy"]["tof_width"], + ) / 2 ** (self.binning - 1) + self.tof_fermi = self._config["energy"]["tof_fermi"] / 2 ** (self.binning - 1) + self.color_clip = self._config["energy"]["color_clip"] + self.sector_delays = self._config["dataframe"].get("sector_delays", None) + self.sector_id_column = self._config["dataframe"].get("sector_id_column", None) + self.offsets: Dict[str, Any] = self._config["energy"].get("offsets", {}) + self.correction: Dict[str, Any] = self._config["energy"].get("correction", {}) + + @property + def ntraces(self) -> int: + """Property returning the number of traces. + + Returns: + int: The number of loaded/calculated traces. + """ + return len(self.traces) + + @property + def nranges(self) -> int: + """Property returning the number of specified feature ranges which Can be a + multiple of ntraces. 
+ + Returns: + int: The number of specified feature ranges. + """ + return len(self.featranges) + + @property + def dup(self) -> int: + """Property returning the duplication number, i.e. the number of feature + ranges per trace. + + Returns: + int: The duplication number. + """ + return int(np.round(self.nranges / self.ntraces)) + +
[docs] def load_data( + self, + biases: np.ndarray = None, + traces: np.ndarray = None, + tof: np.ndarray = None, + ): + """Load data into the class. Not provided parameters will be overwritten by + empty arrays. + + Args: + biases (np.ndarray, optional): Bias voltages used. Defaults to None. + traces (np.ndarray, optional): TOF-Data traces corresponding to the bias + values. Defaults to None. + tof (np.ndarray, optional): TOF-values for the data traces. + Defaults to None. + """ + if biases is not None: + self.biases = biases + else: + self.biases = np.asarray([]) + if tof is not None: + self.tof = tof + else: + self.tof = np.asarray([]) + if traces is not None: + self.traces = self.traces_normed = traces + else: + self.traces = self.traces_normed = np.asarray([])
+ +
[docs] def bin_data( + self, + data_files: List[str], + axes: List[str] = None, + bins: List[int] = None, + ranges: Sequence[Tuple[float, float]] = None, + biases: np.ndarray = None, + bias_key: str = None, + **kwds, + ): + """Bin data from single-event files, and load into class. + + Args: + data_files (List[str]): list of file names to bin + axes (List[str], optional): bin axes. Defaults to + config["dataframe"]["tof_column"]. + bins (List[int], optional): number of bins. + Defaults to config["energy"]["bins"]. + ranges (Sequence[Tuple[float, float]], optional): bin ranges. + Defaults to config["energy"]["ranges"]. + biases (np.ndarray, optional): Bias voltages used. + If not provided, biases are extracted from the file meta data. + bias_key (str, optional): hdf5 path where bias values are stored. + Defaults to config["energy"]["bias_key"]. + **kwds: Keyword parameters for bin_dataframe + """ + if axes is None: + axes = [self.tof_column] + if bins is None: + bins = [self._config["energy"]["bins"]] + if ranges is None: + ranges_ = [ + np.array(self._config["energy"]["ranges"]) / 2 ** (self.binning - 1), + ] + ranges = [cast(Tuple[float, float], tuple(v)) for v in ranges_] + # pylint: disable=duplicate-code + hist_mode = kwds.pop("hist_mode", self._config["binning"]["hist_mode"]) + mode = kwds.pop("mode", self._config["binning"]["mode"]) + pbar = kwds.pop("pbar", self._config["binning"]["pbar"]) + try: + num_cores = kwds.pop("num_cores", self._config["binning"]["num_cores"]) + except KeyError: + num_cores = psutil.cpu_count() - 1 + threads_per_worker = kwds.pop( + "threads_per_worker", + self._config["binning"]["threads_per_worker"], + ) + threadpool_api = kwds.pop( + "threadpool_API", + self._config["binning"]["threadpool_API"], + ) + + read_biases = False + if biases is None: + read_biases = True + if bias_key is None: + try: + bias_key = self._config["energy"]["bias_key"] + except KeyError as exc: + raise ValueError( + "Either Bias Values or a valid bias_key has to be present!", + ) from exc + + dataframe, _, _ = self.loader.read_dataframe( + files=data_files, + collect_metadata=False, + ) + traces = bin_dataframe( + dataframe, + bins=bins, + axes=axes, + ranges=ranges, + hist_mode=hist_mode, + mode=mode, + pbar=pbar, + n_cores=num_cores, + threads_per_worker=threads_per_worker, + threadpool_api=threadpool_api, + return_partitions=True, + **kwds, + ) + if read_biases: + if bias_key: + try: + biases = extract_bias(data_files, bias_key) + except KeyError as exc: + raise ValueError( + "Either Bias Values or a valid bias_key has to be present!", + ) from exc + tof = traces.coords[(axes[0])] + self.traces = self.traces_normed = np.asarray(traces.T) + self.tof = np.asarray(tof) + self.biases = np.asarray(biases)
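A usage sketch of the binning workflow (assumptions: a configured loader and config dict, existing data files, and an illustrative hdf5 bias path):

ec = EnergyCalibrator(loader=loader, config=config)  # loader/config assumed given
ec.bin_data(
    data_files=["bias_scan_00.h5", "bias_scan_01.h5"],  # hypothetical file names
    bias_key="@lens_sample_voltage",  # "@" prefix -> bias read from file attributes
)
ec.normalize(smooth=True, span=7, order=1)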
+ +
[docs] def normalize(self, smooth: bool = False, span: int = 7, order: int = 1): + """Normalize the spectra along an axis. + + Args: + smooth (bool, optional): Option to smooth the signals before normalization. + Defaults to False. + span (int, optional): span smoothing parameters of the LOESS method + (see ``scipy.signal.savgol_filter()``). Defaults to 7. + order (int, optional): order smoothing parameters of the LOESS method + (see ``scipy.signal.savgol_filter()``). Defaults to 1. + """ + self.traces_normed = normspec( + self.traces, + smooth=smooth, + span=span, + order=order, + )
+ +
[docs] def adjust_ranges( + self, + ranges: Tuple, + ref_id: int = 0, + traces: np.ndarray = None, + peak_window: int = 7, + apply: bool = False, + **kwds, + ): + """Display a tool to select or extract the equivalent feature ranges + (containing the peaks) among all traces. + + Args: + ranges (Tuple): + Collection of feature detection ranges, within which an algorithm + (i.e. 1D peak detector) with look for the feature. + ref_id (int, optional): Index of the reference trace. Defaults to 0. + traces (np.ndarray, optional): Collection of energy dispersion curves. + Defaults to self.traces_normed. + peak_window (int, optional): area around a peak to check for other peaks. + Defaults to 7. + apply (bool, optional): Option to directly apply the provided parameters. + Defaults to False. + **kwds: + keyword arguments for trace alignment (see ``find_correspondence()``). + """ + if traces is None: + traces = self.traces_normed + + self.add_ranges( + ranges=ranges, + ref_id=ref_id, + traces=traces, + infer_others=True, + mode="replace", + ) + self.feature_extract(peak_window=peak_window) + + # make plot + labels = kwds.pop("labels", [str(b) + " V" for b in self.biases]) + figsize = kwds.pop("figsize", (8, 4)) + plot_segs = [] + plot_peaks = [] + fig, ax = plt.subplots(figsize=figsize) + colors = plt.get_cmap("rainbow")(np.linspace(0, 1, len(traces))) + for itr, color in zip(range(len(traces)), colors): + trace = traces[itr, :] + # main traces + ax.plot( + self.tof, + trace, + ls="-", + color=color, + linewidth=1, + label=labels[itr], + ) + # segments: + seg = self.featranges[itr] + cond = (self.tof >= seg[0]) & (self.tof <= seg[1]) + tofseg, traceseg = self.tof[cond], trace[cond] + (line,) = ax.plot( + tofseg, + traceseg, + ls="-", + color=color, + linewidth=3, + ) + plot_segs.append(line) + # markers + (scatt,) = ax.plot( + self.peaks[itr, 0], + self.peaks[itr, 1], + ls="", + marker=".", + color="k", + markersize=10, + ) + plot_peaks.append(scatt) + ax.legend(fontsize=8, loc="upper right") + ax.set_title("") + + def update(refid, ranges): + self.add_ranges(ranges, refid, traces=traces) + self.feature_extract(peak_window=7) + for itr, _ in enumerate(self.traces_normed): + seg = self.featranges[itr] + cond = (self.tof >= seg[0]) & (self.tof <= seg[1]) + tofseg, traceseg = ( + self.tof[cond], + self.traces_normed[itr][cond], + ) + plot_segs[itr].set_ydata(traceseg) + plot_segs[itr].set_xdata(tofseg) + + plot_peaks[itr].set_xdata(self.peaks[itr, 0]) + plot_peaks[itr].set_ydata(self.peaks[itr, 1]) + + fig.canvas.draw_idle() + + refid_slider = ipw.IntSlider( + value=ref_id, + min=0, + max=10, + step=1, + ) + + ranges_slider = ipw.IntRangeSlider( + value=list(ranges), + min=min(self.tof), + max=max(self.tof), + step=1, + ) + + update(ranges=ranges, refid=ref_id) + + ipw.interact( + update, + refid=refid_slider, + ranges=ranges_slider, + ) + + def apply_func(apply: bool): # noqa: ARG001 + self.add_ranges( + ranges_slider.value, + refid_slider.value, + traces=self.traces_normed, + ) + self.feature_extract(peak_window=7) + ranges_slider.close() + refid_slider.close() + apply_button.close() + + apply_button = ipw.Button(description="apply") + display(apply_button) # pylint: disable=duplicate-code + apply_button.on_click(apply_func) + plt.show() + + if apply: + apply_func(True)
+ +
[docs]    def add_ranges(
+        self,
+        ranges: Union[List[Tuple], Tuple],
+        ref_id: int = 0,
+        traces: np.ndarray = None,
+        infer_others: bool = True,
+        mode: str = "replace",
+        **kwds,
+    ):
+        """Select or extract the equivalent feature ranges (containing the peaks) among all traces.
+
+        Args:
+            ranges (Union[List[Tuple], Tuple]):
+                Collection of feature detection ranges, within which an algorithm
+                (e.g. a 1D peak detector) will look for the feature.
+            ref_id (int, optional): Index of the reference trace. Defaults to 0.
+            traces (np.ndarray, optional): Collection of energy dispersion curves.
+                Defaults to self.traces_normed.
+            infer_others (bool, optional): Option to infer the feature detection range
+                in other traces from a given one using a time warp algorithm.
+                Defaults to True.
+            mode (str, optional): Specification on how to change the feature ranges
+                ('append' or 'replace'). Defaults to "replace".
+            **kwds:
+                keyword arguments for trace alignment (see ``find_correspondence()``).
+        """
+        if traces is None:
+            traces = self.traces_normed
+
+        # Infer the corresponding feature detection range of other traces by alignment
+        if infer_others:
+            assert isinstance(ranges, tuple)
+            newranges: List[Tuple] = []
+
+            for i in range(self.ntraces):
+                pathcorr = find_correspondence(
+                    traces[ref_id, :],
+                    traces[i, :],
+                    **kwds,
+                )
+                newranges.append(range_convert(self.tof, ranges, pathcorr))
+
+        else:
+            if isinstance(ranges, list):
+                newranges = ranges
+            else:
+                newranges = [ranges]
+
+        if mode == "append":
+            self.featranges += newranges
+        elif mode == "replace":
+            self.featranges = newranges
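Sketch of the typical call: a single feature range is given on the reference trace, and dynamic time warping (via ``find_correspondence``) infers the matching range on every other trace. The TOF bounds are illustrative values.

ec.add_ranges(ranges=(64000, 66000), ref_id=0, infer_others=True, mode="replace")
ec.feature_extract(peak_window=7)   # then detect one peak per inferred range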
+ +
[docs] def feature_extract( + self, + ranges: List[Tuple] = None, + traces: np.ndarray = None, + peak_window: int = 7, + ): + """Select or extract the equivalent landmarks (e.g. peaks) among all traces. + + Args: + ranges (List[Tuple], optional): List of ranges in each trace to look for + the peak feature, [start, end]. Defaults to self.featranges. + traces (np.ndarray, optional): Collection of 1D spectra to use for + calibration. Defaults to self.traces_normed. + peak_window (int, optional): area around a peak to check for other peaks. + Defaults to 7. + """ + if ranges is None: + ranges = self.featranges + + if traces is None: + traces = self.traces_normed + + # Augment the content of the calibration data + traces_aug = np.tile(traces, (self.dup, 1)) + # Run peak detection for each trace within the specified ranges + self.peaks = peaksearch( + traces_aug, + self.tof, + ranges=ranges, + pkwindow=peak_window, + )
+ +
[docs] def calibrate( + self, + ref_id: int = 0, + method: str = "lmfit", + energy_scale: str = "kinetic", + landmarks: np.ndarray = None, + biases: np.ndarray = None, + t: np.ndarray = None, + verbose: bool = True, + **kwds, + ) -> dict: + """Calculate the functional mapping between time-of-flight and the energy + scale using optimization methods. + + Args: + ref_id (int, optional): The reference trace index (an integer). + Defaults to 0. + method (str, optional): Method for determining the energy calibration. + + - **'lmfit'**: Energy calibration using lmfit and 1/t^2 form. + - **'lstsq'**, **'lsqr'**: Energy calibration using polynomial form. + + Defaults to 'lmfit'. + energy_scale (str, optional): Direction of increasing energy scale. + + - **'kinetic'**: increasing energy with decreasing TOF. + - **'binding'**: increasing energy with increasing TOF. + + Defaults to "kinetic". + landmarks (np.ndarray, optional): Extracted peak positions (TOF) used for + calibration. Defaults to self.peaks. + biases (np.ndarray, optional): Bias values. Defaults to self.biases. + t (np.ndarray, optional): TOF values. Defaults to self.tof. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to True. + **kwds: keyword arguments. + See available keywords for ``poly_energy_calibration()`` and + ``fit_energy_calibration()`` + + Raises: + ValueError: Raised if invalid 'energy_scale' is passed. + NotImplementedError: Raised if invalid 'method' is passed. + + Returns: + dict: Calibration dictionary with coefficients. + """ + if landmarks is None: + landmarks = self.peaks[:, 0] + if biases is None: + biases = self.biases + if t is None: + t = self.tof + if energy_scale == "kinetic": + sign = -1 + elif energy_scale == "binding": + sign = 1 + else: + raise ValueError( + 'energy_scale needs to be either "binding" or "kinetic"', + f", got {energy_scale}.", + ) + + binwidth = kwds.pop("binwidth", self.binwidth) + binning = kwds.pop("binning", self.binning) + + if method == "lmfit": + self.calibration = fit_energy_calibration( + landmarks, + sign * biases, + binwidth, + binning, + ref_id=ref_id, + t=t, + energy_scale=energy_scale, + verbose=verbose, + **kwds, + ) + elif method in ("lstsq", "lsqr"): + self.calibration = poly_energy_calibration( + landmarks, + sign * biases, + ref_id=ref_id, + aug=self.dup, + method=method, + t=t, + energy_scale=energy_scale, + **kwds, + ) + else: + raise NotImplementedError() + + self.calibration["creation_date"] = datetime.now().timestamp() + return self.calibration
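Sketch of a calibration call. Per the fit metadata stored by ``append_energy_axis``, the "lmfit" model has the form E = (d/(t - t0))**2 + E0; "lstsq"/"lsqr" fit a polynomial instead.

calibration = ec.calibrate(
    ref_id=3,                # trace used as the energy reference
    method="lmfit",          # 1/t^2 model; or "lstsq"/"lsqr" for polynomial form
    energy_scale="kinetic",
)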
+ +
[docs] def view( # pylint: disable=dangerous-default-value + self, + traces: np.ndarray, + segs: List[Tuple] = None, + peaks: np.ndarray = None, + show_legend: bool = True, + backend: str = "matplotlib", + linekwds: dict = {}, + linesegkwds: dict = {}, + scatterkwds: dict = {}, + legkwds: dict = {}, + **kwds, + ): + """Display a plot showing line traces with annotation. + + Args: + traces (np.ndarray): Matrix of traces to visualize. + segs (List[Tuple], optional): Segments to be highlighted in the + visualization. Defaults to None. + peaks (np.ndarray, optional): Peak positions for labelling the traces. + Defaults to None. + show_legend (bool, optional): Option to display bias voltages as legends. + Defaults to True. + backend (str, optional): Backend specification, choose between 'matplotlib' + (static) or 'bokeh' (interactive). Defaults to "matplotlib". + linekwds (dict, optional): Keyword arguments for line plotting + (see ``matplotlib.pyplot.plot()``). Defaults to {}. + linesegkwds (dict, optional): Keyword arguments for line segments plotting + (see ``matplotlib.pyplot.plot()``). Defaults to {}. + scatterkwds (dict, optional): Keyword arguments for scatter plot + (see ``matplotlib.pyplot.scatter()``). Defaults to {}. + legkwds (dict, optional): Keyword arguments for legend + (see ``matplotlib.pyplot.legend()``). Defaults to {}. + **kwds: keyword arguments: + + - **labels** (list): Labels for each curve + - **xaxis** (np.ndarray): x (horizontal) axis values + - **title** (str): Title of the plot + - **legend_location** (str): Location of the plot legend + - **align** (bool): Option to shift traces by bias voltage + """ + lbs = kwds.pop("labels", [str(b) + " V" for b in self.biases]) + xaxis = kwds.pop("xaxis", self.tof) + ttl = kwds.pop("title", "") + align = kwds.pop("align", False) + energy_scale = kwds.pop("energy_scale", "kinetic") + + sign = 1 if energy_scale == "kinetic" else -1 + + if backend == "matplotlib": + figsize = kwds.pop("figsize", (12, 4)) + fig, ax = plt.subplots(figsize=figsize) + for itr, trace in enumerate(traces): + if align: + ax.plot( + xaxis + sign * (self.biases[itr] - self.biases[self.calibration["refid"]]), + trace, + ls="-", + linewidth=1, + label=lbs[itr], + **linekwds, + ) + else: + ax.plot( + xaxis, + trace, + ls="-", + linewidth=1, + label=lbs[itr], + **linekwds, + ) + + # Emphasize selected EDC segments + if segs is not None: + seg = segs[itr] + cond = (self.tof >= seg[0]) & (self.tof <= seg[1]) + tofseg, traceseg = self.tof[cond], trace[cond] + ax.plot( + tofseg, + traceseg, + ls="-", + linewidth=2, + **linesegkwds, + ) + # Emphasize extracted local maxima + if peaks is not None: + ax.scatter( + peaks[itr, 0], + peaks[itr, 1], + s=30, + **scatterkwds, + ) + + if show_legend: + try: + ax.legend(fontsize=12, **legkwds) + except TypeError: + pass + + ax.set_title(ttl) + + elif backend == "bokeh": + output_notebook(hide_banner=True) + colors = it.cycle(ColorCycle[10]) + ttp = [("(x, y)", "($x, $y)")] + + figsize = kwds.pop("figsize", (800, 300)) + fig = pbk.figure( + title=ttl, + width=figsize[0], + height=figsize[1], + tooltips=ttp, + ) + # Plotting the main traces + for itr, color in zip(range(len(traces)), colors): + trace = traces[itr, :] + if align: + fig.line( + xaxis + sign * (self.biases[itr] - self.biases[self.calibration["refid"]]), + trace, + color=color, + line_dash="solid", + line_width=1, + line_alpha=1, + legend_label=lbs[itr], + **kwds, + ) + else: + fig.line( + xaxis, + trace, + color=color, + line_dash="solid", + line_width=1, + 
line_alpha=1, + legend_label=lbs[itr], + **kwds, + ) + + # Emphasize selected EDC segments + if segs is not None: + seg = segs[itr] + cond = (self.tof >= seg[0]) & (self.tof <= seg[1]) + tofseg, traceseg = self.tof[cond], trace[cond] + fig.line( + tofseg, + traceseg, + color=color, + line_width=3, + **linekwds, + ) + + # Plot detected peaks + if peaks is not None: + fig.scatter( + peaks[itr, 0], + peaks[itr, 1], + fill_color=color, + fill_alpha=0.8, + line_color=None, + size=5, + **scatterkwds, + ) + + if show_legend: + fig.legend.location = kwds.pop("legend_location", "top_right") + fig.legend.spacing = 0 + fig.legend.padding = 2 + + pbk.show(fig)
+ +
[docs] def append_energy_axis( + self, + df: Union[pd.DataFrame, dask.dataframe.DataFrame], + tof_column: str = None, + energy_column: str = None, + calibration: dict = None, + verbose: bool = True, + **kwds, + ) -> Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]: + """Calculate and append the energy axis to the events dataframe. + + Args: + df (Union[pd.DataFrame, dask.dataframe.DataFrame]): + Dataframe to apply the energy axis calibration to. + tof_column (str, optional): Label of the source column. + Defaults to config["dataframe"]["tof_column"]. + energy_column (str, optional): Label of the destination column. + Defaults to config["dataframe"]["energy_column"]. + calibration (dict, optional): Calibration dictionary. If provided, + overrides calibration from class or config. + Defaults to self.calibration or config["energy"]["calibration"]. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to True. + **kwds: additional keyword arguments for the energy conversion. They are + added to the calibration dictionary. + + Raises: + ValueError: Raised if expected calibration parameters are missing. + NotImplementedError: Raised if an invalid calib_type is found. + + Returns: + Union[pd.DataFrame, dask.dataframe.DataFrame]: dataframe with added column + and energy calibration metadata dictionary. + """ + if tof_column is None: + if self.corrected_tof_column in df.columns: + tof_column = self.corrected_tof_column + else: + tof_column = self.tof_column + + if energy_column is None: + energy_column = self.energy_column + + binwidth = kwds.pop("binwidth", self.binwidth) + binning = kwds.pop("binning", self.binning) + + # pylint: disable=duplicate-code + if calibration is None: + calibration = deepcopy(self.calibration) + + if len(kwds) > 0: + for key, value in kwds.items(): + calibration[key] = value + calibration["creation_date"] = datetime.now().timestamp() + + elif "creation_date" in calibration and verbose: + datestring = datetime.fromtimestamp(calibration["creation_date"]).strftime( + "%m/%d/%Y, %H:%M:%S", + ) + print(f"Using energy calibration parameters generated on {datestring}") + + # try to determine calibration type if not provided + if "calib_type" not in calibration: + if "t0" in calibration and "d" in calibration and "E0" in calibration: + calibration["calib_type"] = "fit" + if "energy_scale" not in calibration: + calibration["energy_scale"] = "kinetic" + + elif "coeffs" in calibration and "E0" in calibration: + calibration["calib_type"] = "poly" + else: + raise ValueError("No valid calibration parameters provided!") + + if calibration["calib_type"] == "fit": + # Fitting metadata for nexus + calibration["fit_function"] = "(a0/(x0-a1))**2 + a2" + calibration["coefficients"] = np.array( + [ + calibration["d"], + calibration["t0"], + calibration["E0"], + ], + ) + df[energy_column] = tof2ev( + calibration["d"], + calibration["t0"], + binwidth, + binning, + calibration["energy_scale"], + calibration["E0"], + df[tof_column].astype("float64"), + ) + elif calibration["calib_type"] == "poly": + # Fitting metadata for nexus + fit_function = "a0" + for term in range(1, len(calibration["coeffs"]) + 1): + fit_function += f" + a{term}*x0**{term}" + calibration["fit_function"] = fit_function + calibration["coefficients"] = np.concatenate( + (calibration["coeffs"], [calibration["E0"]]), + )[::-1] + df[energy_column] = tof2evpoly( + calibration["coeffs"], + calibration["E0"], + df[tof_column].astype("float64"), + ) + else: + raise NotImplementedError + + 
metadata = self.gather_calibration_metadata(calibration) + + return df, metadata
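Sketch: apply the stored (or a passed) calibration to a single-event dataframe; extra keyword arguments are merged into the calibration dictionary.

df, meta = ec.append_energy_axis(df)          # use self.calibration
df, meta = ec.append_energy_axis(df, E0=-30)  # same, but overriding the energy offset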
+ +
[docs] def append_tof_ns_axis( + self, + df: Union[pd.DataFrame, dask.dataframe.DataFrame], + tof_column: str = None, + tof_ns_column: str = None, + **kwds, + ) -> Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]: + """Converts the time-of-flight time from steps to time in ns. + + Args: + df (Union[pd.DataFrame, dask.dataframe.DataFrame]): Dataframe to convert. + tof_column (str, optional): Name of the column containing the + time-of-flight steps. Defaults to config["dataframe"]["tof_column"]. + tof_ns_column (str, optional): Name of the column to store the + time-of-flight in nanoseconds. Defaults to config["dataframe"]["tof_ns_column"]. + binwidth (float, optional): Time-of-flight binwidth in ns. + Defaults to config["energy"]["tof_binwidth"]. + binning (int, optional): Time-of-flight binning factor. + Defaults to config["energy"]["tof_binning"]. + + Returns: + dask.dataframe.DataFrame: Dataframe with the new columns. + dict: Metadata dictionary. + """ + binwidth = kwds.pop("binwidth", self.binwidth) + binning = kwds.pop("binning", self.binning) + if tof_column is None: + if self.corrected_tof_column in df.columns: + tof_column = self.corrected_tof_column + else: + tof_column = self.tof_column + + if tof_ns_column is None: + tof_ns_column = self.tof_ns_column + + df[tof_ns_column] = tof2ns( + binwidth, + binning, + df[tof_column].astype("float64"), + ) + metadata: Dict[str, Any] = { + "applied": True, + "binwidth": binwidth, + "binning": binning, + } + return df, metadata
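Sketch: add a TOF-in-nanoseconds column alongside the step-based one; ``binwidth`` and ``binning`` fall back to the config values unless passed as keywords.

df, meta = ec.append_tof_ns_axis(df)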
+ +
[docs] def gather_calibration_metadata(self, calibration: dict = None) -> dict: + """Collects metadata from the energy calibration + + Args: + calibration (dict, optional): Dictionary with energy calibration + parameters. Defaults to None. + + Returns: + dict: Generated metadata dictionary. + """ + if calibration is None: + calibration = self.calibration + metadata: Dict[Any, Any] = {} + metadata["applied"] = True + metadata["calibration"] = deepcopy(calibration) + metadata["tof"] = deepcopy(self.tof) + if metadata["tof"] is None: + metadata["tof"] = 0.0 + # create empty calibrated axis entry, if it is not present. + if "axis" not in metadata["calibration"]: + metadata["calibration"]["axis"] = 0.0 + + return metadata
+ +
[docs]    def adjust_energy_correction(
+        self,
+        image: xr.DataArray,
+        correction_type: str = None,
+        amplitude: float = None,
+        center: Tuple[float, float] = None,
+        correction: dict = None,
+        apply: bool = False,
+        **kwds,
+    ):
+        """Visualize the energy correction function on top of the TOF/X/Y graphs.
+
+        Args:
+            image (xr.DataArray): Image data cube (x, y, tof) of binned data to plot.
+            correction_type (str, optional): Type of correction to apply to the TOF
+                axis. Valid values are:
+
+                - 'spherical'
+                - 'Lorentzian'
+                - 'Gaussian'
+                - 'Lorentzian_asymmetric'
+
+                Defaults to config["energy"]["correction_type"].
+            amplitude (float, optional): Amplitude of the time-of-flight correction
+                term. Defaults to config["energy"]["correction"]["amplitude"].
+            center (Tuple[float, float], optional): Center (x/y) coordinates for the
+                correction. Defaults to config["energy"]["correction"]["center"].
+            correction (dict, optional): Correction dict. Defaults to the config values
+                and is updated from provided and adjusted parameters.
+            apply (bool, optional): whether to store the provided parameters within
+                the class. Defaults to False.
+            **kwds: Additional parameters to use for the adjustment plots:
+
+                - **x_column** (str): Name of the x column.
+                - **y_column** (str): Name of the y column.
+                - **tof_column** (str): Name of the tof column to convert.
+                - **x_width** (int, int): x range to integrate around the center
+                - **y_width** (int, int): y range to integrate around the center
+                - **tof_fermi** (int): TOF value of the Fermi level
+                - **tof_width** (int, int): TOF range to plot around tof_fermi
+                - **color_clip** (int): highest value to plot in the color range
+
+                Additional parameters for the correction functions:
+
+                - **diameter** (float): Diameter of the sphere used for the
+                  'spherical' correction.
+                - **gamma** (float): Linewidth value for correction using a 2D
+                  Lorentz profile.
+                - **sigma** (float): Standard deviation for correction using a 2D
+                  Gaussian profile.
+                - **gamma2** (float): Linewidth value for correction using an
+                  asymmetric 2D Lorentz profile, X-direction.
+                - **amplitude2** (float): Amplitude value for correction using an
+                  asymmetric 2D Lorentz profile, X-direction.
+
+        Raises:
+            NotImplementedError: Raised for invalid correction_type.
+ """ + matplotlib.use("module://ipympl.backend_nbagg") + + if correction is None: + correction = deepcopy(self.correction) + + if correction_type is not None: + correction["correction_type"] = correction_type + + if amplitude is not None: + correction["amplitude"] = amplitude + + if center is not None: + correction["center"] = center + + x_column = kwds.pop("x_column", self.x_column) + y_column = kwds.pop("y_column", self.y_column) + tof_column = kwds.pop("tof_column", self.tof_column) + x_width = kwds.pop("x_width", self.x_width) + y_width = kwds.pop("y_width", self.y_width) + tof_fermi = kwds.pop("tof_fermi", self.tof_fermi) + tof_width = kwds.pop("tof_width", self.tof_width) + color_clip = kwds.pop("color_clip", self.color_clip) + + correction = {**correction, **kwds} + + if not {"correction_type", "amplitude", "center"}.issubset(set(correction.keys())): + raise ValueError( + "No valid energy correction found in config and required parameters missing!", + ) + + if isinstance(correction["center"], list): + correction["center"] = tuple(correction["center"]) + + x = image.coords[x_column].values + y = image.coords[y_column].values + + x_center = correction["center"][0] + y_center = correction["center"][1] + + correction_x = tof_fermi - correction_function( + x=x, + y=y_center, + **correction, + ) + correction_y = tof_fermi - correction_function( + x=x_center, + y=y, + **correction, + ) + fig, ax = plt.subplots(2, 1) + image.loc[ + { + y_column: slice(y_center + y_width[0], y_center + y_width[1]), + tof_column: slice( + tof_fermi + tof_width[0], + tof_fermi + tof_width[1], + ), + } + ].sum(dim=y_column).T.plot( + ax=ax[0], + cmap="terrain_r", + vmax=color_clip, + yincrease=False, + ) + image.loc[ + { + x_column: slice(x_center + x_width[0], x_center + x_width[1]), + tof_column: slice( + tof_fermi + tof_width[0], + tof_fermi + tof_width[1], + ), + } + ].sum(dim=x_column).T.plot( + ax=ax[1], + cmap="terrain_r", + vmax=color_clip, + yincrease=False, + ) + (trace1,) = ax[0].plot(x, correction_x) + line1 = ax[0].axvline(x=x_center) + (trace2,) = ax[1].plot(y, correction_y) + line2 = ax[1].axvline(x=y_center) + + amplitude_slider = ipw.FloatSlider( + value=correction["amplitude"], + min=0, + max=10, + step=0.1, + ) + x_center_slider = ipw.FloatSlider( + value=x_center, + min=0, + max=self._config["momentum"]["detector_ranges"][0][1], + step=1, + ) + y_center_slider = ipw.FloatSlider( + value=y_center, + min=0, + max=self._config["momentum"]["detector_ranges"][1][1], + step=1, + ) + + def update(amplitude, x_center, y_center, **kwds): + nonlocal correction + correction["amplitude"] = amplitude + correction["center"] = (x_center, y_center) + correction = {**correction, **kwds} + correction_x = tof_fermi - correction_function( + x=x, + y=y_center, + **correction, + ) + correction_y = tof_fermi - correction_function( + x=x_center, + y=y, + **correction, + ) + + trace1.set_ydata(correction_x) + line1.set_xdata(x=x_center) + trace2.set_ydata(correction_y) + line2.set_xdata(x=y_center) + + fig.canvas.draw_idle() + + def common_apply_func(apply: bool): # noqa: ARG001 + self.correction = {} + self.correction["amplitude"] = correction["amplitude"] + self.correction["center"] = correction["center"] + self.correction["correction_type"] = correction["correction_type"] + self.correction["creation_date"] = datetime.now().timestamp() + amplitude_slider.close() + x_center_slider.close() + y_center_slider.close() + apply_button.close() + + if correction["correction_type"] == "spherical": + try: + 
update(correction["amplitude"], x_center, y_center, diameter=correction["diameter"]) + except KeyError as exc: + raise ValueError( + "Parameter 'diameter' required for correction type 'spherical', ", + "but not present!", + ) from exc + + diameter_slider = ipw.FloatSlider( + value=correction["diameter"], + min=0, + max=10000, + step=100, + ) + + ipw.interact( + update, + amplitude=amplitude_slider, + x_center=x_center_slider, + y_center=y_center_slider, + diameter=diameter_slider, + ) + + def apply_func(apply: bool): + common_apply_func(apply) + self.correction["diameter"] = correction["diameter"] + diameter_slider.close() + + elif correction["correction_type"] == "Lorentzian": + try: + update(correction["amplitude"], x_center, y_center, gamma=correction["gamma"]) + except KeyError as exc: + raise ValueError( + "Parameter 'gamma' required for correction type 'Lorentzian', but not present!", + ) from exc + + gamma_slider = ipw.FloatSlider( + value=correction["gamma"], + min=0, + max=2000, + step=1, + ) + + ipw.interact( + update, + amplitude=amplitude_slider, + x_center=x_center_slider, + y_center=y_center_slider, + gamma=gamma_slider, + ) + + def apply_func(apply: bool): + common_apply_func(apply) + self.correction["gamma"] = correction["gamma"] + gamma_slider.close() + + elif correction["correction_type"] == "Gaussian": + try: + update(correction["amplitude"], x_center, y_center, sigma=correction["sigma"]) + except KeyError as exc: + raise ValueError( + "Parameter 'sigma' required for correction type 'Gaussian', but not present!", + ) from exc + + sigma_slider = ipw.FloatSlider( + value=correction["sigma"], + min=0, + max=1000, + step=1, + ) + + ipw.interact( + update, + amplitude=amplitude_slider, + x_center=x_center_slider, + y_center=y_center_slider, + sigma=sigma_slider, + ) + + def apply_func(apply: bool): + common_apply_func(apply) + self.correction["sigma"] = correction["sigma"] + sigma_slider.close() + + elif correction["correction_type"] == "Lorentzian_asymmetric": + try: + if "amplitude2" not in correction: + correction["amplitude2"] = correction["amplitude"] + if "sigma2" not in correction: + correction["gamma2"] = correction["gamma"] + update( + correction["amplitude"], + x_center, + y_center, + gamma=correction["gamma"], + amplitude2=correction["amplitude2"], + gamma2=correction["gamma2"], + ) + except KeyError as exc: + raise ValueError( + "Parameter 'gamma' required for correction type 'Lorentzian_asymmetric', ", + "but not present!", + ) from exc + + gamma_slider = ipw.FloatSlider( + value=correction["gamma"], + min=0, + max=2000, + step=1, + ) + + amplitude2_slider = ipw.FloatSlider( + value=correction["amplitude2"], + min=0, + max=10, + step=0.1, + ) + + gamma2_slider = ipw.FloatSlider( + value=correction["gamma2"], + min=0, + max=2000, + step=1, + ) + + ipw.interact( + update, + amplitude=amplitude_slider, + x_center=x_center_slider, + y_center=y_center_slider, + gamma=gamma_slider, + amplitude2=amplitude2_slider, + gamma2=gamma2_slider, + ) + + def apply_func(apply: bool): + common_apply_func(apply) + self.correction["gamma"] = correction["gamma"] + self.correction["amplitude2"] = correction["amplitude2"] + self.correction["gamma2"] = correction["gamma2"] + gamma_slider.close() + amplitude2_slider.close() + gamma2_slider.close() + + else: + raise NotImplementedError + # pylint: disable=duplicate-code + apply_button = ipw.Button(description="apply") + display(apply_button) + apply_button.on_click(apply_func) + plt.show() + + if apply: + apply_func(True)
+ +
[docs]    def apply_energy_correction(
+        self,
+        df: Union[pd.DataFrame, dask.dataframe.DataFrame],
+        tof_column: str = None,
+        new_tof_column: str = None,
+        correction_type: str = None,
+        amplitude: float = None,
+        correction: dict = None,
+        verbose: bool = True,
+        **kwds,
+    ) -> Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]:
+        """Apply correction to the time-of-flight (TOF) axis of single-event data.
+
+        Args:
+            df (Union[pd.DataFrame, dask.dataframe.DataFrame]): The dataframe where
+                to apply the energy correction to.
+            tof_column (str, optional): Name of the source column to convert.
+                Defaults to config["dataframe"]["tof_column"].
+            new_tof_column (str, optional): Name of the destination column to convert.
+                Defaults to config["dataframe"]["corrected_tof_column"].
+            correction_type (str, optional): Type of correction to apply to the TOF
+                axis. Valid values are:
+
+                - 'spherical'
+                - 'Lorentzian'
+                - 'Gaussian'
+                - 'Lorentzian_asymmetric'
+
+                Defaults to config["energy"]["correction_type"].
+            amplitude (float, optional): Amplitude of the time-of-flight correction
+                term. Defaults to config["energy"]["correction"]["amplitude"].
+            correction (dict, optional): Correction dictionary containing parameters
+                for the correction. Defaults to self.correction or
+                config["energy"]["correction"].
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to True.
+            **kwds: Additional parameters to use for the correction:
+
+                - **x_column** (str): Name of the x column.
+                - **y_column** (str): Name of the y column.
+                - **diameter** (float): Diameter of the sphere used for the
+                  'spherical' correction.
+                - **gamma** (float): Linewidth value for correction using a 2D
+                  Lorentz profile.
+                - **sigma** (float): Standard deviation for correction using a 2D
+                  Gaussian profile.
+                - **gamma2** (float): Linewidth value for correction using an
+                  asymmetric 2D Lorentz profile, X-direction.
+                - **amplitude2** (float): Amplitude value for correction using an
+                  asymmetric 2D Lorentz profile, X-direction.
+
+        Returns:
+            Union[pd.DataFrame, dask.dataframe.DataFrame]: dataframe with added column
+            and Energy correction metadata dictionary.
+        """
+        if correction is None:
+            correction = deepcopy(self.correction)
+
+        x_column = kwds.pop("x_column", self.x_column)
+        y_column = kwds.pop("y_column", self.y_column)
+
+        if tof_column is None:
+            tof_column = self.tof_column
+
+        if new_tof_column is None:
+            new_tof_column = self.corrected_tof_column
+
+        if correction_type is not None or amplitude is not None or len(kwds) > 0:
+            if correction_type is not None:
+                correction["correction_type"] = correction_type
+
+            if amplitude is not None:
+                correction["amplitude"] = amplitude
+
+            for key, value in kwds.items():
+                correction[key] = value
+
+            correction["creation_date"] = datetime.now().timestamp()
+
+        elif "creation_date" in correction and verbose:
+            datestring = datetime.fromtimestamp(correction["creation_date"]).strftime(
+                "%m/%d/%Y, %H:%M:%S",
+            )
+            print(f"Using energy correction parameters generated on {datestring}")
+
+        missing_keys = {"correction_type", "center", "amplitude"} - set(correction.keys())
+        if missing_keys:
+            raise ValueError(f"Required correction parameters '{missing_keys}' missing!")
+
+        df[new_tof_column] = df[tof_column] + correction_function(
+            x=df[x_column],
+            y=df[y_column],
+            **correction,
+        )
+        metadata = self.gather_correction_metadata(correction=correction)
+
+        return df, metadata
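A usage sketch with illustrative parameter values; ``center`` and profile parameters such as ``gamma`` travel through ``**kwds`` into the correction dictionary.

df, meta = ec.apply_energy_correction(
    df,
    correction_type="Lorentzian",
    amplitude=2.5,
    center=(730.0, 730.0),   # detector coordinates, illustrative
    gamma=900.0,             # Lorentzian linewidth, illustrative
)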
+ +
[docs]    def gather_correction_metadata(self, correction: dict = None) -> dict:
+        """Collect metadata for the energy correction.
+
+        Args:
+            correction (dict, optional): Dictionary with energy correction parameters.
+                Defaults to None.
+
+        Returns:
+            dict: Generated metadata dictionary.
+        """
+        if correction is None:
+            correction = self.correction
+        metadata: Dict[Any, Any] = {}
+        metadata["applied"] = True
+        metadata["correction"] = deepcopy(correction)
+
+        return metadata
+ +
[docs]    def align_dld_sectors(
+        self,
+        df: dask.dataframe.DataFrame,
+        tof_column: str = None,
+        sector_id_column: str = None,
+        sector_delays: np.ndarray = None,
+    ) -> Tuple[dask.dataframe.DataFrame, dict]:
+        """Aligns the time-of-flight axis of the different sections of a detector.
+
+        Args:
+            df (dask.dataframe.DataFrame): Dataframe to use.
+            tof_column (str, optional): Name of the column containing the time-of-flight values.
+                Defaults to config["dataframe"]["tof_column"].
+            sector_id_column (str, optional): Name of the column containing the sector id values.
+                Defaults to config["dataframe"]["sector_id_column"].
+            sector_delays (np.ndarray, optional): Array containing the sector delays. Defaults to
+                config["dataframe"]["sector_delays"].
+
+        Returns:
+            dask.dataframe.DataFrame: Dataframe with the new columns.
+            dict: Metadata dictionary.
+        """
+        if sector_delays is None:
+            sector_delays = self.sector_delays
+        if sector_id_column is None:
+            sector_id_column = self.sector_id_column
+
+        if sector_delays is None or sector_id_column is None:
+            raise ValueError(
+                "No value for sector_delays or sector_id_column found in config. "
+                "Config file is not properly configured for dld sector correction.",
+            )
+        tof_column = tof_column or self.tof_column
+
+        # align the sectors by subtracting the per-sector delay from the TOF values
+        sector_delays_arr = dask.array.from_array(sector_delays)
+
+        def align_sector(x):
+            val = x[tof_column] - sector_delays_arr[x[sector_id_column].values.astype(int)]
+            return val.astype(np.float32)
+
+        df[tof_column] = df.map_partitions(align_sector, meta=(tof_column, np.float32))
+        metadata: Dict[str, Any] = {
+            "applied": True,
+            "sector_delays": sector_delays,
+        }
+        return df, metadata
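Sketch with placeholder delays (one entry per detector sector); in practice the values normally come from config["dataframe"]["sector_delays"].

import numpy as np

df, meta = ec.align_dld_sectors(
    df,
    sector_delays=np.array([0.0, 2.5, -1.0, 0.5, 1.5, -0.5, 2.0, 0.0]),  # placeholders
)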
+ +
[docs]    def add_offsets(
+        self,
+        df: Union[pd.DataFrame, dask.dataframe.DataFrame] = None,
+        offsets: Dict[str, Any] = None,
+        constant: float = None,
+        columns: Union[str, Sequence[str]] = None,
+        weights: Union[float, Sequence[float]] = None,
+        preserve_mean: Union[bool, Sequence[bool]] = False,
+        reductions: Union[str, Sequence[str]] = None,
+        energy_column: str = None,
+        verbose: bool = True,
+    ) -> Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]:
+        """Apply an offset to the energy column by the values of the provided columns.
+
+        If no parameter is passed to this function, the offset is applied as defined in the
+        config file. If parameters are passed, they are used to generate a new offset dictionary
+        and the offset is applied using the ``dfops.offset_by_other_columns()`` function.
+
+        Args:
+            df (Union[pd.DataFrame, dask.dataframe.DataFrame]): Dataframe to use.
+            offsets (Dict, optional): Dictionary of energy offset parameters.
+            constant (float, optional): The constant to shift the energy axis by.
+            columns (Union[str, Sequence[str]]): Name of the column(s) to apply the shift from.
+            weights (Union[float, Sequence[float]]): weights to apply to the columns.
+                Can also be used to flip the sign (e.g. -1). Defaults to 1.
+            preserve_mean (bool): Whether to subtract the mean of the column before applying the
+                shift. Defaults to False.
+            reductions (str): The reduction to apply to the column. Should be an available method
+                of dask.dataframe.Series. For example "mean". In this case the function is applied
+                to the column to generate a single value for the whole dataset. If None, the shift
+                is applied per-dataframe-row. Defaults to None. Currently only "mean" is supported.
+            energy_column (str, optional): Name of the column containing the energy values.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to True.
+
+        Returns:
+            dask.dataframe.DataFrame: Dataframe with the new columns.
+            dict: Metadata dictionary.
+        """
+        if offsets is None:
+            offsets = deepcopy(self.offsets)
+
+        if energy_column is None:
+            energy_column = self.energy_column
+
+        metadata: Dict[str, Any] = {
+            "applied": True,
+        }
+
+        # flip sign for binding energy scale
+        energy_scale = self.calibration.get("energy_scale", None)
+        if energy_scale is None:
+            raise ValueError("Energy scale not set. Cannot interpret the sign of the offset.")
+        if energy_scale not in ["binding", "kinetic"]:
+            raise ValueError(f"Invalid energy scale: {energy_scale}")
+        scale_sign: Literal[-1, 1] = -1 if energy_scale == "binding" else 1
+
+        if columns is not None or constant is not None:
+            # pylint:disable=duplicate-code
+            # use passed parameters, overwrite config
+            offsets = {}
+            offsets["creation_date"] = datetime.now().timestamp()
+            # column-based offsets
+            if columns is not None:
+                if weights is None:
+                    weights = 1
+                if isinstance(weights, (int, float, np.integer, np.floating)):
+                    weights = [weights]
+                if len(weights) == 1:
+                    weights = [weights[0]] * len(columns)
+                if not isinstance(weights, Sequence):
+                    raise TypeError(f"Invalid type for weights: {type(weights)}")
+                if not all(isinstance(s, (int, float, np.integer, np.floating)) for s in weights):
+                    raise TypeError(f"Invalid type for weights: {type(weights)}")
+
+                if isinstance(columns, str):
+                    columns = [columns]
+                if isinstance(preserve_mean, bool):
+                    preserve_mean = [preserve_mean] * len(columns)
+                if isinstance(reductions, str) or not isinstance(reductions, Sequence):
+                    reductions = [reductions]
+                if len(reductions) == 1:
+                    reductions = [reductions[0]] * len(columns)
+
+                # store in offsets dictionary
+                for col, weight, pmean, red in zip(columns, weights, preserve_mean, reductions):
+                    offsets[col] = {
+                        "weight": weight,
+                        "preserve_mean": pmean,
+                        "reduction": red,
+                    }
+
+            # constant offset
+            if isinstance(constant, (int, float, np.integer, np.floating)):
+                offsets["constant"] = constant
+            elif constant is not None:
+                raise TypeError(f"Invalid type for constant: {type(constant)}")
+
+        elif "creation_date" in offsets and verbose:
+            datestring = datetime.fromtimestamp(offsets["creation_date"]).strftime(
+                "%m/%d/%Y, %H:%M:%S",
+            )
+            print(f"Using energy offset parameters generated on {datestring}")
+
+        if len(offsets) > 0:
+            # unpack dictionary
+            # pylint: disable=duplicate-code
+            columns = []
+            weights = []
+            preserve_mean = []
+            reductions = []
+            if verbose:
+                print("Energy offset parameters:")
+            for k, v in offsets.items():
+                if k == "creation_date":
+                    continue
+                if k == "constant":
+                    # flip sign if binding energy scale
+                    constant = v * scale_sign
+                    if verbose:
+                        print(f"   Constant: {constant} ")
+                else:
+                    columns.append(k)
+                    try:
+                        weight = v["weight"]
+                    except KeyError:
+                        weight = 1
+                    if not isinstance(weight, (int, float, np.integer, np.floating)):
+                        raise TypeError(f"Invalid type for weight of column {k}: {type(weight)}")
+                    # flip sign if binding energy scale
+                    weight = weight * scale_sign
+                    weights.append(weight)
+                    pm = v.get("preserve_mean", False)
+                    if str(pm).lower() in ["false", "0", "no"]:
+                        pm = False
+                    elif str(pm).lower() in ["true", "1", "yes"]:
+                        pm = True
+                    preserve_mean.append(pm)
+                    red = v.get("reduction", None)
+                    if str(red).lower() in ["none", "null"]:
+                        red = None
+                    reductions.append(red)
+                    if verbose:
+                        print(
+                            f"   Column[{k}]: Weight={weight}, Preserve Mean: {pm}, ",
+                            f"Reductions: {red}.",
+                        )
+
+            if len(columns) > 0:
+                df = dfops.offset_by_other_columns(
+                    df=df,
+                    target_column=energy_column,
+                    offset_columns=columns,
+                    weights=weights,
+                    preserve_mean=preserve_mean,
+                    reductions=reductions,
+                )
+
+            # apply constant
+            if constant:
+                if not isinstance(constant, (int, float, np.integer, np.floating)):
+                    raise TypeError(f"Invalid type for constant: {type(constant)}")
+                df[energy_column] = df.map_partitions(
+                    lambda x: x[energy_column] + constant,
+                    meta=(energy_column, np.float64),
+                )
+
+        self.offsets = offsets
+        metadata["offsets"] = offsets
+
return df, metadata
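Sketch: reference the energy axis to a photon-energy column plus a constant. The column name is hypothetical, and a prior calibration with an ``energy_scale`` entry is required; on a "binding" scale the signs are flipped internally via ``scale_sign``.

df, meta = ec.add_offsets(
    df,
    constant=16.8,                        # illustrative constant shift
    columns="monochromatorPhotonEnergy",  # hypothetical column name
    weights=-1,                           # subtract rather than add the column
    preserve_mean=True,                   # keep the average energy unchanged
)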
+ + +
[docs]def extract_bias(files: List[str], bias_key: str) -> np.ndarray: + """Read bias values from hdf5 files + + Args: + files (List[str]): List of filenames + bias_key (str): hdf5 path to the bias value + + Returns: + np.ndarray: Array of bias values. + """ + bias_list: List[float] = [] + for file in files: + with h5py.File(file, "r") as file_handle: + if bias_key[0] == "@": + bias_list.append(round(file_handle.attrs[bias_key[1:]], 2)) + else: + bias_list.append(round(file_handle[bias_key], 2)) + + return np.asarray(bias_list)
+ + +
[docs]def correction_function( + x: Union[float, np.ndarray], + y: Union[float, np.ndarray], + correction_type: str, + center: Tuple[float, float], + amplitude: float, + **kwds, +) -> Union[float, np.ndarray]: + """Calculate the TOF correction based on the given X/Y coordinates and a model. + + Args: + x (float): x coordinate + y (float): y coordinate + correction_type (str): type of correction. One of + "spherical", "Lorentzian", "Gaussian", or "Lorentzian_asymmetric" + center (Tuple[int, int]): center position of the distribution (x,y) + amplitude (float): Amplitude of the correction + **kwds: Keyword arguments: + + - **diameter** (float): Field-free drift distance. + - **gamma** (float): Linewidth value for correction using a 2D + Lorentz profile. + - **sigma** (float): Standard deviation for correction using a 2D + Gaussian profile. + - **gamma2** (float): Linewidth value for correction using an + asymmetric 2D Lorentz profile, X-direction. + - **amplitude2** (float): Amplitude value for correction using an + asymmetric 2D Lorentz profile, X-direction. + + Returns: + float: calculated correction value + """ + if correction_type == "spherical": + try: + diameter = kwds.pop("diameter") + except KeyError as exc: + raise ValueError( + f"Parameter 'diameter' required for correction type '{correction_type}' " + "but not provided!", + ) from exc + correction = -( + ( + 1 + - np.sqrt( + 1 - ((x - center[0]) ** 2 + (y - center[1]) ** 2) / diameter**2, + ) + ) + * 100 + * amplitude + ) + + elif correction_type == "Lorentzian": + try: + gamma = kwds.pop("gamma") + except KeyError as exc: + raise ValueError( + f"Parameter 'gamma' required for correction type '{correction_type}' " + "but not provided!", + ) from exc + correction = ( + 100000 + * amplitude + / (gamma * np.pi) + * (gamma**2 / ((x - center[0]) ** 2 + (y - center[1]) ** 2 + gamma**2) - 1) + ) + + elif correction_type == "Gaussian": + try: + sigma = kwds.pop("sigma") + except KeyError as exc: + raise ValueError( + f"Parameter 'sigma' required for correction type '{correction_type}' " + "but not provided!", + ) from exc + correction = ( + 20000 + * amplitude + / np.sqrt(2 * np.pi * sigma**2) + * ( + np.exp( + -((x - center[0]) ** 2 + (y - center[1]) ** 2) / (2 * sigma**2), + ) + - 1 + ) + ) + + elif correction_type == "Lorentzian_asymmetric": + try: + gamma = kwds.pop("gamma") + except KeyError as exc: + raise ValueError( + f"Parameter 'gamma' required for correction type '{correction_type}' " + "but not provided!", + ) from exc + gamma2 = kwds.pop("gamma2", gamma) + amplitude2 = kwds.pop("amplitude2", amplitude) + correction = ( + 100000 + * amplitude + / (gamma * np.pi) + * (gamma**2 / ((y - center[1]) ** 2 + gamma**2) - 1) + ) + correction += ( + 100000 + * amplitude2 + / (gamma2 * np.pi) + * (gamma2**2 / ((x - center[0]) ** 2 + gamma2**2) - 1) + ) + + else: + raise NotImplementedError + + return correction
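A worked check of the 'spherical' branch: at the distribution center the radial term vanishes, the square root equals 1, and the correction is zero.

val = correction_function(
    x=730.0,
    y=730.0,
    correction_type="spherical",
    center=(730.0, 730.0),
    amplitude=2.5,
    diameter=3000.0,
)
# val == -0.0, since -(1 - sqrt(1 - 0)) * 100 * amplitude = 0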
+ + +
[docs]def normspec(
+    specs: np.ndarray,
+    smooth: bool = False,
+    span: int = 7,
+    order: int = 1,
+) -> np.ndarray:
+    """Normalize a series of 1D signals.
+
+    Args:
+        specs (np.ndarray): Collection of 1D signals.
+        smooth (bool, optional): Option to smooth the signals before normalization.
+            Defaults to False.
+        span (int, optional): Smoothing span parameters of the LOESS method
+            (see ``scipy.signal.savgol_filter()``). Defaults to 7.
+        order (int, optional): Smoothing order parameters of the LOESS method
+            (see ``scipy.signal.savgol_filter()``). Defaults to 1.
+
+    Returns:
+        np.ndarray: The matrix assembled from a list of maximum-normalized signals.
+    """
+    nspec = len(specs)
+    specnorm = []
+
+    for i in range(nspec):
+        spec = specs[i]
+
+        if smooth:
+            spec = savgol_filter(spec, span, order)
+
+        if type(spec) in (list, tuple):
+            nsp = np.asarray(spec) / max(spec)
+        else:
+            nsp = spec / spec.max()
+        specnorm.append(nsp)
+
+    # Assemble the matrix of normalized spectra
+    normalized_specs = np.asarray(specnorm)
+
+    return normalized_specs
+ + +
[docs]def find_correspondence( + sig_still: np.ndarray, + sig_mov: np.ndarray, + **kwds, +) -> np.ndarray: + """Determine the correspondence between two 1D traces by alignment using a + time-warp algorithm. + + Args: + sig_still (np.ndarray): Reference 1D signals. + sig_mov (np.ndarray): 1D signal to be aligned. + **kwds: keyword arguments for ``fastdtw.fastdtw()`` + + Returns: + np.ndarray: Pixel-wise path correspondences between two input 1D arrays + (sig_still, sig_mov). + """ + dist = kwds.pop("dist_metric", None) + rad = kwds.pop("radius", 1) + _, pathcorr = fastdtw(sig_still, sig_mov, dist=dist, radius=rad) + return np.asarray(pathcorr)
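Sketch: align a shifted copy of a Gaussian peak with the original; each row of the returned path pairs an index of ``sig_still`` with an index of ``sig_mov``.

import numpy as np

x = np.linspace(-5, 5, 200)
still = np.exp(-(x**2))
moved = np.exp(-((x - 0.5) ** 2))
path = find_correspondence(still, moved)   # default: fastdtw with radius=1
# path[k] = [index_in_still, index_in_moved]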
+ + +
[docs]def range_convert( + x: np.ndarray, + xrng: Tuple, + pathcorr: np.ndarray, +) -> Tuple: + """Convert value range using a pairwise path correspondence (e.g. obtained + from time warping algorithm). + + Args: + x (np.ndarray): Values of the x axis (e.g. time-of-flight values). + xrng (Tuple): Boundary value range on the x axis. + pathcorr (np.ndarray): Path correspondence between two 1D arrays in the + following form, + [(id_1_trace_1, id_1_trace_2), (id_2_trace_1, id_2_trace_2), ...] + + Returns: + Tuple: Transformed range according to the path correspondence. + """ + pathcorr = np.asarray(pathcorr) + xrange_trans = [] + + for xval in xrng: # Transform each value in the range + xind = find_nearest(xval, x) + xind_alt = find_nearest(xind, pathcorr[:, 0]) + xind_trans = pathcorr[xind_alt, 1] + xrange_trans.append(x[xind_trans]) + + return tuple(xrange_trans)
+ + +
[docs]def find_nearest(val: float, narray: np.ndarray) -> int: + """Find the value closest to a given one in a 1D array. + + Args: + val (float): Value of interest. + narray (np.ndarray): The array to look for the nearest value. + + Returns: + int: Array index of the value nearest to the given one. + """ + return int(np.argmin(np.abs(narray - val)))
+ + +
[docs]def peaksearch( + traces: np.ndarray, + tof: np.ndarray, + ranges: List[Tuple] = None, + pkwindow: int = 3, + plot: bool = False, +) -> np.ndarray: + """Detect a list of peaks in the corresponding regions of multiple spectra. + + Args: + traces (np.ndarray): Collection of 1D spectra. + tof (np.ndarray): Time-of-flight values. + ranges (List[Tuple], optional): List of ranges for peak detection in the format + [(LowerBound1, UpperBound1), (LowerBound2, UpperBound2), ....]. + Defaults to None. + pkwindow (int, optional): Window width of a peak (amounts to lookahead in + ``peakdetect1d``). Defaults to 3. + plot (bool, optional): Specify whether to display a custom plot of the peak + search results. Defaults to False. + + Returns: + np.ndarray: Collection of peak positions. + """ + pkmaxs = [] + if plot: + plt.figure(figsize=(10, 4)) + + for rng, trace in zip(ranges, traces.tolist()): + cond = (tof >= rng[0]) & (tof <= rng[1]) + trace = np.array(trace).ravel() + tofseg, trseg = tof[cond], trace[cond] + maxs, _ = peakdetect1d(trseg, tofseg, lookahead=pkwindow) + try: + pkmaxs.append(maxs[0, :]) + except IndexError: # No peak found for this range + print(f"No peak detected in range {rng}.") + raise + + if plot: + plt.plot(tof, trace, "--k", linewidth=1) + plt.plot(tofseg, trseg, linewidth=2) + plt.scatter(maxs[0, 0], maxs[0, 1], s=30) + + return np.asarray(pkmaxs)
+ + +# 1D peak detection algorithm adapted from Sixten Bergman +# https://gist.github.com/sixtenbe/1178136#file-peakdetect-py +def _datacheck_peakdetect( + x_axis: np.ndarray, + y_axis: np.ndarray, +) -> Tuple[np.ndarray, np.ndarray]: + """Input format checking for 1D peakdetect algorithm + + Args: + x_axis (np.ndarray): x-axis array + y_axis (np.ndarray): y-axis array + + Raises: + ValueError: Raised if x and y values don't have the same length. + + Returns: + Tuple[np.ndarray, np.ndarray]: Tuple of checked (x/y) arrays. + """ + + if x_axis is None: + x_axis = np.arange(len(y_axis)) + + if len(y_axis) != len(x_axis): + raise ValueError( + "Input vectors y_axis and x_axis must have same length", + ) + + # Needs to be a numpy array + y_axis = np.asarray(y_axis) + x_axis = np.asarray(x_axis) + + return x_axis, y_axis + + +
[docs]def peakdetect1d(
+    y_axis: np.ndarray,
+    x_axis: np.ndarray = None,
+    lookahead: int = 200,
+    delta: int = 0,
+) -> Tuple[np.ndarray, np.ndarray]:
+    """Function for detecting local maxima and minima in a signal.
+    Discovers peaks by searching for values which are surrounded by lower
+    values (for maxima) or larger values (for minima), respectively.
+
+    Converted from/based on a MATLAB script at:
+    http://billauer.co.il/peakdet.html
+
+    Args:
+        y_axis (np.ndarray): A list containing the signal over which to find peaks.
+        x_axis (np.ndarray, optional): An x-axis whose values correspond to the
+            y_axis list and is used in the return to specify the position of the
+            peaks. If omitted, an index of the y_axis is used.
+        lookahead (int, optional): distance to look ahead from a peak candidate to
+            determine if it is the actual peak.
+            '(samples / period) / f' where '4 >= f >= 1.25' might be a good value.
+            Defaults to 200.
+        delta (int, optional): this specifies a minimum difference between a peak and
+            the following points, before a peak may be considered a peak. Useful
+            to hinder the function from picking up false peaks towards the end of
+            the signal. To work well, delta should be set to delta >= RMSnoise * 5.
+            Defaults to 0.
+
+    Raises:
+        ValueError: Raised if lookahead or delta are out of range.
+
+    Returns:
+        Tuple[np.ndarray, np.ndarray]: Tuple of positions of the positive peaks,
+        positions of the negative peaks
+    """
+    max_peaks = []
+    min_peaks = []
+    dump = []  # Used to pop the first hit which almost always is false
+
+    # Check input data
+    x_axis, y_axis = _datacheck_peakdetect(x_axis, y_axis)
+    # Store data length for later use
+    length = len(y_axis)
+
+    # Perform some checks
+    if lookahead < 1:
+        raise ValueError("Lookahead must be '1' or above in value")
+
+    if not (np.ndim(delta) == 0 and delta >= 0):
+        raise ValueError("delta must be a positive number")
+
+    # maxima and minima candidates are temporarily stored in
+    # mx and mn respectively
+    _min, _max = np.inf, -np.inf
+
+    # Only detect peak if there is 'lookahead' amount of points after it
+    for index, (x, y) in enumerate(
+        zip(x_axis[:-lookahead], y_axis[:-lookahead]),
+    ):
+        if y > _max:
+            _max = y
+            _max_pos = x
+
+        if y < _min:
+            _min = y
+            _min_pos = x
+
+        # Find local maxima
+        if y < _max - delta and _max != np.inf:
+            # Maxima peak candidate found
+            # look ahead in signal to ensure that this is a peak and not jitter
+            if y_axis[index : index + lookahead].max() < _max:
+                max_peaks.append([_max_pos, _max])
+                dump.append(True)
+                # Set algorithm to only find minima now
+                _max = np.inf
+                _min = np.inf
+
+                if index + lookahead >= length:
+                    # The end is within lookahead; no more peaks can be found
+                    break
+                continue
+            # else:
+            #    mx = ahead
+            #    mxpos = x_axis[np.where(y_axis[index:index+lookahead]==mx)]
+
+        # Find local minima
+        if y > _min + delta and _min != -np.inf:
+            # Minima peak candidate found
+            # look ahead in signal to ensure that this is a peak and not jitter
+            if y_axis[index : index + lookahead].min() > _min:
+                min_peaks.append([_min_pos, _min])
+                dump.append(False)
+                # Set algorithm to only find maxima now
+                _min = -np.inf
+                _max = -np.inf
+
+                if index + lookahead >= length:
+                    # The end is within lookahead; no more peaks can be found
+                    break
+            # else:
+            #    mn = ahead
+            #    mnpos = x_axis[np.where(y_axis[index:index+lookahead]==mn)]
+
+    # Remove the false hit on the first value of the y_axis
+    try:
+        if dump[0]:
+            max_peaks.pop(0)
+        else:
+            min_peaks.pop(0)
+        del dump
+
+    except IndexError:  # When no peaks have been found
+        pass
+ + return (np.asarray(max_peaks), np.asarray(min_peaks))
+ + +
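For illustration (an editorial sketch, not from the package), the detector can be exercised on a noisy oscillation; ``lookahead`` and ``delta`` are chosen according to the sampling and noise level as the docstring suggests. Note that the first detected extremum is dropped as a presumed false hit.

import numpy as np
from sed.calibrator.energy import peakdetect1d

x = np.linspace(0.0, 10.0 * np.pi, 5000)
y = np.sin(x) + 0.01 * np.random.default_rng(0).standard_normal(x.size)
maxima, minima = peakdetect1d(y, x, lookahead=50, delta=0.1)
print(maxima[:, 0])  # positions close to pi/2 + 2*pi*n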
[docs]def fit_energy_calibration(
+    pos: Union[List[float], np.ndarray],
+    vals: Union[List[float], np.ndarray],
+    binwidth: float,
+    binning: int,
+    ref_id: int = 0,
+    ref_energy: float = None,
+    t: Union[List[float], np.ndarray] = None,
+    energy_scale: str = "kinetic",
+    verbose: bool = True,
+    **kwds,
+) -> dict:
+    """Energy calibration by nonlinear least squares fitting of spectral landmarks on
+    a set of energy dispersion curves (EDCs). This is done here by fitting to the
+    function d/(t-t0)**2.
+
+    Args:
+        pos (Union[List[float], np.ndarray]): Positions of the spectral landmarks
+            (e.g. peaks) in the EDCs.
+        vals (Union[List[float], np.ndarray]): Bias voltage value associated with
+            each EDC.
+        binwidth (float): Time width of each original TOF bin in seconds.
+        binning (int): Binning factor of the TOF values.
+        ref_id (int, optional): Reference dataset index. Defaults to 0.
+        ref_energy (float, optional): Energy value of the feature in the reference
+            trace (eV). Required to output the calibration axis. Defaults to None.
+        t (Union[List[float], np.ndarray], optional): Array of TOF values. Required
+            to calculate the calibration trace. Defaults to None.
+        energy_scale (str, optional): Direction of increasing energy scale.
+
+            - **'kinetic'**: increasing energy with decreasing TOF.
+            - **'binding'**: increasing energy with increasing TOF.
+
+        verbose (bool, optional): Option to print out diagnostic information.
+            Defaults to True.
+        **kwds: keyword arguments:
+
+            - **t0** (dict): constraints and initial value for the fit parameter t0,
+              corresponding to the time-of-flight offset, given as a dictionary with
+              (optional) keys 'value', 'min', 'max', 'vary'. The value defaults to 1e-6.
+            - **E0** (dict): constraints and initial value for the fit parameter E0,
+              corresponding to the energy offset. The value defaults to min(vals).
+            - **d** (dict): constraints and initial value for the fit parameter d,
+              corresponding to the drift distance. The value defaults to 1.
+
+    Returns:
+        dict: A dictionary of fitting parameters including the following,
+
+        - "coeffs": Fitted function coefficients.
+        - "axis": Fitted energy axis.
+    """
+    vals = np.asarray(vals)
+    nvals = vals.size
+
+    if ref_id >= nvals:
+        wn.warn(
+            "Reference index (ref_id) cannot be larger than the number of traces! "
+            "Resetting to the largest allowed index.",
+        )
+        ref_id = nvals - 1
+
+    def residual(pars, time, data, binwidth, binning, energy_scale):
+        model = tof2ev(
+            pars["d"],
+            pars["t0"],
+            binwidth,
+            binning,
+            energy_scale,
+            pars["E0"],
+            time,
+        )
+        if data is None:
+            return model
+        return model - data
+
+    pars = Parameters()
+    d_pars = kwds.pop("d", {})
+    pars.add(
+        name="d",
+        value=d_pars.get("value", 1),
+        min=d_pars.get("min", -np.inf),
+        max=d_pars.get("max", np.inf),
+        vary=d_pars.get("vary", True),
+    )
+    t0_pars = kwds.pop("t0", {})
+    pars.add(
+        name="t0",
+        value=t0_pars.get("value", 1e-6),
+        min=t0_pars.get("min", -np.inf),
+        max=t0_pars.get(
+            "max",
+            (min(pos) - 1) * binwidth * 2**binning,
+        ),
+        vary=t0_pars.get("vary", True),
+    )
+    E0_pars = kwds.pop("E0", {})  # pylint: disable=invalid-name
+    pars.add(
+        name="E0",
+        value=E0_pars.get("value", min(vals)),
+        min=E0_pars.get("min", -np.inf),
+        max=E0_pars.get("max", np.inf),
+        vary=E0_pars.get("vary", True),
+    )
+    fit = Minimizer(
+        residual,
+        pars,
+        fcn_args=(pos, vals, binwidth, binning, energy_scale),
+    )
+    result = fit.leastsq()
+    if verbose:
+        report_fit(result)
+
+    # Construct the calibrating function
+    pfunc = partial(
+        tof2ev,
+        result.params["d"].value,
+        result.params["t0"].value,
+        binwidth,
+        binning,
+        energy_scale,
+    )
+
+    # Return results according to specification
+    ecalibdict = {}
+    ecalibdict["d"] = result.params["d"].value
+    ecalibdict["t0"] = result.params["t0"].value
+    ecalibdict["E0"] = result.params["E0"].value
+    ecalibdict["energy_scale"] = energy_scale
+
+    if (ref_energy is not None) and (t is not None):
+        energy_offset = pfunc(-1 * ref_energy, pos[ref_id])
+        ecalibdict["axis"] = pfunc(-energy_offset, t)
+        ecalibdict["E0"] = -energy_offset
+        ecalibdict["refid"] = ref_id
+
+    return ecalibdict
+ + +
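A self-consistency sketch (editorial, with invented instrument numbers for the bin width and drift distance): the ``d``/``t0`` keywords are dictionaries with ``value``/``min``/``max``/``vary`` entries, and the fit should recover the parameters used to generate the data through the forward model.

import numpy as np
from sed.calibrator.energy import fit_energy_calibration, tof2ev

binwidth, binning = 4.125e-12, 1      # hypothetical ToF bin width (s) and binning factor
pos = np.linspace(64500, 67500, 11)   # synthetic peak positions (bins)
vals = tof2ev(1.0, 1e-7, binwidth, binning, "kinetic", 0.0, pos)  # forward model
calib = fit_energy_calibration(
    pos, vals, binwidth, binning,
    d={"value": 0.9}, t0={"value": 5e-8}, verbose=False,
)
print(calib["d"], calib["t0"])  # should come back close to 1.0 and 1e-7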
[docs]def poly_energy_calibration(
+    pos: Union[List[float], np.ndarray],
+    vals: Union[List[float], np.ndarray],
+    order: int = 3,
+    ref_id: int = 0,
+    ref_energy: float = None,
+    t: Union[List[float], np.ndarray] = None,
+    aug: int = 1,
+    method: str = "lstsq",
+    energy_scale: str = "kinetic",
+    **kwds,
+) -> dict:
+    """Energy calibration by least squares fitting of spectral landmarks on
+    a set of energy dispersion curves (EDCs). This amounts to solving for the
+    coefficient vector, a, in the system of equations T.a = b. Here T is the
+    differential drift time matrix and b the differential bias vector, and it is
+    assumed that the energy-drift-time relationship can be written in the form,
+    E = sum_n (a_n * t**n) + E0
+
+    Args:
+        pos (Union[List[float], np.ndarray]): Positions of the spectral landmarks
+            (e.g. peaks) in the EDCs.
+        vals (Union[List[float], np.ndarray]): Bias voltage value associated with
+            each EDC.
+        order (int, optional): Polynomial order of the fitting function. Defaults to 3.
+        ref_id (int, optional): Reference dataset index. Defaults to 0.
+        ref_energy (float, optional): Energy value of the feature in the reference
+            trace (eV). Required to output the calibration axis. Defaults to None.
+        t (Union[List[float], np.ndarray], optional): Array of TOF values. Required
+            to calculate the calibration trace. Defaults to None.
+        aug (int, optional): Fitting dimension augmentation
+            (1=no change, 2=double, etc). Defaults to 1.
+        method (str, optional): Solver used for the polynomial-form energy calibration.
+
+            - **'lstsq'**: Least squares solution via ``lstsq``.
+            - **'lsqr'**: Sparse least squares solution via ``lsqr``.
+
+            Defaults to "lstsq".
+        energy_scale (str, optional): Direction of increasing energy scale.
+
+            - **'kinetic'**: increasing energy with decreasing TOF.
+            - **'binding'**: increasing energy with increasing TOF.
+
+        **kwds: Keyword arguments passed to ``lsqr``.
+
+    Returns:
+        dict: A dictionary of fitting parameters including the following,
+
+        - "coeffs": Fitted polynomial coefficients (the a's).
+        - "offset": Minimum time-of-flight corresponding to a peak.
+        - "Tmat": the T matrix (differential time-of-flight) in the equation Ta=b.
+        - "bvec": the b vector (differential bias) in the fitting Ta=b.
+        - "axis": Fitted energy axis.
+    """
+    vals = np.asarray(vals)
+    nvals = vals.size
+
+    if ref_id >= nvals:
+        wn.warn(
+            "Reference index (ref_id) cannot be larger than the number of traces! "
+            "Resetting to the largest allowed index.",
+        )
+        ref_id = nvals - 1
+
+    # Top-to-bottom ordering of terms in the T matrix
+    termorder = np.delete(range(0, nvals, 1), ref_id)
+    termorder = np.tile(termorder, aug)
+    # Left-to-right ordering of polynomials in the T matrix
+    polyorder = np.linspace(order, 1, order, dtype="int")
+
+    # Construct the T (differential drift time) matrix, Tmat = Tmain - Tsec
+    t_main = np.array([pos[ref_id] ** p for p in polyorder])
+    # Duplicate to the same order as the polynomials
+    t_main = np.tile(t_main, (aug * (nvals - 1), 1))
+
+    t_sec = []
+
+    for term in termorder:
+        t_sec.append([pos[term] ** p for p in polyorder])
+
+    t_mat = t_main - np.asarray(t_sec)
+
+    # Construct the b vector (differential bias)
+    bvec = vals[ref_id] - np.delete(vals, ref_id)
+    bvec = np.tile(bvec, aug)
+
+    # Solve for the a vector (polynomial coefficients) using least squares
+    if method == "lstsq":
+        sol = lstsq(t_mat, bvec, rcond=None)
+    elif method == "lsqr":
+        sol = lsqr(t_mat, bvec, **kwds)
+    else:
+        raise ValueError(f"Invalid method '{method}', use 'lstsq' or 'lsqr'.")
+    poly_a = sol[0]
+
+    # Construct the calibrating function
+    pfunc = partial(tof2evpoly, poly_a)
+
+    # Return results according to specification
+    ecalibdict = {}
+    ecalibdict["offset"] = np.asarray(pos).min()
+    ecalibdict["coeffs"] = poly_a
+    ecalibdict["Tmat"] = t_mat
+    ecalibdict["bvec"] = bvec
+    ecalibdict["energy_scale"] = energy_scale
+
+    if ref_energy is not None and t is not None:
+        energy_offset = pfunc(-1 * ref_energy, pos[ref_id])
+        ecalibdict["axis"] = pfunc(-energy_offset, t)
+        ecalibdict["E0"] = -energy_offset
+        ecalibdict["refid"] = ref_id
+
+    return ecalibdict
+ + +
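A sketch of the T.a = b construction with synthetic data (editorial, using invented coefficients): generate the bias values from a known cubic via ``tof2evpoly`` and recover its coefficients; the constant offset E0 drops out of the pairwise differences.

import numpy as np
from sed.calibrator.energy import poly_energy_calibration, tof2evpoly

pos = np.linspace(1000.0, 2000.0, 9)    # synthetic peak positions (ToF bins)
a_true = np.array([1e-9, -5e-6, 1e-2])  # a_3, a_2, a_1 (highest order first)
vals = tof2evpoly(a_true, 10.0, pos)    # E = sum_n a_n * t**n + E0
calib = poly_energy_calibration(pos, vals, order=3, ref_id=4)
print(calib["coeffs"])  # approximately a_true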
[docs]def tof2ev(
+    tof_distance: float,
+    time_offset: float,
+    binwidth: float,
+    binning: int,
+    energy_scale: str,
+    energy_offset: float,
+    t: float,
+) -> float:
+    """(d/(t-t0))**2 expression of the time-of-flight to electron volt
+    conversion formula.
+
+    Args:
+        tof_distance (float): Drift distance in meter.
+        time_offset (float): Time offset in seconds.
+        binwidth (float): Time width of each original TOF bin in seconds.
+        binning (int): Binning factor of the TOF values.
+        energy_scale (str): Direction of increasing energy scale.
+
+            - **'kinetic'**: increasing energy with decreasing TOF.
+            - **'binding'**: increasing energy with increasing TOF.
+
+        energy_offset (float): Energy offset in eV.
+        t (float): TOF value in bin number.
+
+    Returns:
+        float: Converted energy in eV
+    """
+    sign = 1 if energy_scale == "kinetic" else -1
+
+    # 2.84281e-12 is m_e/(2e) in eV s^2/m^2, i.e. E[eV] = m_e/(2e) * (d/t)^2,
+    # with the real time of flight t = t_bin * binwidth * 2**binning - t0
+    energy = (
+        2.84281e-12 * sign * (tof_distance / (t * binwidth * 2**binning - time_offset)) ** 2
+        + energy_offset
+    )
+
+    return energy
+ + +
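A worked number (using the same invented instrument values as in the fitting sketch above): for d = 1 m, t0 = 1e-7 s, 4.125e-12 s bins, and binning factor 1, ToF bin 65000 maps to roughly 14.9 eV.

from sed.calibrator.energy import tof2ev

# t_real = 65000 * 4.125e-12 * 2**1 - 1e-7 = 4.3625e-7 s
# E = 2.84281e-12 * (1.0 / 4.3625e-7)**2 ~ 14.94 eV
print(tof2ev(1.0, 1e-7, 4.125e-12, 1, "kinetic", 0.0, 65000.0))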
[docs]def tof2evpoly( + poly_a: Union[List[float], np.ndarray], + energy_offset: float, + t: float, +) -> float: + """Polynomial approximation of the time-of-flight to electron volt + conversion formula. + + Args: + poly_a (Union[List[float], np.ndarray]): Polynomial coefficients. + energy_offset (float): Energy offset in eV. + t (float): TOF value in bin number. + + Returns: + float: Converted energy. + """ + odr = len(poly_a) # Polynomial order + poly_a = poly_a[::-1] + energy = 0.0 + + for i, order in enumerate(range(1, odr + 1)): + energy += poly_a[i] * t**order + energy += energy_offset + + return energy
+ + +
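Since ``poly_a`` is ordered from the highest power down and carries no constant term, the function agrees with ``numpy.polyval`` once the offset is appended (a quick editorial check):

import numpy as np
from sed.calibrator.energy import tof2evpoly

poly_a, e0, t = np.array([1e-9, -5e-6, 1e-2]), 10.0, 1500.0
assert np.isclose(tof2evpoly(poly_a, e0, t), np.polyval(np.append(poly_a, e0), t))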
[docs]def tof2ns(
+    binwidth: float,
+    binning: int,
+    t: float,
+) -> float:
+    """Convert time-of-flight steps to time-of-flight in nanoseconds.
+
+    Designed for use with ``dask.dataframe.DataFrame.map_partitions``.
+
+    Args:
+        binwidth (float): Time step size in seconds.
+        binning (int): Binning of the time-of-flight steps.
+        t (float): TOF value in bin number.
+
+    Returns:
+        float: Converted time in nanoseconds.
+    """
+    val = t * 1e9 * binwidth * 2.0**binning
+    return val
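Because the conversion is plain arithmetic, it broadcasts over dask columns; a hedged sketch of the ``map_partitions``-style use mentioned in the docstring, with the same invented bin width as above:

import dask.dataframe as dd
import pandas as pd
from sed.calibrator.energy import tof2ns

ddf = dd.from_pandas(pd.DataFrame({"t": [64000.0, 65000.0]}), npartitions=1)
ddf["t_ns"] = tof2ns(4.125e-12, 1, ddf["t"])  # 64000 bins -> 528 ns
print(ddf.compute())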
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/calibrator/momentum.html b/sed/2.1.0/_modules/sed/calibrator/momentum.html
new file mode 100644
index 0000000..539c01e
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/calibrator/momentum.html
@@ -0,0 +1,2658 @@
Source code for sed.calibrator.momentum

+"""sed.calibrator.momentum module. Code for momentum calibration and distortion
+correction. Mostly ported from https://github.com/mpes-kit/mpes.
+"""
+import itertools as it
+from copy import deepcopy
+from datetime import datetime
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Tuple
+from typing import Union
+
+import bokeh.palettes as bp
+import bokeh.plotting as pbk
+import dask.dataframe
+import ipywidgets as ipw
+import matplotlib
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+import scipy.ndimage as ndi
+import xarray as xr
+from bokeh.colors import RGB
+from bokeh.io import output_notebook
+from bokeh.palettes import Category10 as ColorCycle
+from IPython.display import display
+from joblib import delayed
+from joblib import Parallel
+from matplotlib import cm
+from numpy.linalg import norm
+from scipy.interpolate import griddata
+from scipy.ndimage import map_coordinates
+from symmetrize import pointops as po
+from symmetrize import sym
+from symmetrize import tps
+
+
+
[docs]class MomentumCorrector: + """ + Momentum distortion correction and momentum calibration workflow functions. + + Args: + data (Union[xr.DataArray, np.ndarray], optional): Multidimensional hypervolume + containing the data. Defaults to None. + bin_ranges (List[Tuple], optional): Binning ranges of the data volume, if + provided as np.ndarray. Defaults to None. + rotsym (int, optional): Rotational symmetry of the data. Defaults to 6. + config (dict, optional): Config dictionary. Defaults to None. + """ + + def __init__( + self, + data: Union[xr.DataArray, np.ndarray] = None, + bin_ranges: List[Tuple] = None, + rotsym: int = 6, + config: dict = None, + ): + """Constructor of the MomentumCorrector class. + + Args: + data (Union[xr.DataArray, np.ndarray], optional): Multidimensional + hypervolume containing the data. Defaults to None. + bin_ranges (List[Tuple], optional): Binning ranges of the data volume, + if provided as np.ndarray. Defaults to None. + rotsym (int, optional): Rotational symmetry of the data. Defaults to 6. + config (dict, optional): Config dictionary. Defaults to None. + """ + if config is None: + config = {} + + self._config = config + + self.image: np.ndarray = None + self.img_ndim: int = None + self.slice: np.ndarray = None + self.slice_corrected: np.ndarray = None + self.slice_transformed: np.ndarray = None + self.bin_ranges: List[Tuple] = self._config["momentum"].get("bin_ranges", []) + + if data is not None: + self.load_data(data=data, bin_ranges=bin_ranges) + + self.detector_ranges = self._config["momentum"]["detector_ranges"] + + self.rotsym = int(rotsym) + self.rotsym_angle = int(360 / self.rotsym) + self.arot = np.array([0] + [self.rotsym_angle] * (self.rotsym - 1)) + self.ascale = np.array([1.0] * self.rotsym) + self.peaks: np.ndarray = None + self.include_center: bool = False + self.use_center: bool = False + self.pouter: np.ndarray = None + self.pcent: Tuple[float, ...] = None + self.pouter_ord: np.ndarray = None + self.prefs: np.ndarray = None + self.ptargs: np.ndarray = None + self.csm_original: float = np.nan + self.mdist: float = np.nan + self.mcvdist: float = np.nan + self.mvvdist: float = np.nan + self.cvdist: np.ndarray = np.array(np.nan) + self.vvdist: np.ndarray = np.array(np.nan) + self.rdeform_field: np.ndarray = None + self.cdeform_field: np.ndarray = None + self.rdeform_field_bkp: np.ndarray = None + self.cdeform_field_bkp: np.ndarray = None + self.inverse_dfield: np.ndarray = None + self.dfield_updated: bool = False + self.transformations: Dict[str, Any] = self._config["momentum"].get("transformations", {}) + self.correction: Dict[str, Any] = self._config["momentum"].get("correction", {}) + self.adjust_params: Dict[str, Any] = {} + self.calibration: Dict[str, Any] = self._config["momentum"].get("calibration", {}) + + self.x_column = self._config["dataframe"]["x_column"] + self.y_column = self._config["dataframe"]["y_column"] + self.corrected_x_column = self._config["dataframe"]["corrected_x_column"] + self.corrected_y_column = self._config["dataframe"]["corrected_y_column"] + self.kx_column = self._config["dataframe"]["kx_column"] + self.ky_column = self._config["dataframe"]["ky_column"] + + self._state: int = 0 + + @property + def features(self) -> dict: + """Dictionary of detected features for the symmetrization process. + ``self.features`` is a derived attribute from existing ones. + + Returns: + dict: Dict containing features "verts" and "center". 
+ """ + feature_dict = { + "verts": np.asarray(self.__dict__.get("pouter_ord", [])), + "center": np.asarray(self.__dict__.get("pcent", [])), + } + + return feature_dict + + @property + def symscores(self) -> dict: + """Dictionary of symmetry-related scores. + + Returns: + dict: Dictionary containing symmetry scores. + """ + sym_dict = { + "csm_original": self.__dict__.get("csm_original", ""), + "csm_current": self.__dict__.get("csm_current", ""), + "arm_original": self.__dict__.get("arm_original", ""), + "arm_current": self.__dict__.get("arm_current", ""), + } + + return sym_dict + +
[docs]    def load_data(
+        self,
+        data: Union[xr.DataArray, np.ndarray],
+        bin_ranges: List[Tuple] = None,
+    ):
+        """Load binned data into the momentum calibrator class.
+
+        Args:
+            data (Union[xr.DataArray, np.ndarray]):
+                2D or 3D data array, either as np.ndarray or xr.DataArray.
+            bin_ranges (List[Tuple], optional):
+                Binning ranges. Needs to be provided in case the data are given
+                as np.ndarray. Otherwise, they are determined from the coords of
+                the xr.DataArray. Defaults to None.
+
+        Raises:
+            ValueError: Raised if the dimensions of the input data do not fit.
+        """
+        if isinstance(data, xr.DataArray):
+            self.image = np.squeeze(data.data)
+            self.bin_ranges = []
+            for axis in data.coords:
+                self.bin_ranges.append(
+                    (
+                        data.coords[axis][0].values,
+                        2 * data.coords[axis][-1].values - data.coords[axis][-2].values,  # endpoint
+                    ),
+                )
+        else:
+            assert bin_ranges is not None
+            self.image = np.squeeze(data)
+            self.bin_ranges = bin_ranges
+
+        self.img_ndim = self.image.ndim
+        if (self.img_ndim > 3) or (self.img_ndim < 2):
+            raise ValueError("The input image dimension needs to be 2 or 3!")
+        if self.img_ndim == 2:
+            self.slice = self.image
+
+        if self.slice is not None:
+            self.slice_corrected = self.slice_transformed = self.slice
+ +
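A hedged construction sketch (editorial): the config keys below are a hypothetical minimal subset (real installations take them from the sed default configuration), just enough to instantiate the class and load a 2D slice.

import numpy as np
import xarray as xr
from sed.calibrator.momentum import MomentumCorrector

config = {
    "momentum": {"detector_ranges": [[0, 2048], [0, 2048]]},
    "dataframe": {
        "x_column": "X", "y_column": "Y",
        "corrected_x_column": "Xm", "corrected_y_column": "Ym",
        "kx_column": "kx", "ky_column": "ky",
    },
}
data = xr.DataArray(
    np.random.default_rng(0).random((256, 256)),
    coords={"x": np.arange(256), "y": np.arange(256)},
    dims=["x", "y"],
)
mc = MomentumCorrector(data=data, config=config)
print(mc.slice.shape, mc.bin_ranges)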
[docs] def select_slicer( + self, + plane: int = 0, + width: int = 5, + axis: int = 2, + apply: bool = False, + ): + """Interactive panel to select (hyper)slice from a (hyper)volume. + + Args: + plane (int, optional): initial value of the plane slider. Defaults to 0. + width (int, optional): initial value of the width slider. Defaults to 5. + axis (int, optional): Axis along which to slice the image. Defaults to 2. + apply (bool, optional): Option to directly apply the values and select the + slice. Defaults to False. + """ + matplotlib.use("module://ipympl.backend_nbagg") + + assert self.img_ndim == 3 + selector = slice(plane, plane + width) + image = np.moveaxis(self.image, axis, 0) + try: + img_slice = image[selector, ...].sum(axis=0) + except AttributeError: + img_slice = image[selector, ...] + + fig, ax = plt.subplots(1, 1) + img = ax.imshow(img_slice.T, origin="lower", cmap="terrain_r") + + def update(plane: int, width: int): + selector = slice(plane, plane + width) + try: + img_slice = image[selector, ...].sum(axis=0) + except AttributeError: + img_slice = image[selector, ...] + img.set_data(img_slice.T) + axmin = np.min(img_slice, axis=(0, 1)) + axmax = np.max(img_slice, axis=(0, 1)) + if axmin < axmax: + img.set_clim(axmin, axmax) + ax.set_title(f"Plane[{plane}:{plane+width}]") + fig.canvas.draw_idle() + + update(plane, width) + + plane_slider = ipw.IntSlider( + value=plane, + min=0, + max=self.image.shape[2] - width, + step=1, + ) + width_slider = ipw.IntSlider(value=width, min=1, max=20, step=1) + + ipw.interact( + update, + plane=plane_slider, + width=width_slider, + ) + + def apply_fun(apply: bool): # noqa: ARG001 + start = plane_slider.value + stop = plane_slider.value + width_slider.value + + selector = slice( + start, + stop, + ) + self.select_slice(selector=selector, axis=axis) + + img.set_data(self.slice.T) + axmin = np.min(self.slice, axis=(0, 1)) + axmax = np.max(self.slice, axis=(0, 1)) + if axmin < axmax: + img.set_clim(axmin, axmax) + ax.set_title( + f"Plane[{start}:{stop}]", + ) + fig.canvas.draw_idle() + + plane_slider.close() + width_slider.close() + apply_button.close() + + apply_button = ipw.Button(description="apply") + display(apply_button) + apply_button.on_click(apply_fun) + + plt.show() + + if apply: + apply_fun(True)
+ +
[docs] def select_slice( + self, + selector: Union[slice, List[int], int], + axis: int = 2, + ): + """Select (hyper)slice from a (hyper)volume. + + Args: + selector (Union[slice, List[int], int]): + Selector along the specified axis to extract the slice (image). Use + the construct slice(start, stop, step) to select a range of images + and sum them. Use an integer to specify only a particular slice. + axis (int, optional): Axis along which to select the image. Defaults to 2. + + Raises: + ValueError: Raised if self.image is already 2D. + """ + if self.img_ndim > 2: + image = np.moveaxis(self.image, axis, 0) + try: + self.slice = image[selector, ...].sum(axis=0) + except AttributeError: + self.slice = image[selector, ...] + + if self.slice is not None: + self.slice_corrected = self.slice_transformed = self.slice + + elif self.img_ndim == 2: + raise ValueError("Input image dimension is already 2!")
+ +
[docs]    def add_features(
+        self,
+        features: np.ndarray,
+        direction: str = "ccw",
+        rotsym: int = 6,
+        symscores: bool = True,
+        **kwds,
+    ):
+        """Add features as reference points provided as np.ndarray. If provided,
+        detects the center of the points and orders the points.
+
+        Args:
+            features (np.ndarray):
+                Array of landmarks, possibly including a center peak. Its shape should
+                be (n,2), where n is equal to the rotation symmetry, or the rotation
+                symmetry+1, if the center is included.
+            direction (str, optional):
+                Direction for ordering the points. Defaults to "ccw".
+            rotsym (int, optional): Rotational symmetry of the data. Defaults to 6.
+            symscores (bool, optional):
+                Option to calculate symmetry scores. Defaults to True.
+            **kwds: Keyword arguments.
+
+                - **symtype** (str): Type of symmetry scores to calculate
+                  if symscores is True. Defaults to "rotation".
+
+        Raises:
+            ValueError: Raised if the number of points does not match the rotsym.
+        """
+        self.rotsym = int(rotsym)
+        self.rotsym_angle = int(360 / self.rotsym)
+        self.arot = np.array([0] + [self.rotsym_angle] * (self.rotsym - 1))
+        self.ascale = np.array([1.0] * self.rotsym)
+
+        if features.shape[0] == self.rotsym:  # assume no center present
+            self.pcent, self.pouter = po.pointset_center(
+                features,
+                method="centroid",
+            )
+            self.include_center = False
+        elif features.shape[0] == self.rotsym + 1:  # assume center included
+            self.pcent, self.pouter = po.pointset_center(
+                features,
+                method="centroidnn",
+            )
+            self.include_center = True
+        else:
+            raise ValueError(
+                f"Found {features.shape[0]} points, "
+                f"but {self.rotsym} or {self.rotsym+1} (incl. center) required.",
+            )
+        if isinstance(self.pcent, np.ndarray):
+            self.pcent = tuple(val.item() for val in self.pcent)
+        # Order the point landmarks
+        self.pouter_ord = po.pointset_order(
+            self.pouter,
+            direction=direction,
+        )
+
+        # Calculate geometric distances
+        if self.pcent is not None:
+            self.calc_geometric_distances()
+
+        if symscores is True:
+            symtype = kwds.pop("symtype", "rotation")
+            self.csm_original = self.calc_symmetry_scores(symtype=symtype)
+
+        if self.rotsym == 6 and self.pcent is not None:
+            self.mdist = (self.mcvdist + self.mvvdist) / 2
+            self.mcvdist = self.mdist
+            self.mvvdist = self.mdist
+ +
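Continuing the construction sketch above with the ``mc`` instance: six synthetic hexagon vertices plus a center point (shape (7, 2)) pass through the ordering and symmetry-score machinery.

import numpy as np

angles = np.deg2rad(np.arange(0, 360, 60))
verts = np.column_stack((128 + 80 * np.cos(angles), 128 + 80 * np.sin(angles)))
features = np.vstack((verts, [[128.0, 128.0]]))  # center appended last
mc.add_features(features=features, rotsym=6)
print(mc.pcent)             # detected center, close to (128, 128)
print(mc.pouter_ord.shape)  # (6, 2) ordered vertices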
[docs] def feature_extract( + self, + image: np.ndarray = None, + direction: str = "ccw", + feature_type: str = "points", + rotsym: int = 6, + symscores: bool = True, + **kwds, + ): + """Extract features from the selected 2D slice. + Currently only point feature detection is implemented. + + Args: + image (np.ndarray, optional): + The (2D) image slice to extract features from. + Defaults to self.slice + direction (str, optional): + The circular direction to reorder the features in ('cw' or 'ccw'). + Defaults to "ccw". + feature_type (str, optional): + The type of features to extract. Defaults to "points". + rotsym (int, optional): Rotational symmetry of the data. Defaults to 6. + symscores (bool, optional): + Option for calculating symmetry scores. Defaults to True. + **kwds: + Extra keyword arguments for ``symmetrize.pointops.peakdetect2d()``. + + Raises: + NotImplementedError: + Raised for undefined feature_types. + """ + if image is None: + if self.slice is not None: + image = self.slice + else: + raise ValueError("No image loaded for feature extraction!") + + if feature_type == "points": + # Detect the point landmarks + self.peaks = po.peakdetect2d(image, **kwds) + + self.add_features( + features=self.peaks, + direction=direction, + rotsym=rotsym, + symscores=symscores, + **kwds, + ) + else: + raise NotImplementedError
+ +
[docs]    def feature_select(
+        self,
+        image: np.ndarray = None,
+        features: np.ndarray = None,
+        include_center: bool = True,
+        rotsym: int = 6,
+        apply: bool = False,
+        **kwds,
+    ):
+        """Interactive panel to select and adjust feature points on the 2D slice.
+
+        Args:
+            image (np.ndarray, optional):
+                The (2D) image slice to select features from.
+                Defaults to self.slice.
+            features (np.ndarray, optional):
+                Array of landmarks, possibly including a center peak. Its shape should
+                be (n,2), where n is equal to the rotation symmetry, or the rotation
+                symmetry+1, if the center is included.
+                If omitted, an array filled with zeros is generated.
+            include_center (bool, optional):
+                Option to include the image center/centroid in the registration
+                process. Defaults to True.
+            rotsym (int, optional): Rotational symmetry of the data. Defaults to 6.
+            apply (bool, optional): Option to directly store the features in the class.
+                Defaults to False.
+            **kwds:
+                Extra keyword arguments passed on to ``add_features``.
+
+        Raises:
+            ValueError: If no valid image is found from which to get the coordinates.
+        """
+        matplotlib.use("module://ipympl.backend_nbagg")
+        if image is None:
+            if self.slice is not None:
+                image = self.slice
+            else:
+                raise ValueError("No valid image loaded!")
+
+        fig, ax = plt.subplots(1, 1)
+        ax.imshow(image.T, origin="lower", cmap="terrain_r")
+
+        if features is None:
+            features = np.zeros((rotsym + (include_center), 2))
+
+        markers = []
+        for peak in features:
+            markers.append(ax.plot(peak[0], peak[1], "o")[0])
+
+        def update_point_no(
+            point_no: int,
+        ):
+            fig.canvas.draw_idle()
+
+            point_x = features[point_no][0]
+            point_y = features[point_no][1]
+
+            point_input_x.value = point_x
+            point_input_y.value = point_y
+
+        def update_point_pos(
+            point_x: float,
+            point_y: float,
+        ):
+            fig.canvas.draw_idle()
+            point_no = point_no_input.value
+            features[point_no][0] = point_x
+            features[point_no][1] = point_y
+
+            markers[point_no].set_xdata(point_x)
+            markers[point_no].set_ydata(point_y)
+
+        point_no_input = ipw.Dropdown(
+            options=range(features.shape[0]),
+            description="Point:",
+        )
+
+        point_input_x = ipw.FloatText(features[0][0])
+        point_input_y = ipw.FloatText(features[0][1])
+        ipw.interact(
+            update_point_no,
+            point_no=point_no_input,
+        )
+        ipw.interact(
+            update_point_pos,
+            point_y=point_input_y,
+            point_x=point_input_x,
+        )
+
+        def onclick(event):
+            point_input_x.value = event.xdata
+            point_input_y.value = event.ydata
+            point_no_input.value = (point_no_input.value + 1) % features.shape[0]
+
+        cid = fig.canvas.mpl_connect("button_press_event", onclick)
+
+        def apply_func(apply: bool):  # noqa: ARG001
+            fig.canvas.mpl_disconnect(cid)
+
+            point_no_input.close()
+            point_input_x.close()
+            point_input_y.close()
+            apply_button.close()
+
+            fig.canvas.draw_idle()
+
+            self.add_features(
+                features=features,
+                rotsym=rotsym,
+                **kwds,
+            )
+
+        apply_button = ipw.Button(description="apply")
+        display(apply_button)
+        apply_button.on_click(apply_func)
+
+        if apply:
+            apply_func(True)
+
+        plt.show()
+ +
[docs] def calc_geometric_distances(self) -> None: + """Calculate geometric distances involving the center and the vertices. + Distances calculated include center-vertex and nearest-neighbor vertex-vertex + distances. + """ + self.cvdist = po.cvdist(self.pouter_ord, self.pcent) + self.mcvdist = self.cvdist.mean() + self.vvdist = po.vvdist(self.pouter_ord) + self.mvvdist = self.vvdist.mean()
+ +
[docs] def calc_symmetry_scores(self, symtype: str = "rotation") -> float: + """Calculate the symmetry scores from geometric quantities. + + Args: + symtype (str, optional): Type of symmetry score to calculate. + Defaults to "rotation". + + Returns: + float: Calculated symmetry score. + """ + csm = po.csm( + self.pcent, + self.pouter_ord, + rotsym=self.rotsym, + type=symtype, + ) + + return csm
+ +
[docs]    def spline_warp_estimate(
+        self,
+        image: np.ndarray = None,
+        use_center: bool = None,
+        fixed_center: bool = True,
+        interp_order: int = 1,
+        ascale: Union[float, list, tuple, np.ndarray] = None,
+        verbose: bool = True,
+        **kwds,
+    ) -> np.ndarray:
+        """Estimate the spline deformation field using thin plate spline registration.
+
+        Args:
+            image (np.ndarray, optional):
+                2D array. Image slice to be corrected. Defaults to self.slice.
+            use_center (bool, optional):
+                Option to use the image center/centroid in the registration
+                process. Defaults to the config value, or True.
+            fixed_center (bool, optional):
+                Option to have a fixed center during registration-based
+                symmetrization. Defaults to True.
+            interp_order (int, optional):
+                Order of interpolation (see ``scipy.ndimage.map_coordinates()``).
+                Defaults to 1.
+            ascale (Union[float, list, tuple, np.ndarray], optional): Scale parameter
+                determining a relative scale for each symmetry feature. If provided as
+                a single float, rotsym has to be 4. This parameter describes the
+                relative scaling between the two orthogonal symmetry directions (for an
+                orthorhombic system). This requires the correction points to be located
+                along the principal axes (X/Y points of the Brillouin zone). Otherwise,
+                an array with ``rotsym`` elements is expected, containing relative
+                scales for each feature. Defaults to an array of equal scales.
+            verbose (bool, optional): Option to report the used landmarks for
+                correction. Defaults to True.
+            **kwds: keyword arguments:
+
+                - **landmarks**: (list/array): Landmark positions (row, column) used
+                  for registration. Defaults to self.pouter_ord.
+                - **targets**: (list/array): Target positions (row, column) used for
+                  registration. If empty, it will be generated by
+                  ``symmetrize.rotVertexGenerator()``.
+                - **new_centers**: (dict): User-specified center positions for the
+                  reference and target sets. {'lmkcenter': (row, col),
+                  'targcenter': (row, col)}
+
+        Returns:
+            np.ndarray: The corrected image.
+        """
+        if image is None:
+            if self.slice is not None:
+                image = self.slice
+            else:
+                image = np.zeros(self._config["momentum"]["bins"][0:2])
+                self.bin_ranges = self._config["momentum"]["ranges"]
+
+        if self.pouter_ord is None:
+            if self.pouter is not None:
+                self.pouter_ord = po.pointset_order(self.pouter)
+                self.correction["creation_date"] = datetime.now().timestamp()
+            else:
+                try:
+                    features = np.asarray(
+                        self.correction["feature_points"],
+                    )
+                    rotsym = self.correction["rotation_symmetry"]
+                    include_center = self.correction["include_center"]
+                    if not include_center and len(features) > rotsym:
+                        features = features[:rotsym, :]
+                    ascale = self.correction.get("ascale", None)
+                    if ascale is not None:
+                        ascale = np.asarray(ascale)
+
+                    if verbose:
+                        if "creation_date" in self.correction:
+                            datestring = datetime.fromtimestamp(
+                                self.correction["creation_date"],
+                            ).strftime(
+                                "%m/%d/%Y, %H:%M:%S",
+                            )
+                            print(
+                                "No landmarks defined, using momentum correction parameters "
+                                f"generated on {datestring}",
+                            )
+                        else:
+                            print(
+                                "No landmarks defined, using momentum correction parameters "
+                                "from config.",
+                            )
+                except KeyError as exc:
+                    raise ValueError(
+                        "No valid landmarks defined, and no landmarks found in configuration!",
+                    ) from exc
+
+                self.add_features(features=features, rotsym=rotsym, include_center=include_center)
+
+        else:
+            self.correction["creation_date"] = datetime.now().timestamp()
+
+        if ascale is not None:
+            if isinstance(ascale, (int, float, np.floating, np.integer)):
+                if self.rotsym != 4:
+                    raise ValueError(
+                        "Providing ascale as scalar number is only valid for 'rotsym'==4.",
+                    )
+                self.ascale = np.array([1.0, ascale, 1.0, ascale])
+            elif isinstance(ascale, (tuple, list, np.ndarray)):
+                if len(ascale) != len(self.ascale):
+                    raise ValueError(
+                        f"ascale needs to be of length 'rotsym', but has length {len(ascale)}.",
+                    )
+                self.ascale = np.asarray(ascale)
+            else:
+                raise TypeError(
+                    "ascale needs to be a single number or a list/tuple/np.ndarray of length "
+                    f"'rotsym' ({self.rotsym})!",
+                )
+
+        if use_center is None:
+            try:
+                use_center = self.correction["use_center"]
+            except KeyError:
+                use_center = True
+        self.use_center = use_center
+
+        self.prefs = kwds.pop("landmarks", self.pouter_ord)
+        self.ptargs = kwds.pop("targets", [])
+
+        # Generate the target point set
+        if not self.ptargs:
+            self.ptargs = sym.rotVertexGenerator(
+                self.pcent,
+                fixedvertex=self.pouter_ord[0, :],
+                arot=self.arot,
+                direction=-1,
+                scale=self.ascale,
+                ret="all",
+            )[1:, :]
+
+        if use_center is True:
+            # Use center of image pattern in the registration-based symmetrization
+            if fixed_center is True:
+                # Add the same center to both the reference and target sets
+                self.prefs = np.column_stack((self.prefs.T, self.pcent)).T
+                self.ptargs = np.column_stack((self.ptargs.T, self.pcent)).T
+            else:  # Add different centers to the reference and target sets
+                newcenters = kwds.pop("new_centers", {})
+                self.prefs = np.column_stack(
+                    (self.prefs.T, newcenters["lmkcenter"]),
+                ).T
+                self.ptargs = np.column_stack(
+                    (self.ptargs.T, newcenters["targcenter"]),
+                ).T
+
+        # Non-iterative estimation of deformation field
+        corrected_image, splinewarp = tps.tpsWarping(
+            self.prefs,
+            self.ptargs,
+            image,
+            None,
+            interp_order,
+            ret="all",
+            **kwds,
+        )
+
+        self.reset_deformation(image=image, coordtype="cartesian")
+
+        self.update_deformation(
+            splinewarp[0],
+            splinewarp[1],
+        )
+
+        # Save backup copies to reset transformations
+        self.rdeform_field_bkp = self.rdeform_field
+        self.cdeform_field_bkp = self.cdeform_field
+
+        self.correction["outer_points"] = self.pouter_ord
+        self.correction["center_point"] = np.asarray(self.pcent)
+        self.correction["reference_points"] = self.prefs
+        self.correction["target_points"] = self.ptargs
+        self.correction["rotation_symmetry"] = self.rotsym
+        self.correction["use_center"] = self.use_center
+        self.correction["include_center"] = self.include_center
+        if self.include_center:
+            self.correction["feature_points"] = np.concatenate(
+                (self.pouter_ord, np.asarray([self.pcent])),
+            )
+        else:
+            self.correction["feature_points"] = self.pouter_ord
+        self.correction["ascale"] = self.ascale
+
+        if self.slice is not None:
+            self.slice_corrected = corrected_image
+
+        if verbose:
+            print("Calculated thin plate spline correction based on the following landmarks:")
+            print(f"pouter: {self.pouter}")
+            if use_center:
+                print(f"pcent: {self.pcent}")
+
+        return corrected_image
+ +
[docs]    def apply_correction(
+        self,
+        image: np.ndarray,
+        axis: int,
+        dfield: np.ndarray = None,
+    ) -> np.ndarray:
+        """Apply a 2D transform to a stack of 2D images (3D) along a specific axis.
+
+        Args:
+            image (np.ndarray): Image to which to apply the transformation.
+            axis (int): Axis for slice selection.
+            dfield (np.ndarray, optional): Row and column deformation field.
+                Defaults to [self.rdeform_field, self.cdeform_field].
+
+        Returns:
+            np.ndarray: The corrected image.
+        """
+        if dfield is None:
+            dfield = np.asarray([self.rdeform_field, self.cdeform_field])
+
+        image_corrected = sym.applyWarping(
+            image,
+            axis,
+            warptype="deform_field",
+            dfield=dfield,
+        )
+
+        return image_corrected
+ +
[docs] def reset_deformation(self, **kwds): + """Reset the deformation field. + + Args: + **kwds: keyword arguments: + + - **image**: the image to base the deformation fields on. Its sizes are + used. Defaults to self.slice + - **coordtype**: The coordinate system to use. Defaults to 'cartesian'. + """ + image = kwds.pop("image", self.slice) + coordtype = kwds.pop("coordtype", "cartesian") + coordmat = sym.coordinate_matrix_2D( + image, + coordtype=coordtype, + stackaxis=0, + ).astype("float64") + + self.rdeform_field = coordmat[1, ...] + self.cdeform_field = coordmat[0, ...] + + self.dfield_updated = True
+ +
[docs]    def update_deformation(self, rdeform: np.ndarray, cdeform: np.ndarray):
+        """Update the class deformation field by applying the provided column/row
+        deformation fields.
+
+        Args:
+            rdeform (np.ndarray): 2D array of row-ordered deformation field.
+            cdeform (np.ndarray): 2D array of column-ordered deformation field.
+        """
+        self.rdeform_field = ndi.map_coordinates(
+            self.rdeform_field,
+            [rdeform, cdeform],
+            order=1,
+            cval=np.nan,
+        )
+        self.cdeform_field = ndi.map_coordinates(
+            self.cdeform_field,
+            [rdeform, cdeform],
+            order=1,
+            cval=np.nan,
+        )
+
+        self.dfield_updated = True
+ +
[docs] def coordinate_transform( + self, + transform_type: str, + keep: bool = False, + interp_order: int = 1, + mapkwds: dict = None, + **kwds, + ) -> np.ndarray: + """Apply a pixel-wise coordinate transform to the image + by means of the deformation field. + + Args: + transform_type (str): Type of deformation to apply to image slice. Possible + values are: + + - translation. + - rotation. + - rotation_auto. + - scaling. + - scaling_auto. + - homography. + + keep (bool, optional): Option to keep the specified coordinate transform in + the class. Defaults to False. + interp_order (int, optional): Interpolation order for filling in missed + pixels. Defaults to 1. + mapkwds (dict, optional): Additional arguments passed to + ``scipy.ndimage.map_coordinates()``. Defaults to None. + **kwds: keyword arguments. + Additional arguments in specific deformation field. + See ``symmetrize.sym`` module. + Returns: + np.ndarray: The corrected image. + """ + if mapkwds is None: + mapkwds = {} + + image = kwds.pop("image", self.slice) + stackax = kwds.pop("stackaxis", 0) + coordmat = sym.coordinate_matrix_2D( + image, + coordtype="homogeneous", + stackaxis=stackax, + ) + + if transform_type == "translation": + if "xtrans" in kwds and "ytrans" in kwds: + tmp = kwds["ytrans"] + kwds["ytrans"] = kwds["xtrans"] + kwds["xtrans"] = tmp + + rdisp, cdisp = sym.translationDF( + coordmat, + stackaxis=stackax, + ret="displacement", + **kwds, + ) + elif transform_type == "rotation": + rdisp, cdisp = sym.rotationDF( + coordmat, + stackaxis=stackax, + ret="displacement", + **kwds, + ) + elif transform_type == "rotation_auto": + center = kwds.pop("center", self.pcent) + # Estimate the optimal rotation angle using intensity symmetry + angle_auto, _ = sym.sym_pose_estimate( + image / image.max(), + center=center, + **kwds, + ) + self.adjust_params = dictmerge( + self.adjust_params, + {"center": center, "angle": angle_auto}, + ) + rdisp, cdisp = sym.rotationDF( + coordmat, + stackaxis=stackax, + ret="displacement", + angle=angle_auto, + ) + elif transform_type == "scaling": + rdisp, cdisp = sym.scalingDF( + coordmat, + stackaxis=stackax, + ret="displacement", + **kwds, + ) + elif transform_type == "scaling_auto": # Compare scaling to a reference image + pass + elif transform_type == "shearing": + rdisp, cdisp = sym.shearingDF( + coordmat, + stackaxis=stackax, + ret="displacement", + **kwds, + ) + elif transform_type == "homography": + transform = kwds.pop("transform", np.eye(3)) + rdisp, cdisp = sym.compose_deform_field( + coordmat, + mat_transform=transform, + stackaxis=stackax, + ret="displacement", + **kwds, + ) + + # Compute deformation field + if stackax == 0: + rdeform, cdeform = ( + coordmat[1, ...] + rdisp, + coordmat[0, ...] 
+ cdisp, + ) + elif stackax == -1: + rdeform, cdeform = ( + coordmat[..., 1] + rdisp, + coordmat[..., 0] + cdisp, + ) + + # Resample image in the deformation field + if image is self.slice: # resample using all previous displacement fields + total_rdeform = ndi.map_coordinates( + self.rdeform_field, + [rdeform, cdeform], + order=1, + ) + total_cdeform = ndi.map_coordinates( + self.cdeform_field, + [rdeform, cdeform], + order=1, + ) + slice_transformed = ndi.map_coordinates( + image, + [total_rdeform, total_cdeform], + order=interp_order, + **mapkwds, + ) + self.slice_transformed = slice_transformed + else: + # if external image is provided, apply only the new additional transformation + slice_transformed = ndi.map_coordinates( + image, + [rdeform, cdeform], + order=interp_order, + **mapkwds, + ) + + # Combine deformation fields + if keep is True: + self.update_deformation( + rdeform, + cdeform, + ) + self.adjust_params["applied"] = True + self.adjust_params = dictmerge(self.adjust_params, kwds) + + return slice_transformed
+ +
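A non-interactive sketch (editorial, again reusing the ``mc`` instance from above): the accumulated deformation field must exist before transforming the class slice, so reset it first, then apply and keep a translation.

mc.reset_deformation()           # initialize the rdeform/cdeform fields
shifted = mc.coordinate_transform(
    transform_type="translation",
    xtrans=10.0,
    ytrans=0.0,
    keep=True,                   # accumulate into the stored deformation field
)
print(shifted.shape)             # same shape as mc.slice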
[docs] def pose_adjustment( + self, + transformations: Dict[str, Any] = None, + apply: bool = False, + reset: bool = True, + verbose: bool = True, + **kwds, + ): + """Interactive panel to adjust transformations that are applied to the image. + Applies first a scaling, next a x/y translation, and last a rotation around + the center of the image (pixel 256/256). + + Args: + transformations (dict, optional): Dictionary with transformations. + Defaults to self.transformations or config["momentum"]["transformations"]. + apply (bool, optional): + Option to directly apply the provided transformations. + Defaults to False. + reset (bool, optional): + Option to reset the correction before transformation. Defaults to True. + verbose (bool, optional): + Option to report the performed transformations. Defaults to True. + **kwds: Keyword parameters defining defaults for the transformations: + + - **scale** (float): Initial value of the scaling slider. + - **xtrans** (float): Initial value of the xtrans slider. + - **ytrans** (float): Initial value of the ytrans slider. + - **angle** (float): Initial value of the angle slider. + """ + matplotlib.use("module://ipympl.backend_nbagg") + if self.slice_corrected is None or not self.slice_corrected.any(): + if self.slice is None or not self.slice.any(): + self.slice = np.zeros(self._config["momentum"]["bins"][0:2]) + source_image = self.slice + plot = False + else: + source_image = self.slice_corrected + plot = True + + transformed_image = source_image + + if reset: + if self.rdeform_field_bkp is not None and self.cdeform_field_bkp is not None: + self.rdeform_field = self.rdeform_field_bkp + self.cdeform_field = self.cdeform_field_bkp + else: + self.reset_deformation() + + center = self._config["momentum"]["center_pixel"] + if plot: + fig, ax = plt.subplots(1, 1) + img = ax.imshow(transformed_image.T, origin="lower", cmap="terrain_r") + ax.axvline(x=center[0]) + ax.axhline(y=center[1]) + + if transformations is None: + transformations = deepcopy(self.transformations) + + if len(kwds) > 0: + for key, value in kwds.items(): + transformations[key] = value + + elif "creation_date" in transformations and verbose: + datestring = datetime.fromtimestamp(transformations["creation_date"]).strftime( + "%m/%d/%Y, %H:%M:%S", + ) + print(f"Using transformation parameters generated on {datestring}") + + def update(scale: float, xtrans: float, ytrans: float, angle: float): + transformed_image = source_image + if scale != 1: + transformations["scale"] = scale + transformed_image = self.coordinate_transform( + image=transformed_image, + transform_type="scaling", + xscale=scale, + yscale=scale, + ) + if xtrans != 0: + transformations["xtrans"] = xtrans + if ytrans != 0: + transformations["ytrans"] = ytrans + if xtrans != 0 or ytrans != 0: + transformed_image = self.coordinate_transform( + image=transformed_image, + transform_type="translation", + xtrans=xtrans, + ytrans=ytrans, + ) + if angle != 0: + transformations["angle"] = angle + transformed_image = self.coordinate_transform( + image=transformed_image, + transform_type="rotation", + angle=angle, + center=center, + ) + if plot: + img.set_data(transformed_image.T) + axmin = np.min(transformed_image, axis=(0, 1)) + axmax = np.max(transformed_image, axis=(0, 1)) + if axmin < axmax: + img.set_clim(axmin, axmax) + fig.canvas.draw_idle() + + update( + scale=transformations.get("scale", 1), + xtrans=transformations.get("xtrans", 0), + ytrans=transformations.get("ytrans", 0), + angle=transformations.get("angle", 0), + ) + + 
scale_slider = ipw.FloatSlider( + value=transformations.get("scale", 1), + min=0.8, + max=1.2, + step=0.01, + ) + xtrans_slider = ipw.FloatSlider( + value=transformations.get("xtrans", 0), + min=-200, + max=200, + step=1, + ) + ytrans_slider = ipw.FloatSlider( + value=transformations.get("ytrans", 0), + min=-200, + max=200, + step=1, + ) + angle_slider = ipw.FloatSlider( + value=transformations.get("angle", 0), + min=-180, + max=180, + step=1, + ) + results_box = ipw.Output() + ipw.interact( + update, + scale=scale_slider, + xtrans=xtrans_slider, + ytrans=ytrans_slider, + angle=angle_slider, + ) + + def apply_func(apply: bool): # noqa: ARG001 + if transformations.get("scale", 1) != 1: + self.coordinate_transform( + transform_type="scaling", + xscale=transformations["scale"], + yscale=transformations["scale"], + keep=True, + ) + if verbose: + with results_box: + print(f"Applied scaling with scale={transformations['scale']}.") + if transformations.get("xtrans", 0) != 0 or transformations.get("ytrans", 0) != 0: + self.coordinate_transform( + transform_type="translation", + xtrans=transformations.get("xtrans", 0), + ytrans=transformations.get("ytrans", 0), + keep=True, + ) + if verbose: + with results_box: + print( + f"Applied translation with (xtrans={transformations.get('xtrans', 0)},", + f"ytrans={transformations.get('ytrans', 0)}).", + ) + if transformations.get("angle", 0) != 0: + self.coordinate_transform( + transform_type="rotation", + angle=transformations["angle"], + center=center, + keep=True, + ) + if verbose: + with results_box: + print(f"Applied rotation with angle={transformations['angle']}.") + + display(results_box) + + if plot: + img.set_data(self.slice_transformed.T) + axmin = np.min(self.slice_transformed, axis=(0, 1)) + axmax = np.max(self.slice_transformed, axis=(0, 1)) + if axmin < axmax: + img.set_clim(axmin, axmax) + fig.canvas.draw_idle() + + if transformations != self.transformations: + transformations["creation_date"] = datetime.now().timestamp() + self.transformations = transformations + + if verbose: + plt.figure() + subs = 20 + plt.title("Deformation field") + plt.scatter( + self.rdeform_field[::subs, ::subs].ravel(), + self.cdeform_field[::subs, ::subs].ravel(), + c="b", + ) + plt.show() + scale_slider.close() + xtrans_slider.close() + ytrans_slider.close() + angle_slider.close() + apply_button.close() + + apply_button = ipw.Button(description="apply") + display(apply_button) + apply_button.on_click(apply_func) + + if plot: + plt.show() + + if apply: + apply_func(True)
+ +
[docs] def calc_inverse_dfield(self): + """Calculate the inverse dfield from the cdeform and rdeform fields""" + self.inverse_dfield = generate_inverse_dfield( + self.rdeform_field, + self.cdeform_field, + self.bin_ranges, + self.detector_ranges, + ) + + return self.inverse_dfield
+ +
[docs] def view( # pylint: disable=dangerous-default-value + self, + image: np.ndarray = None, + origin: str = "lower", + cmap: str = "terrain_r", + figsize: Tuple[int, int] = (4, 4), + points: dict = None, + annotated: bool = False, + backend: str = "matplotlib", + imkwds: dict = {}, + scatterkwds: dict = {}, + cross: bool = False, + crosshair: bool = False, + crosshair_radii: List[int] = [50, 100, 150], + crosshair_thickness: int = 1, + **kwds, + ): + """Display image slice with specified annotations. + + Args: + image (np.ndarray, optional): The image to plot. Defaults to self.slice. + origin (str, optional): Figure origin specification ('lower' or 'upper'). + Defaults to "lower". + cmap (str, optional): Colormap specification. Defaults to "terrain_r". + figsize (Tuple[int, int], optional): Figure size. Defaults to (4, 4). + points (dict, optional): Points for annotation. Defaults to None. + annotated (bool, optional): Option to add annotation. Defaults to False. + backend (str, optional): Visualization backend specification. Defaults to + "matplotlib". + + - 'matplotlib': use static display rendered by matplotlib. + - 'bokeh': use interactive display rendered by bokeh. + + imkwds (dict, optional): Keyword arguments for + ``matplotlib.pyplot.imshow()``. Defaults to {}. + scatterkwds (dict, optional): Keyword arguments for + ``matplotlib.pyplot.scatter()``. Defaults to {}. + cross (bool, optional): Option to display a horizontal/vertical lines at + self.pcent. Defaults to False. + crosshair (bool, optional): Display option to plot circles around center + self.pcent. Works only in bokeh backend. Defaults to False. + crosshair_radii (List[int], optional): Pixel radii of circles to plot when + crosshair option is activated. Defaults to [50, 100, 150]. + crosshair_thickness (int, optional): Thickness of crosshair circles. + Defaults to 1. + **kwds: keyword arguments. + General extra arguments for the plotting procedure. 
+ """ + if image is None: + image = self.slice + num_rows, num_cols = image.shape + + if points is None: + points = self.features + + if annotated: + tsr, tsc = kwds.pop("textshift", (3, 3)) + txtsize = kwds.pop("textsize", 12) + + if backend == "matplotlib": + fig, ax = plt.subplots(figsize=figsize) + ax.imshow(image.T, origin=origin, cmap=cmap, **imkwds) + + if cross: + center = self._config["momentum"]["center_pixel"] + ax.axvline(x=center[0]) + ax.axhline(y=center[1]) + + # Add annotation to the figure + if annotated: + for ( + p_keys, # pylint: disable=unused-variable + p_vals, + ) in points.items(): + try: + ax.scatter(p_vals[:, 0], p_vals[:, 1], **scatterkwds) + except IndexError: + try: + ax.scatter(p_vals[0], p_vals[1], **scatterkwds) + except IndexError: + pass + + if p_vals.size > 2: + for i_pval, pval in enumerate(p_vals): + ax.text( + pval[0] + tsc, + pval[1] + tsr, + str(i_pval), + fontsize=txtsize, + ) + + elif backend == "bokeh": + output_notebook(hide_banner=True) + colors = it.cycle(ColorCycle[10]) + ttp = [("(x, y)", "($x, $y)")] + figsize = kwds.pop("figsize", (320, 300)) + palette = cm2palette(cmap) # Retrieve palette colors + fig = pbk.figure( + width=figsize[0], + height=figsize[1], + tooltips=ttp, + x_range=(0, num_rows), + y_range=(0, num_cols), + ) + fig.image( + image=[image.T], + x=0, + y=0, + dw=num_rows, + dh=num_cols, + palette=palette, + **imkwds, + ) + + if annotated is True: + for p_keys, p_vals in points.items(): + try: + xcirc, ycirc = p_vals[:, 0], p_vals[:, 1] + fig.scatter( + xcirc, + ycirc, + size=8, + color=next(colors), + **scatterkwds, + ) + except IndexError: + try: + xcirc, ycirc = p_vals[0], p_vals[1] + fig.scatter( + xcirc, + ycirc, + size=8, + color=next(colors), + **scatterkwds, + ) + except IndexError: + pass + if crosshair and self.pcent is not None: + for radius in crosshair_radii: + fig.annulus( + x=[self.pcent[0]], + y=[self.pcent[1]], + inner_radius=radius - crosshair_thickness, + outer_radius=radius, + color="red", + alpha=0.6, + ) + + pbk.show(fig)
+ +
[docs]    def select_k_range(
+        self,
+        point_a: Union[np.ndarray, List[int]] = None,
+        point_b: Union[np.ndarray, List[int]] = None,
+        k_distance: float = None,
+        k_coord_a: Union[np.ndarray, List[float]] = None,
+        k_coord_b: Union[np.ndarray, List[float]] = np.array([0.0, 0.0]),
+        equiscale: bool = True,
+        apply: bool = False,
+    ):
+        """Interactive selection function for features for the momentum axes calibration.
+        It allows the user to select the pixel positions of two symmetry points (a and b)
+        and the k-space distance between the two. Alternatively, the coordinates of both
+        points can be provided. See the equiscale option for details on the
+        specifications of point coordinates.
+
+        Args:
+            point_a (Union[np.ndarray, List[int]], optional): Pixel coordinates of the
+                symmetry point a.
+            point_b (Union[np.ndarray, List[int]], optional): Pixel coordinates of the
+                symmetry point b. Defaults to the center pixel of the image, defined by
+                config["momentum"]["center_pixel"].
+            k_distance (float, optional): The known momentum space distance between the
+                two symmetry points.
+            k_coord_a (Union[np.ndarray, List[float]], optional): Momentum coordinate
+                of the symmetry point a. Only valid if equiscale=False.
+            k_coord_b (Union[np.ndarray, List[float]], optional): Momentum coordinate
+                of the symmetry point b. Only valid if equiscale=False. Defaults to
+                the k-space center np.array([0.0, 0.0]).
+            equiscale (bool, optional): Option to adopt equal scale along both the x
+                and y directions.
+
+                - **True**: Use a uniform scale for both x and y directions in the
+                  image coordinate system. This applies to the situation where
+                  k_distance is given and the points a and b are (close to) parallel
+                  with one of the two image axes.
+                - **False**: Calculate the momentum scale for both x and y directions
+                  separately. This applies to the situation where the points a and b
+                  are sufficiently different in both x and y directions in the image
+                  coordinate system.
+
+                Defaults to 'True'.
+
+            apply (bool, optional): Option to directly store the calibration parameters
+                to the class. Defaults to False.
+
+        Raises:
+            ValueError: If no valid image is found from which to get the coordinates.
+ """ + matplotlib.use("module://ipympl.backend_nbagg") + if self.slice_transformed is not None: + image = self.slice_transformed + elif self.slice_corrected is not None: + image = self.slice_corrected + elif self.slice is not None: + image = self.slice + else: + raise ValueError("No valid image loaded!") + + if point_b is None: + point_b = self._config["momentum"]["center_pixel"] + + if point_a is None: + point_a = [0, 0] + + fig, ax = plt.subplots(1, 1) + img = ax.imshow(image.T, origin="lower", cmap="terrain_r") + + (marker_a,) = ax.plot(point_a[0], point_a[1], "o") + (marker_b,) = ax.plot(point_b[0], point_b[1], "ro") + + def update( + point_a_x: int, + point_a_y: int, + point_b_x: int, + point_b_y: int, + k_distance: float, # noqa: ARG001 + ): + fig.canvas.draw_idle() + marker_a.set_xdata(point_a_x) + marker_a.set_ydata(point_a_y) + marker_b.set_xdata(point_b_x) + marker_b.set_ydata(point_b_y) + + point_a_input_x = ipw.IntText(point_a[0]) + point_a_input_y = ipw.IntText(point_a[1]) + point_b_input_x = ipw.IntText(point_b[0]) + point_b_input_y = ipw.IntText(point_b[1]) + k_distance_input = ipw.FloatText(k_distance) + ipw.interact( + update, + point_a_x=point_a_input_x, + point_a_y=point_a_input_y, + point_b_x=point_b_input_x, + point_b_y=point_b_input_y, + k_distance=k_distance_input, + ) + + self._state = 0 + + def onclick(event): + if self._state == 0: + point_a_input_x.value = event.xdata + point_a_input_y.value = event.ydata + self._state = 1 + else: + point_b_input_x.value = event.xdata + point_b_input_y.value = event.ydata + self._state = 0 + + cid = fig.canvas.mpl_connect("button_press_event", onclick) + + def apply_func(apply: bool): # noqa: ARG001 + point_a = [point_a_input_x.value, point_a_input_y.value] + point_b = [point_b_input_x.value, point_b_input_y.value] + calibration = self.calibrate( + point_a=point_a, + point_b=point_b, + k_distance=k_distance, + equiscale=equiscale, + k_coord_a=k_coord_a, + k_coord_b=k_coord_b, + ) + + img.set_extent(calibration["extent"]) + plt.title("Momentum calibrated data") + plt.xlabel("$k_x$", fontsize=15) + plt.ylabel("$k_y$", fontsize=15) + ax.axhline(0) + ax.axvline(0) + + fig.canvas.mpl_disconnect(cid) + + point_a_input_x.close() + point_a_input_y.close() + point_b_input_x.close() + point_b_input_y.close() + k_distance_input.close() + apply_button.close() + + fig.canvas.draw_idle() + + apply_button = ipw.Button(description="apply") + display(apply_button) + apply_button.on_click(apply_func) + + if apply: + apply_func(True) + + plt.show()
+ +
[docs] def calibrate(
+        self,
+        point_a: Union[np.ndarray, List[int]],
+        point_b: Union[np.ndarray, List[int]],
+        k_distance: float = None,
+        k_coord_a: Union[np.ndarray, List[float]] = None,
+        k_coord_b: Union[np.ndarray, List[float]] = np.array([0.0, 0.0]),
+        equiscale: bool = True,
+        image: np.ndarray = None,
+    ) -> dict:
+        """Momentum axes calibration using the pixel positions of two symmetry points
+        (a and b) and the absolute coordinate of a single point (b), defaulting to
+        [0., 0.]. All coordinates should be specified in the (x/y), i.e. (column_index,
+        row_index) format. See the equiscale option for details on the specification
+        of point coordinates.
+
+        Args:
+            point_a (Union[np.ndarray, List[int]]): Pixel coordinates of the
+                symmetry point a.
+            point_b (Union[np.ndarray, List[int]]): Pixel coordinates of the
+                symmetry point b, typically the center pixel of the image, defined by
+                config["momentum"]["center_pixel"].
+            k_distance (float, optional): The known momentum space distance between the
+                two symmetry points.
+            k_coord_a (Union[np.ndarray, List[float]], optional): Momentum coordinate
+                of the symmetry point a. Only valid if equiscale=False.
+            k_coord_b (Union[np.ndarray, List[float]], optional): Momentum coordinate
+                of the symmetry point b. Only valid if equiscale=False. Defaults to
+                the k-space center np.array([0.0, 0.0]).
+            equiscale (bool, optional): Option to adopt equal scale along both the x
+                and y directions.
+
+                - **True**: Use a uniform scale for both x and y directions in the
+                  image coordinate system. This applies to the situation where
+                  k_distance is given and the points a and b are (close to) parallel
+                  with one of the two image axes.
+                - **False**: Calculate the momentum scale for both x and y directions
+                  separately. This applies to the situation where the points a and b
+                  are sufficiently different in both x and y directions in the image
+                  coordinate system.
+
+                Defaults to True.
+            image (np.ndarray, optional): The energy slice for which to return the
+                calibration. Defaults to self.slice_corrected.
+
+        Returns:
+            dict: dictionary with the following entries:
+
+            - "axes": Tuple of 1D arrays
+                Momentum coordinates of the row and column.
+            - "extent": list
+                Extent of the two momentum axes (can be used directly in imshow).
+            - "grid": Tuple of 2D arrays
+                Row and column mesh grid generated from the coordinates
+                (can be used directly in pcolormesh).
+ - "coeffs": Tuple of (x, y) calibration coefficients + - "x_center", "y_center": Pixel positions of the k-space center + - "cstart", "rstart": Detector positions of the image used for + calibration + - "cstep", "rstep": Step size of detector coordinates in the image + used for calibration + """ + if image is None: + image = self.slice_corrected + + nrows, ncols = image.shape + point_a, point_b = map(np.array, [point_a, point_b]) + + rowdist = range(nrows) - point_b[0] + coldist = range(ncols) - point_b[1] + + if equiscale is True: + assert k_distance is not None + # Use the same conversion factor along both x and y directions + # (need k_distance) + pixel_distance = norm(point_a - point_b) + # Calculate the pixel to momentum conversion factor + xratio = yratio = k_distance / pixel_distance + + else: + assert k_coord_a is not None + # Calculate the conversion factor along x and y directions separately + # (need k_coord_a) + kxb, kyb = k_coord_b + kxa, kya = k_coord_a + # Calculate the column- and row-wise conversion factor + xratio = (kxa - kxb) / (point_a[0] - point_b[0]) + yratio = (kya - kyb) / (point_a[1] - point_b[1]) + + k_row = rowdist * xratio + k_coord_b[0] + k_col = coldist * yratio + k_coord_b[1] + + # Calculate other return parameters + k_rowgrid, k_colgrid = np.meshgrid(k_row, k_col) + + # Assemble into return dictionary + self.calibration = {} + self.calibration["creation_date"] = datetime.now().timestamp() + self.calibration["kx_axis"] = k_row + self.calibration["ky_axis"] = k_col + self.calibration["grid"] = (k_rowgrid, k_colgrid) + self.calibration["extent"] = (k_row[0], k_row[-1], k_col[0], k_col[-1]) + self.calibration["kx_scale"] = xratio + self.calibration["ky_scale"] = yratio + self.calibration["x_center"] = point_b[0] - k_coord_b[0] / xratio + self.calibration["y_center"] = point_b[1] - k_coord_b[1] / yratio + # copy parameters for applying calibration + try: + self.calibration["rstart"] = self.bin_ranges[0][0] + self.calibration["cstart"] = self.bin_ranges[1][0] + self.calibration["rstep"] = (self.bin_ranges[0][1] - self.bin_ranges[0][0]) / nrows + self.calibration["cstep"] = (self.bin_ranges[1][1] - self.bin_ranges[1][0]) / ncols + except (AttributeError, IndexError): + pass + + return self.calibration
+ +
[docs] def apply_corrections( + self, + df: Union[pd.DataFrame, dask.dataframe.DataFrame], + x_column: str = None, + y_column: str = None, + new_x_column: str = None, + new_y_column: str = None, + verbose: bool = True, + **kwds, + ) -> Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]: + """Calculate and replace the X and Y values with their distortion-corrected + version. + + Args: + df (Union[pd.DataFrame, dask.dataframe.DataFrame]): Dataframe to apply + the distortion correction to. + x_column (str, optional): Label of the 'X' column before momentum + distortion correction. Defaults to config["momentum"]["x_column"]. + y_column (str, optional): Label of the 'Y' column before momentum + distortion correction. Defaults to config["momentum"]["y_column"]. + new_x_column (str, optional): Label of the 'X' column after momentum + distortion correction. + Defaults to config["momentum"]["corrected_x_column"]. + new_y_column (str, optional): Label of the 'Y' column after momentum + distortion correction. + Defaults to config["momentum"]["corrected_y_column"]. + verbose (bool, optional): Option to report the used landmarks for correction. + Defaults to True. + **kwds: Keyword arguments: + + - **dfield**: Inverse dfield + - **cdeform_field**, **rdeform_field**: Column- and row-wise forward + deformation fields. + + Additional keyword arguments are passed to ``apply_dfield``. + + Returns: + Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]: Dataframe with + added columns and momentum correction metadata dictionary. + """ + if x_column is None: + x_column = self.x_column + if y_column is None: + y_column = self.y_column + + if new_x_column is None: + new_x_column = self.corrected_x_column + if new_y_column is None: + new_y_column = self.corrected_y_column + + if self.inverse_dfield is None or self.dfield_updated: + if self.rdeform_field is None and self.cdeform_field is None: + if self.correction or self.transformations: + if self.correction: + # Generate spline warp from class features or config + self.spline_warp_estimate(verbose=verbose) + if self.transformations: + # Apply config pose adjustments + self.pose_adjustment() + else: + raise ValueError("No corrections or transformations defined!") + + self.inverse_dfield = generate_inverse_dfield( + self.rdeform_field, + self.cdeform_field, + self.bin_ranges, + self.detector_ranges, + ) + self.dfield_updated = False + + out_df = df.map_partitions( + apply_dfield, + dfield=self.inverse_dfield, + x_column=x_column, + y_column=y_column, + new_x_column=new_x_column, + new_y_column=new_y_column, + detector_ranges=self.detector_ranges, + **kwds, + ) + + metadata = self.gather_correction_metadata() + + return out_df, metadata
+ +
[docs] def gather_correction_metadata(self) -> dict: + """Collect meta data for momentum correction. + + Returns: + dict: generated correction metadata dictionary. + """ + metadata: Dict[Any, Any] = {} + if len(self.correction) > 0: + metadata["correction"] = self.correction + metadata["correction"]["applied"] = True + metadata["correction"]["cdeform_field"] = self.cdeform_field + metadata["correction"]["rdeform_field"] = self.rdeform_field + try: + metadata["correction"]["creation_date"] = self.correction["creation_date"] + except KeyError: + pass + if len(self.adjust_params) > 0: + metadata["registration"] = self.adjust_params + metadata["registration"]["creation_date"] = datetime.now().timestamp() + metadata["registration"]["applied"] = True + metadata["registration"]["depends_on"] = ( + "/entry/process/registration/transformations/rot_z" + if "angle" in metadata["registration"] and metadata["registration"]["angle"] + else "/entry/process/registration/transformations/trans_y" + if "xtrans" in metadata["registration"] and metadata["registration"]["xtrans"] + else "/entry/process/registration/transformations/trans_x" + if "ytrans" in metadata["registration"] and metadata["registration"]["ytrans"] + else "." + ) + if ( + "ytrans" in metadata["registration"] and metadata["registration"]["ytrans"] + ): # swapped definitions + metadata["registration"]["trans_x"] = {} + metadata["registration"]["trans_x"]["value"] = metadata["registration"]["ytrans"] + metadata["registration"]["trans_x"]["type"] = "translation" + metadata["registration"]["trans_x"]["units"] = "pixel" + metadata["registration"]["trans_x"]["vector"] = np.asarray( + [1.0, 0.0, 0.0], + ) + metadata["registration"]["trans_x"]["depends_on"] = "." + if "xtrans" in metadata["registration"] and metadata["registration"]["xtrans"]: + metadata["registration"]["trans_y"] = {} + metadata["registration"]["trans_y"]["value"] = metadata["registration"]["xtrans"] + metadata["registration"]["trans_y"]["type"] = "translation" + metadata["registration"]["trans_y"]["units"] = "pixel" + metadata["registration"]["trans_y"]["vector"] = np.asarray( + [0.0, 1.0, 0.0], + ) + metadata["registration"]["trans_y"]["depends_on"] = ( + "/entry/process/registration/transformations/trans_x" + if "ytrans" in metadata["registration"] and metadata["registration"]["ytrans"] + else "." + ) + if "angle" in metadata["registration"] and metadata["registration"]["angle"]: + metadata["registration"]["rot_z"] = {} + metadata["registration"]["rot_z"]["value"] = metadata["registration"]["angle"] + metadata["registration"]["rot_z"]["type"] = "rotation" + metadata["registration"]["rot_z"]["units"] = "degrees" + metadata["registration"]["rot_z"]["vector"] = np.asarray( + [0.0, 0.0, 1.0], + ) + metadata["registration"]["rot_z"]["offset"] = np.concatenate( + (metadata["registration"]["center"], [0.0]), + ) + metadata["registration"]["rot_z"]["depends_on"] = ( + "/entry/process/registration/transformations/trans_y" + if "xtrans" in metadata["registration"] and metadata["registration"]["xtrans"] + else "/entry/process/registration/transformations/trans_x" + if "ytrans" in metadata["registration"] and metadata["registration"]["ytrans"] + else "." + ) + + return metadata
+ +
[docs] def append_k_axis( + self, + df: Union[pd.DataFrame, dask.dataframe.DataFrame], + x_column: str = None, + y_column: str = None, + new_x_column: str = None, + new_y_column: str = None, + calibration: dict = None, + **kwds, + ) -> Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]: + """Calculate and append the k axis coordinates (kx, ky) to the events dataframe. + + Args: + df (Union[pd.DataFrame, dask.dataframe.DataFrame]): Dataframe to apply the + distortion correction to. + x_column (str, optional): Label of the source 'X' column. + Defaults to config["momentum"]["corrected_x_column"] or + config["momentum"]["x_column"] (whichever is present). + y_column (str, optional): Label of the source 'Y' column. + Defaults to config["momentum"]["corrected_y_column"] or + config["momentum"]["y_column"] (whichever is present). + new_x_column (str, optional): Label of the destination 'X' column after + momentum calibration. Defaults to config["momentum"]["kx_column"]. + new_y_column (str, optional): Label of the destination 'Y' column after + momentum calibration. Defaults to config["momentum"]["ky_column"]. + calibration (dict, optional): Dictionary containing calibration parameters. + Defaults to 'self.calibration' or config["momentum"]["calibration"]. + **kwds: Keyword parameters for momentum calibration. Parameters are added + to the calibration dictionary. + + Returns: + Tuple[Union[pd.DataFrame, dask.dataframe.DataFrame], dict]: Dataframe with + added columns and momentum calibration metadata dictionary. + """ + if x_column is None: + if self.corrected_x_column in df.columns: + x_column = self.corrected_x_column + else: + x_column = self.x_column + if y_column is None: + if self.corrected_y_column in df.columns: + y_column = self.corrected_y_column + else: + y_column = self.y_column + + if new_x_column is None: + new_x_column = self.kx_column + + if new_y_column is None: + new_y_column = self.ky_column + + # pylint: disable=duplicate-code + if calibration is None: + calibration = deepcopy(self.calibration) + + if len(kwds) > 0: + for key, value in kwds.items(): + calibration[key] = value + calibration["creation_date"] = datetime.now().timestamp() + + try: + (df[new_x_column], df[new_y_column]) = detector_coordinates_2_k_coordinates( + r_det=df[x_column], + c_det=df[y_column], + r_start=calibration["rstart"], + c_start=calibration["cstart"], + r_center=calibration["x_center"], + c_center=calibration["y_center"], + r_conversion=calibration["kx_scale"], + c_conversion=calibration["ky_scale"], + r_step=calibration["rstep"], + c_step=calibration["cstep"], + ) + except KeyError as exc: + raise ValueError( + "Required calibration parameters missing!", + ) from exc + + metadata = self.gather_calibration_metadata(calibration=calibration) + + return df, metadata
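Following on from the sketch above, appending the calibrated axes is then a one-liner (again assuming a hypothetical `mc` with a stored calibration and an events dataframe `df`):

    # kx/ky columns are added using the stored calibration:
    df, metadata = mc.append_k_axis(df)
    # or with an explicit calibration dictionary:
    # df, metadata = mc.append_k_axis(df, calibration=calibration)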
+ +
[docs] def gather_calibration_metadata(self, calibration: dict = None) -> dict:
+        """Collect metadata for momentum calibration.
+
+        Args:
+            calibration (dict, optional): Dictionary with momentum calibration
+                parameters. If omitted, it is taken from the class.
+
+        Returns:
+            dict: Generated metadata dictionary.
+        """
+        if calibration is None:
+            calibration = self.calibration
+        metadata: Dict[Any, Any] = {}
+        try:
+            metadata["creation_date"] = calibration["creation_date"]
+        except KeyError:
+            pass
+        metadata["applied"] = True
+        metadata["calibration"] = calibration
+        # create empty calibrated axis entries, if they are not present.
+        if "kx_axis" not in metadata["calibration"]:
+            metadata["calibration"]["kx_axis"] = 0
+        if "ky_axis" not in metadata["calibration"]:
+            metadata["calibration"]["ky_axis"] = 0
+
+        return metadata
+ + +
[docs]def cm2palette(cmap_name: str) -> list:
+    """Convert a matplotlib colormap (cm) into a bokeh palette.
+
+    Args:
+        cmap_name (str): Name of the colormap/palette.
+
+    Returns:
+        list: List of colors in hex representation (a bokeh palette).
+    """
+    if cmap_name in bp.all_palettes.keys():
+        palette_func = getattr(bp, cmap_name)
+        palette = palette_func
+
+    else:
+        palette_func = getattr(cm, cmap_name)
+        mpl_cm_rgb = (255 * palette_func(range(256))).astype("int")
+        palette = [RGB(*tuple(rgb)).to_hex() for rgb in mpl_cm_rgb]
+
+    return palette
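For instance (assuming matplotlib and bokeh are installed; "viridis" is not a bokeh palette name, so it is converted from the matplotlib colormap):

    from sed.calibrator.momentum import cm2palette

    palette = cm2palette("viridis")
    print(len(palette), palette[0])  # 256 hex colors, e.g. '#440154'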
+ + +
[docs]def dictmerge(
+    main_dict: dict,
+    other_entries: Union[List[dict], Tuple[dict], dict],
+) -> dict:
+    """Merge a dictionary with other dictionaries.
+
+    Args:
+        main_dict (dict): Main dictionary.
+        other_entries (Union[List[dict], Tuple[dict], dict]):
+            A single dictionary, or a list/tuple of dictionaries, to merge into
+            main_dict. Later entries overwrite earlier ones on key collisions.
+
+    Returns:
+        dict: Merged dictionary.
+    """
+    if isinstance(
+        other_entries,
+        (
+            list,
+            tuple,
+        ),
+    ):  # Merge main_dict with a list or tuple of dictionaries
+        for oth in other_entries:
+            main_dict = {**main_dict, **oth}
+
+    elif isinstance(other_entries, dict):  # Merge main_dict with a single dictionary
+        main_dict = {**main_dict, **other_entries}
+
+    return main_dict
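A toy example; note that later entries win on key collisions:

    from sed.calibrator.momentum import dictmerge

    merged = dictmerge({"a": 1}, [{"b": 2}, {"a": 3}])
    assert merged == {"a": 3, "b": 2}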
+ + +
[docs]def detector_coordinates_2_k_coordinates( + r_det: float, + c_det: float, + r_start: float, + c_start: float, + r_center: float, + c_center: float, + r_conversion: float, + c_conversion: float, + r_step: float, + c_step: float, +) -> Tuple[float, float]: + """Conversion from detector coordinates (r_det, c_det) to momentum coordinates + (kr, kc). + + Args: + r_det (float): Row detector coordinates. + c_det (float): Column detector coordinates. + r_start (float): Start value for row detector coordinates. + c_start (float): Start value for column detector coordinates. + r_center (float): Center value for row detector coordinates. + c_center (float): Center value for column detector coordinates. + r_conversion (float): Row conversion factor. + c_conversion (float): Column conversion factor. + r_step (float): Row stepping factor. + c_step (float): Column stepping factor. + + Returns: + Tuple[float, float]: Converted momentum space row/column coordinates. + """ + r_det0 = r_start + r_step * r_center + c_det0 = c_start + c_step * c_center + k_r = r_conversion * ((r_det - r_det0) / r_step) + k_c = c_conversion * ((c_det - c_det0) / c_step) + + return (k_r, k_c)
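To make the mapping concrete, a tiny worked example with made-up numbers: with r_start=0, r_step=1 and the center at row 256, detector row 266 lies 10 steps from the center, so k_r = 10 * r_conversion = 0.1.

    from sed.calibrator.momentum import detector_coordinates_2_k_coordinates

    k_r, k_c = detector_coordinates_2_k_coordinates(
        r_det=266.0, c_det=256.0,
        r_start=0.0, c_start=0.0,
        r_center=256.0, c_center=256.0,
        r_conversion=0.01, c_conversion=0.01,
        r_step=1.0, c_step=1.0,
    )
    assert abs(k_r - 0.1) < 1e-12 and abs(k_c) < 1e-12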
+ + +
[docs]def apply_dfield( + df: Union[pd.DataFrame, dask.dataframe.DataFrame], + dfield: np.ndarray, + x_column: str, + y_column: str, + new_x_column: str, + new_y_column: str, + detector_ranges: List[Tuple], +) -> Union[pd.DataFrame, dask.dataframe.DataFrame]: + """Application of the inverse displacement-field to the dataframe coordinates. + + Args: + df (Union[pd.DataFrame, dask.dataframe.DataFrame]): Dataframe to apply the + distortion correction to. + dfield (np.ndarray): The distortion correction field. 3D matrix, + with column and row distortion fields stacked along the first dimension. + x_column (str): Label of the 'X' source column. + y_column (str): Label of the 'Y' source column. + new_x_column (str): Label of the 'X' destination column. + new_y_column (str): Label of the 'Y' destination column. + detector_ranges (List[Tuple]): tuple of pixel ranges of the detector x/y + coordinates + + Returns: + Union[pd.DataFrame, dask.dataframe.DataFrame]: dataframe with added columns + """ + x = df[x_column] + y = df[y_column] + + r_axis_steps = (detector_ranges[0][1] - detector_ranges[0][0]) / dfield[0].shape[0] + c_axis_steps = (detector_ranges[1][1] - detector_ranges[1][0]) / dfield[0].shape[1] + + df[new_x_column], df[new_y_column] = ( + map_coordinates(dfield[0], (x, y), order=1) * r_axis_steps, + map_coordinates(dfield[1], (x, y), order=1) * c_axis_steps, + ) + return df
+ + +
[docs]def generate_inverse_dfield( + rdeform_field: np.ndarray, + cdeform_field: np.ndarray, + bin_ranges: List[Tuple], + detector_ranges: List[Tuple], +) -> np.ndarray: + """Generate inverse deformation field using interpolation with griddata. + Assuming the binning range of the input ``rdeform_field`` and ``cdeform_field`` + covers the whole detector. + + Args: + rdeform_field (np.ndarray): Row-wise deformation field. + cdeform_field (np.ndarray): Column-wise deformation field. + bin_ranges (List[Tuple]): Detector ranges of the binned coordinates. + detector_ranges (List[Tuple]): Ranges of detector coordinates to interpolate to. + + Returns: + np.ndarray: The calculated inverse deformation field (row/column) + """ + print( + "Calculating inverse deformation field, this might take a moment...", + ) + + # Interpolate to 2048x2048 grid of the detector coordinates + r_mesh, c_mesh = np.meshgrid( + np.linspace( + detector_ranges[0][0], + cdeform_field.shape[0], + detector_ranges[0][1], + endpoint=False, + ), + np.linspace( + detector_ranges[1][0], + cdeform_field.shape[1], + detector_ranges[1][1], + endpoint=False, + ), + sparse=False, + indexing="ij", + ) + + bin_step = ( + np.asarray(bin_ranges)[0:2][:, 1] - np.asarray(bin_ranges)[0:2][:, 0] + ) / cdeform_field.shape + rc_position = [] # row/column position in c/rdeform_field + r_dest = [] # destination pixel row position + c_dest = [] # destination pixel column position + for i in np.arange(cdeform_field.shape[0]): + for j in np.arange(cdeform_field.shape[1]): + if not np.isnan(rdeform_field[i, j]) and not np.isnan( + cdeform_field[i, j], + ): + rc_position.append( + [ + rdeform_field[i, j] + bin_ranges[0][0] / bin_step[0], + cdeform_field[i, j] + bin_ranges[0][0] / bin_step[1], + ], + ) + r_dest.append( + bin_step[0] * i + bin_ranges[0][0], + ) + c_dest.append( + bin_step[1] * j + bin_ranges[1][0], + ) + + ret = Parallel(n_jobs=2)( + delayed(griddata)(np.asarray(rc_position), np.asarray(arg), (r_mesh, c_mesh)) + for arg in [r_dest, c_dest] + ) + + inverse_dfield = np.asarray([ret[0], ret[1]]) + + return inverse_dfield
+ + +
[docs]def load_dfield(file: str) -> Tuple[np.ndarray, np.ndarray]:
+    """Load inverse dfield from file.
+
+    Args:
+        file (str): Path to file containing the inverse dfield
+
+    Returns:
+        Tuple[np.ndarray, np.ndarray]: the loaded row- and column-wise inverse
+        deformation fields
+    """
+    rdeform_field: np.ndarray = None
+    cdeform_field: np.ndarray = None
+
+    try:
+        dfield = np.load(file)
+        rdeform_field = dfield[0]
+        cdeform_field = dfield[1]
+
+    except FileNotFoundError:
+        pass
+
+    return rdeform_field, cdeform_field
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/core/config.html b/sed/2.1.0/_modules/sed/core/config.html
new file mode 100644
index 0000000..6849881
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/core/config.html
@@ -0,0 +1,721 @@
+ sed.core.config — SED 0.2.1 documentation

Source code for sed.core.config

+"""This module contains a config library for loading yaml/json files into dicts
+"""
+import copy
+import json
+import os
+import platform
+from importlib.util import find_spec
+from pathlib import Path
+from typing import Union
+
+import yaml
+from platformdirs import user_config_path
+
+package_dir = os.path.dirname(find_spec("sed").origin)
+
+USER_CONFIG_PATH = user_config_path(appname="sed", appauthor="OpenCOMPES", ensure_exists=True)
+
+
+
[docs]def parse_config(
+    config: Union[dict, str] = None,
+    folder_config: Union[dict, str] = None,
+    user_config: Union[dict, str] = None,
+    system_config: Union[dict, str] = None,
+    default_config: Union[
+        dict,
+        str,
+    ] = f"{package_dir}/config/default.yaml",
+    verbose: bool = True,
+) -> dict:
+    """Load the config dictionary from a file, or pass the provided config dictionary.
+    The content of the loaded config dictionary is then completed from a set of pre-configured
+    config files in hierarchical order, by adding missing items. These additional config files
+    are searched for in different places on the system, as detailed below. Alternatively, they
+    can also be passed as optional arguments (file path strings or dictionaries).
+
+    Args:
+        config (Union[dict, str], optional): config dictionary or file path.
+            Files can be *json* or *yaml*. Defaults to None.
+        folder_config (Union[dict, str], optional): working-folder-based config dictionary
+            or file path. The loaded dictionary is completed with the folder-based values,
+            taking preference over user, system and default values. Defaults to the file
+            "sed_config.yaml" in the current working directory.
+        user_config (Union[dict, str], optional): user-based config dictionary
+            or file path. The loaded dictionary is completed with the user-based values,
+            taking preference over system and default values.
+            Defaults to the file ".sed/config.yaml" in the current user's home directory.
+        system_config (Union[dict, str], optional): system-wide config dictionary
+            or file path. The loaded dictionary is completed with the system-wide values,
+            taking preference over default values. Defaults to the file "/etc/sed/config.yaml"
+            on linux, and "%ALLUSERSPROFILE%/sed/config.yaml" on windows.
+        default_config (Union[dict, str], optional): default config dictionary
+            or file path. The loaded dictionary is completed with the default values.
+            Defaults to *package_dir*/config/default.yaml.
+        verbose (bool, optional): Option to report loaded config files. Defaults to True.
+
+    Raises:
+        TypeError: Raised if the provided file is neither *json* nor *yaml*.
+        FileNotFoundError: Raised if the provided file is not found.
+
+    Returns:
+        dict: Loaded and possibly completed config dictionary.
+ """ + if config is None: + config = {} + + if isinstance(config, dict): + config_dict = copy.deepcopy(config) + else: + config_dict = load_config(config) + if verbose: + print(f"Configuration loaded from: [{str(Path(config).resolve())}]") + + folder_dict: dict = None + if isinstance(folder_config, dict): + folder_dict = copy.deepcopy(folder_config) + else: + if folder_config is None: + folder_config = "./sed_config.yaml" + if Path(folder_config).exists(): + folder_dict = load_config(folder_config) + if verbose: + print(f"Folder config loaded from: [{str(Path(folder_config).resolve())}]") + + user_dict: dict = None + if isinstance(user_config, dict): + user_dict = copy.deepcopy(user_config) + else: + if user_config is None: + user_config = str( + Path.home().joinpath(".sed").joinpath("config.yaml"), + ) + if Path(user_config).exists(): + user_dict = load_config(user_config) + if verbose: + print(f"User config loaded from: [{str(Path(user_config).resolve())}]") + + system_dict: dict = None + if isinstance(system_config, dict): + system_dict = copy.deepcopy(system_config) + else: + if system_config is None: + if platform.system() in ["Linux", "Darwin"]: + system_config = str( + Path("/etc/").joinpath("sed").joinpath("config.yaml"), + ) + elif platform.system() == "Windows": + system_config = str( + Path(os.environ["ALLUSERSPROFILE"]).joinpath("sed").joinpath("config.yaml"), + ) + if Path(system_config).exists(): + system_dict = load_config(system_config) + if verbose: + print(f"System config loaded from: [{str(Path(system_config).resolve())}]") + + if isinstance(default_config, dict): + default_dict = copy.deepcopy(default_config) + else: + default_dict = load_config(default_config) + if verbose: + print(f"Default config loaded from: [{str(Path(default_config).resolve())}]") + + if folder_dict is not None: + config_dict = complete_dictionary( + dictionary=config_dict, + base_dictionary=folder_dict, + ) + if user_dict is not None: + config_dict = complete_dictionary( + dictionary=config_dict, + base_dictionary=user_dict, + ) + if system_dict is not None: + config_dict = complete_dictionary( + dictionary=config_dict, + base_dictionary=system_dict, + ) + config_dict = complete_dictionary( + dictionary=config_dict, + base_dictionary=default_dict, + ) + + return config_dict
+ + +
[docs]def load_config(config_path: str) -> dict: + """Loads config parameter files. + + Args: + config_path (str): Path to the config file. Json or Yaml format are supported. + + Raises: + FileNotFoundError: Raised if the config file cannot be found. + TypeError: Raised if the provided file is neither *json* nor *yaml*. + + Returns: + dict: loaded config dictionary + """ + config_file = Path(config_path) + if not config_file.is_file(): + raise FileNotFoundError( + f"could not find the configuration file: {config_file}", + ) + + if config_file.suffix == ".json": + with open(config_file, encoding="utf-8") as stream: + config_dict = json.load(stream) + elif config_file.suffix == ".yaml": + with open(config_file, encoding="utf-8") as stream: + config_dict = yaml.safe_load(stream) + else: + raise TypeError("config file must be of type json or yaml!") + + return config_dict
+ + +
[docs]def save_config(config_dict: dict, config_path: str, overwrite: bool = False):
+    """Function to save a given config dictionary to a json or yaml file. Normally, it loads any
+    existing file of the given name, and keeps any existing dictionary keys not present in the
+    provided dictionary. The overwrite option discards the existing file content instead.
+
+    Args:
+        config_dict (dict): The dictionary to save.
+        config_path (str): A string containing the path to the file where to save the dictionary
+            to.
+        overwrite (bool, optional): Option to overwrite an existing file with the given dictionary.
+            Defaults to False.
+
+    Raises:
+        TypeError: Raised if the file is neither *json* nor *yaml*.
+    """
+    config_file = Path(config_path)
+    if config_file.is_file() and not overwrite:
+        existing_config = load_config(config_path=config_path)
+    else:
+        existing_config = {}
+
+    new_config = complete_dictionary(config_dict, existing_config)
+
+    if config_file.suffix == ".json":
+        with open(config_file, mode="w", encoding="utf-8") as stream:
+            json.dump(new_config, stream, indent=2)
+    elif config_file.suffix == ".yaml":
+        with open(config_file, mode="w", encoding="utf-8") as stream:
+            yaml.dump(new_config, stream)
+    else:
+        raise TypeError("config file must be of type json or yaml!")
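The merge-on-save behavior can be seen in a round-trip sketch with toy entries and a temporary file:

    import os
    import tempfile

    from sed.core.config import load_config, save_config

    path = os.path.join(tempfile.mkdtemp(), "sed_config.yaml")
    save_config({"core": {"loader": "mpes"}}, path)
    save_config({"binning": {"num_cores": 4}}, path)  # merged into the existing file
    assert load_config(path) == {"core": {"loader": "mpes"}, "binning": {"num_cores": 4}}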
+ + +
[docs]def complete_dictionary(dictionary: dict, base_dictionary: dict) -> dict: + """Iteratively completes a dictionary from a base dictionary, by adding keys that are missing + in the dictionary, and are present in the base dictionary. + + Args: + dictionary (dict): the dictionary to be completed. + base_dictionary (dict): the base dictionary. + + Returns: + dict: the completed (merged) dictionary + """ + if base_dictionary: + for k, v in base_dictionary.items(): + if isinstance(v, dict): + if k not in dictionary.keys(): + dictionary[k] = v + else: + if not isinstance(dictionary[k], dict): + raise ValueError( + "Cannot merge dictionaries. " + f"Mismatch on Key {k}: {dictionary[k]}, {v}.", + ) + dictionary[k] = complete_dictionary(dictionary[k], v) + else: + if k not in dictionary.keys(): + dictionary[k] = v + + return dictionary
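Since complete_dictionary only ever adds missing keys, recursively, a toy example makes the precedence explicit:

    from sed.core.config import complete_dictionary

    user = {"binning": {"num_cores": 4}}
    base = {"binning": {"num_cores": 8, "mode": "fast"}, "core": {"loader": "generic"}}
    merged = complete_dictionary(user, base)
    # Existing keys win; missing keys are filled in recursively:
    assert merged == {"binning": {"num_cores": 4, "mode": "fast"}, "core": {"loader": "generic"}}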
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/core/dfops.html b/sed/2.1.0/_modules/sed/core/dfops.html
new file mode 100644
index 0000000..abb4fd0
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/core/dfops.html
@@ -0,0 +1,932 @@
+ sed.core.dfops — SED 0.2.1 documentation

Source code for sed.core.dfops

+"""This module contains dataframe operations functions for the sed package
+
+"""
+# Note: some of the functions presented here were
+# inspired by https://github.com/mpes-kit/mpes
+from typing import Callable
+from typing import Sequence
+from typing import Union
+
+import dask.dataframe
+import numpy as np
+import pandas as pd
+from dask.diagnostics import ProgressBar
+
+
+
[docs]def apply_jitter(
+    df: Union[pd.DataFrame, dask.dataframe.DataFrame],
+    cols: Union[str, Sequence[str]],
+    cols_jittered: Union[str, Sequence[str]] = None,
+    amps: Union[float, Sequence[float]] = 0.5,
+    jitter_type: str = "uniform",
+) -> Union[pd.DataFrame, dask.dataframe.DataFrame]:
+    """Add jittering to one or more dataframe columns.
+
+    Args:
+        df (Union[pd.DataFrame, dask.dataframe.DataFrame]): Dataframe to add
+            noise/jittering to.
+        cols (Union[str, Sequence[str]]): Names of the columns to add jittering to.
+        cols_jittered (Union[str, Sequence[str]], optional): Names of the columns
+            with added jitter. Defaults to None, which appends "_jittered" to the
+            source column names.
+        amps (Union[float, Sequence[float]], optional): Amplitude scalings for the
+            jittering noise. If one number is given, the same is used for all axes.
+            For normal noise, amp is the standard deviation of the added noise; for
+            uniform noise, the noise covers the interval [-amp, +amp].
+            Defaults to 0.5.
+        jitter_type (str, optional): the type of jitter to add. 'uniform' or 'normal'
+            distributed noise. Defaults to "uniform".
+
+    Returns:
+        Union[pd.DataFrame, dask.dataframe.DataFrame]: dataframe with added columns.
+    """
+    assert cols is not None, "cols needs to be provided!"
+    assert jitter_type in (
+        "uniform",
+        "normal",
+    ), "type needs to be one of 'normal', 'uniform'!"
+
+    if isinstance(cols, str):
+        cols = [cols]
+    if isinstance(cols_jittered, str):
+        cols_jittered = [cols_jittered]
+    if cols_jittered is None:
+        cols_jittered = [col + "_jittered" for col in cols]
+    if isinstance(amps, float):
+        amps = list(np.ones(len(cols)) * amps)
+
+    colsize = df[cols[0]].size
+
+    if jitter_type == "uniform":
+        # Uniform jitter distribution
+        jitter = np.random.uniform(low=-1, high=1, size=colsize)
+    elif jitter_type == "normal":
+        # Normal jitter distribution works better for non-linear transformations and
+        # jitter sizes that don't match the original bin sizes
+        jitter = np.random.standard_normal(size=colsize)
+
+    for col, col_jittered, amp in zip(cols, cols_jittered, amps):
+        df[col_jittered] = df[col] + amp * jitter
+
+    return df
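A short sketch with a toy pandas dataframe (the function works the same way on dask dataframes):

    import numpy as np
    import pandas as pd

    from sed.core.dfops import apply_jitter

    df = pd.DataFrame({"X": np.arange(10)})
    df = apply_jitter(df, cols="X", amps=0.5)   # uniform jitter by default
    assert (df["X_jittered"] - df["X"]).abs().le(0.5).all()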
+ + +
[docs]def drop_column( + df: Union[pd.DataFrame, dask.dataframe.DataFrame], + column_name: Union[str, Sequence[str]], +) -> Union[pd.DataFrame, dask.dataframe.DataFrame]: + """Delete columns. + + Args: + df (Union[pd.DataFrame, dask.dataframe.DataFrame]): Dataframe to use. + column_name (Union[str, Sequence[str]])): List of column names to be dropped. + + Returns: + Union[pd.DataFrame, dask.dataframe.DataFrame]: Dataframe with dropped columns. + """ + out_df = df.drop(column_name, axis=1) + + return out_df
+ + +
[docs]def apply_filter(
+    df: Union[pd.DataFrame, dask.dataframe.DataFrame],
+    col: str,
+    lower_bound: float = -np.inf,
+    upper_bound: float = np.inf,
+) -> Union[pd.DataFrame, dask.dataframe.DataFrame]:
+    """Application of bound filters to a specified column (can be used consecutively).
+
+    Args:
+        df (Union[pd.DataFrame, dask.dataframe.DataFrame]): Dataframe to use.
+        col (str): Name of the column to filter. Passing "index" for col will
+            filter on the index in each dataframe partition.
+        lower_bound (float, optional): The lower bound used in the filtering.
+            Defaults to -np.inf.
+        upper_bound (float, optional): The upper bound used in the filtering.
+            Defaults to np.inf.
+
+    Returns:
+        Union[pd.DataFrame, dask.dataframe.DataFrame]: The filtered dataframe.
+    """
+    df = df.copy()
+    if col == "index":
+        df["index"] = df.index
+
+    out_df = df[(df[col] > lower_bound) & (df[col] < upper_bound)]
+
+    if col == "index":
+        out_df = drop_column(out_df, "index")
+
+    return out_df
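For example, with made-up values (note that both bounds are exclusive):

    import pandas as pd

    from sed.core.dfops import apply_filter

    df = pd.DataFrame({"X": [1.0, 5.0, 9.0]})
    filtered = apply_filter(df, col="X", lower_bound=2.0, upper_bound=8.0)
    assert filtered["X"].tolist() == [5.0]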
+ + +
[docs]def add_time_stamped_data(
+    df: dask.dataframe.DataFrame,
+    time_stamps: np.ndarray,
+    data: np.ndarray,
+    dest_column: str,
+    time_stamp_column: str,
+    **kwds,
+) -> dask.dataframe.DataFrame:
+    """Add data in form of timestamp/value pairs to the dataframe using interpolation to the
+    timestamps in the dataframe.
+
+    Args:
+        df (dask.dataframe.DataFrame): Dataframe to use.
+        time_stamps (np.ndarray): Time stamps of the values to add.
+        data (np.ndarray): Values corresponding to the time stamps in time_stamps.
+        dest_column (str): Destination column name.
+        time_stamp_column (str): Time stamp column name.
+
+    Raises:
+        ValueError: Raised if the time stamp column is missing, if time_stamps and
+            data differ in length, or if df is not a Dask dataframe.
+
+    Returns:
+        dask.dataframe.DataFrame: Dataframe with added column.
+    """
+    if time_stamp_column not in df.columns:
+        raise ValueError(f"{time_stamp_column} not found in dataframe!")
+
+    if len(time_stamps) != len(data):
+        raise ValueError("time_stamps and data have to be of same length!")
+
+    def interpolate_timestamps(
+        df: dask.dataframe.DataFrame,
+    ) -> dask.dataframe.DataFrame:
+        df_timestamps = df[time_stamp_column]
+        df[dest_column] = np.interp(df_timestamps, time_stamps, data)
+        return df
+
+    if not isinstance(df, dask.dataframe.DataFrame):
+        raise ValueError("This function only works for Dask Dataframes!")
+
+    df = df.map_partitions(interpolate_timestamps, **kwds)
+
+    return df
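A sketch with toy values, interpolating two timestamp/value pairs onto a dataframe with a hypothetical "timeStamps" column:

    import dask.dataframe as ddf
    import numpy as np
    import pandas as pd

    from sed.core.dfops import add_time_stamped_data

    pdf = pd.DataFrame({"timeStamps": [0.0, 1.0, 2.0, 3.0]})
    df = ddf.from_pandas(pdf, npartitions=2)
    df = add_time_stamped_data(
        df,
        time_stamps=np.array([0.0, 3.0]),
        data=np.array([100.0, 400.0]),
        dest_column="temperature",
        time_stamp_column="timeStamps",
    )
    assert df.compute()["temperature"].tolist() == [100.0, 200.0, 300.0, 400.0]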
+ + +
[docs]def map_columns_2d( + df: Union[pd.DataFrame, dask.dataframe.DataFrame], + map_2d: Callable, + x_column: str, + y_column: str, + **kwds, +) -> Union[pd.DataFrame, dask.dataframe.DataFrame]: + """Apply a 2-dimensional mapping simultaneously to two dimensions. + + Args: + df (Union[pd.DataFrame, dask.dataframe.DataFrame]): Dataframe to use. + map_2d (Callable): 2D mapping function. + x_column (str): The X column of the dataframe to apply mapping to. + y_column (str): The Y column of the dataframe to apply mapping to. + **kwds: Additional arguments for the 2D mapping function. + + Returns: + Union[pd.DataFrame, dask.dataframe.DataFrame]: Dataframe with mapped columns. + """ + new_x_column = kwds.pop("new_x_column", x_column) + new_y_column = kwds.pop("new_y_column", y_column) + + (df[new_x_column], df[new_y_column]) = map_2d( + df[x_column], + df[y_column], + **kwds, + ) + + return df
+ + +
[docs]def forward_fill_lazy( + df: dask.dataframe.DataFrame, + columns: Sequence[str] = None, + before: Union[str, int] = "max", + compute_lengths: bool = False, + iterations: int = 2, +) -> dask.dataframe.DataFrame: + """Forward fill the specified columns multiple times in a dask dataframe. + + Allows forward filling between partitions. This is useful for dataframes + that have sparse data, such as those with many NaNs. + Running the forward filling multiple times can fix the issue of having + entire partitions consisting of NaNs. By default we run this twice, which + is enough to fix the issue for dataframes with no consecutive partitions of NaNs. + + Args: + df (dask.dataframe.DataFrame): The dataframe to forward fill. + columns (list): The columns to forward fill. If None, fills all columns + before (int, str, optional): The number of rows to include before the current partition. + if 'max' it takes as much as possible from the previous partition, which is + the size of the smallest partition in the dataframe. Defaults to 'max'. + compute_lengths (bool, optional): Whether to compute the length of each partition + iterations (int, optional): The number of times to forward fill the dataframe. + + Returns: + dask.dataframe.DataFrame: The dataframe with the specified columns forward filled. + """ + if columns is None: + columns = df.columns + elif isinstance(columns, str): + columns = [columns] + elif len(columns) == 0: + raise ValueError("columns must be a non-empty list of strings!") + for c in columns: + if c not in df.columns: + raise KeyError(f"{c} not in dataframe!") + + # Define a custom function to forward fill specified columns + def forward_fill_partition(df): + df[columns] = df[columns].ffill() + return df + + # calculate the number of rows in each partition and choose least + if before == "max": + nrows = df.map_partitions(len) + if compute_lengths: + with ProgressBar(): + print("Computing dataframe shape...") + nrows = nrows.compute() + before = min(nrows) + elif not isinstance(before, int): + raise TypeError('before must be an integer or "max"') + # Use map_overlap to apply forward_fill_partition + for _ in range(iterations): + df = df.map_overlap( + forward_fill_partition, + before=before, + after=0, + ) + return df
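A sketch with a toy dask dataframe whose NaN runs cross partition borders:

    import dask.dataframe as ddf
    import numpy as np
    import pandas as pd

    from sed.core.dfops import forward_fill_lazy

    pdf = pd.DataFrame({"x": [1.0, np.nan, np.nan, 4.0, np.nan, np.nan]})
    df = ddf.from_pandas(pdf, npartitions=3)  # NaNs straddle partition borders
    df = forward_fill_lazy(df, columns=["x"])
    assert df.compute()["x"].tolist() == [1.0, 1.0, 1.0, 4.0, 4.0, 4.0]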
+ + +
[docs]def backward_fill_lazy(
+    df: dask.dataframe.DataFrame,
+    columns: Sequence[str] = None,
+    after: Union[str, int] = "max",
+    compute_lengths: bool = False,
+    iterations: int = 1,
+) -> dask.dataframe.DataFrame:
+    """Backward fill the specified columns multiple times in a dask dataframe.
+
+    Allows backward filling between partitions. Similar to forward fill, but backwards.
+    This helps to fill the initial values of a dataframe, which are often NaNs.
+    Use with care, as the assumption that values were the same in the past is often not true.
+
+    Args:
+        df (dask.dataframe.DataFrame): The dataframe to backward fill.
+        columns (list): The columns to backward fill. If None, fills all columns.
+        after (int, str, optional): The number of rows to include after the current partition.
+            If 'max' it takes as much as possible from the following partition, which is
+            the size of the smallest partition in the dataframe. Defaults to 'max'.
+        compute_lengths (bool, optional): Whether to compute the length of each partition.
+        iterations (int, optional): The number of times to backward fill the dataframe.
+
+    Returns:
+        dask.dataframe.DataFrame: The dataframe with the specified columns backward filled.
+    """
+    if columns is None:
+        columns = df.columns
+    elif isinstance(columns, str):
+        columns = [columns]
+    elif len(columns) == 0:
+        raise ValueError("columns must be a non-empty list of strings!")
+    for c in columns:
+        if c not in df.columns:
+            raise KeyError(f"{c} not in dataframe!")
+
+    # Define a custom function to backward fill specified columns
+    def backward_fill_partition(df):
+        df[columns] = df[columns].bfill()
+        return df
+
+    # calculate the number of rows in each partition and choose the least
+    if after == "max":
+        nrows = df.map_partitions(len)
+        if compute_lengths:
+            with ProgressBar():
+                print("Computing dataframe shape...")
+                nrows = nrows.compute()
+        after = min(nrows)
+    elif not isinstance(after, int):
+        raise TypeError('after must be an integer or "max"')
+    # Use map_overlap to apply backward_fill_partition
+    for _ in range(iterations):
+        df = df.map_overlap(
+            backward_fill_partition,
+            before=0,
+            after=after,
+        )
+    return df
+ + +
[docs]def offset_by_other_columns(
+    df: dask.dataframe.DataFrame,
+    target_column: str,
+    offset_columns: Union[str, Sequence[str]],
+    weights: Union[float, Sequence[float]],
+    reductions: Union[str, Sequence[str]] = None,
+    preserve_mean: Union[bool, Sequence[bool]] = False,
+    inplace: bool = True,
+    rename: str = None,
+) -> dask.dataframe.DataFrame:
+    """Apply an offset to a column based on the values of other columns.
+
+    Args:
+        df (dask.dataframe.DataFrame): Dataframe to use. Currently supports only dask dataframes.
+        target_column (str): Name of the column to apply the offset to.
+        offset_columns (Union[str, Sequence[str]]): Name of the column(s) to use for the offset.
+        weights (Union[float, Sequence[float]]): Weights to apply on each column before adding.
+            Also used for changing sign.
+        reductions (Union[str, Sequence[str]], optional): Reduction function to use for the
+            offset. If None (the default), the row-wise values of the offset columns are used.
+            Currently, only "mean" is supported as a reduction.
+        preserve_mean (Union[bool, Sequence[bool]], optional): Whether to subtract the mean of the
+            offset column. Defaults to False. If a list is given, it must have the same length as
+            offset_columns. Otherwise the value passed is used for all columns.
+        inplace (bool, optional): Whether to apply the offset inplace.
+            If false, the new column will have the name provided by rename, or has the same name
+            as target_column with the suffix _offset if that is None. Defaults to True.
+        rename (str, optional): Name of the new column if inplace is False. Defaults to None.
+
+    Returns:
+        dask.dataframe.DataFrame: Dataframe with the new column.
+    """
+    if target_column not in df.columns:
+        raise KeyError(f"{target_column} not in dataframe!")
+
+    if isinstance(offset_columns, str):
+        offset_columns = [offset_columns]
+    elif not isinstance(offset_columns, Sequence):
+        raise TypeError(f"Invalid type for columns: {type(offset_columns)}")
+    if any(c not in df.columns for c in offset_columns):
+        raise KeyError(f"{offset_columns} not in dataframe!")
+
+    if isinstance(weights, (int, float, np.floating, np.integer)):
+        weights = [weights]
+    elif not isinstance(weights, Sequence):
+        raise TypeError(f"Invalid type for weights: {type(weights)}")
+    if len(weights) != len(offset_columns):
+        raise ValueError("weights and offset_columns must have the same length!")
+    signs_dict = dict(zip(offset_columns, weights))
+
+    if isinstance(reductions, str) or reductions is None:
+        reductions = [reductions] * len(offset_columns)
+    elif not isinstance(reductions, Sequence):
+        raise ValueError(f"reductions must be a string or list of strings! not {type(reductions)}")
+    if any(r not in ["mean", None] for r in reductions):
+        raise NotImplementedError("The only reduction currently supported is 'mean'!")
+
+    if isinstance(preserve_mean, bool):
+        preserve_mean = [preserve_mean] * len(offset_columns)
+    elif not isinstance(preserve_mean, Sequence):
+        raise TypeError(f"Invalid type for preserve_mean: {type(preserve_mean)}")
+    elif any(not isinstance(p, bool) for p in preserve_mean):
+        raise TypeError(f"Invalid type for preserve_mean: {type(preserve_mean)}")
+    if len(preserve_mean) != len(offset_columns):
+        raise ValueError("preserve_mean and offset_columns must have the same length!")
+
+    if not inplace:
+        if rename is None:
+            rename = target_column + "_offset"
+        df[rename] = df[target_column]
+        target_column = rename
+
+    if isinstance(df, pd.DataFrame):
+        raise NotImplementedError(
+            "Offsetting by other columns is currently not supported for pandas dataframes! "
+            "Please open a request on GitHub if this feature is required.",
+        )
+
+    # calculate the mean of the columns to reduce
+    means = {
+        col: dask.delayed(df[col].mean())
+        for col, red, pm in zip(offset_columns, reductions, preserve_mean)
+        if red or pm
+    }
+
+    # define the functions to apply the offsets
+    def shift_by_mean(x, cols, signs, means, flip_signs=False):
+        """Shift the target column by the mean of the offset columns."""
+        for col in cols:
+            s = -signs[col] if flip_signs else signs[col]
+            x[target_column] = x[target_column] + s * means[col]
+        return x[target_column]
+
+    def shift_by_row(x, cols, signs):
+        """Apply the offsets to the target column."""
+        for col in cols:
+            x[target_column] = x[target_column] + signs[col] * x[col]
+        return x[target_column]
+
+    # apply offset from the reduced columns
+    df[target_column] = df.map_partitions(
+        shift_by_mean,
+        cols=[col for col, red in zip(offset_columns, reductions) if red],
+        signs=signs_dict,
+        means=means,
+        meta=df[target_column].dtype,
+    )
+
+    # apply offset from the offset columns
+    df[target_column] = df.map_partitions(
+        shift_by_row,
+        cols=[col for col, red in zip(offset_columns, reductions) if not red],
+        signs=signs_dict,
+        meta=df[target_column].dtype,
+    )
+
+    # compensate shift from the preserved mean columns
+    if any(preserve_mean):
+        df[target_column] = df.map_partitions(
+            shift_by_mean,
+            cols=[col for col, pmean in zip(offset_columns, preserve_mean) if pmean],
+            signs=signs_dict,
+            means=means,
+            flip_signs=True,
+            meta=df[target_column].dtype,
+        )
+
+    return df
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/core/metadata.html b/sed/2.1.0/_modules/sed/core/metadata.html
new file mode 100644
index 0000000..ce8be36
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/core/metadata.html
@@ -0,0 +1,613 @@
+ sed.core.metadata — SED 0.2.1 documentation

Source code for sed.core.metadata

+"""This is a metadata handler class from the sed package
+"""
+import json
+from copy import deepcopy
+from typing import Any
+from typing import Dict
+
+from sed.core.config import complete_dictionary
+
+
+
[docs]class MetaHandler:
+    """This class provides methods to manipulate metadata dictionaries,
+    and to give a nice representation of them."""
+
+    def __init__(self, meta: Dict = None) -> None:
+        self._m = deepcopy(meta) if meta is not None else {}
+
+    def __getitem__(self, val: Any) -> Any:
+        return self._m[val]
+
+    def __repr__(self) -> str:
+        return json.dumps(self._m, default=str, indent=4)
+
+    def _format_attributes(self, attributes, indent=0):
+        INDENT_FACTOR = 20
+        html = ""
+        for key, value in attributes.items():
+            # Format key
+            formatted_key = key.replace("_", " ").title()
+            formatted_key = f"<b>{formatted_key}</b>"
+
+            html += f"<div style='padding-left: {indent * INDENT_FACTOR}px;'>"
+            if isinstance(value, dict):
+                html += f"<details><summary>{formatted_key} [{key}]</summary>"
+                html += self._format_attributes(value, indent + 1)
+                html += "</details>"
+            elif hasattr(value, "shape"):
+                html += f"{formatted_key} [{key}]: {value.shape}"
+            else:
+                html += f"{formatted_key} [{key}]: {value}"
+            html += "</div>"
+        return html
+
+    def _repr_html_(self) -> str:
+        html = self._format_attributes(self._m)
+        return html
+
+    @property
+    def metadata(self) -> Dict:
+        """Property returning the metadata dict.
+
+        Returns:
+            dict: Dictionary of metadata.
+        """
+        return self._m
+
[docs] def add(
+        self,
+        entry: Any,
+        name: str,
+        duplicate_policy: str = "raise",
+    ) -> None:
+        """Add an entry to the metadata container.
+
+        Args:
+            entry: dictionary containing the metadata to add.
+            name: name of the dictionary key under which to add entry.
+            duplicate_policy: Control behavior in case the 'name' key
+                is already present in the metadata dictionary. Can be any of:
+
+                - "raise": raises a DuplicateEntryError.
+                - "overwrite": overwrites the previous data with the new one.
+                - "merge": If ``entry`` is a dictionary, recursively merges it
+                  into the existing one, keeping existing entries unchanged. Otherwise
+                  the same as "overwrite".
+                - "append": adds a trailing number, keeping both entries.
+
+        Raises:
+            DuplicateEntryError: Raised if an entry already exists.
+        """
+        if name not in self._m.keys() or duplicate_policy == "overwrite":
+            self._m[name] = deepcopy(entry)
+        elif duplicate_policy == "raise":
+            raise DuplicateEntryError(
+                f"an entry {name} already exists in metadata",
+            )
+        elif duplicate_policy == "append":
+            i = 0
+            while True:
+                i += 1
+                newname = f"{name}_{i}"
+                if newname not in self._m.keys():
+                    break
+            self._m[newname] = deepcopy(entry)
+
+        elif duplicate_policy == "merge":
+            if isinstance(self._m[name], dict):
+                if not isinstance(entry, dict):
+                    raise ValueError(
+                        "Cannot merge dictionary with non-dictionary entry!",
+                    )
+                complete_dictionary(self._m[name], deepcopy(entry))
+            else:
+                self._m[name] = deepcopy(entry)
+
+        else:
+            raise ValueError(
+                f"could not interpret duplication handling method {duplicate_policy}. "
+                f"Please choose between raise, overwrite, merge, or append.",
+            )
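A usage sketch of the duplicate policies with toy entries:

    from sed.core.metadata import MetaHandler

    meta = MetaHandler()
    meta.add({"voltage": 10}, name="lens")
    meta.add({"current": 0.5}, name="lens", duplicate_policy="merge")
    assert meta["lens"] == {"voltage": 10, "current": 0.5}
    meta.add({"voltage": 20}, name="lens", duplicate_policy="append")  # stored as "lens_1"
    assert meta["lens_1"] == {"voltage": 20}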
+ + +
[docs]class DuplicateEntryError(Exception): + """Exception raised when attempting to add a duplicate entry to the metadata container. + + Attributes: + message -- explanation of the error + """ + + def __init__(self, message: str = "An entry already exists in metadata"): + self.message = message + super().__init__(self.message) + + def __str__(self): + return f"{self.__class__.__name__}: {self.message}"
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/core/processor.html b/sed/2.1.0/_modules/sed/core/processor.html
new file mode 100644
index 0000000..8863f7f
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/core/processor.html
@@ -0,0 +1,3038 @@
+ sed.core.processor — SED 0.2.1 documentation

Source code for sed.core.processor

+"""This module contains the core class for the sed package
+
+"""
+import pathlib
+from datetime import datetime
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+import dask.dataframe as ddf
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+import psutil
+import xarray as xr
+
+from sed.binning import bin_dataframe
+from sed.binning.binning import normalization_histogram_from_timed_dataframe
+from sed.binning.binning import normalization_histogram_from_timestamps
+from sed.calibrator import DelayCalibrator
+from sed.calibrator import EnergyCalibrator
+from sed.calibrator import MomentumCorrector
+from sed.core.config import parse_config
+from sed.core.config import save_config
+from sed.core.dfops import add_time_stamped_data
+from sed.core.dfops import apply_filter
+from sed.core.dfops import apply_jitter
+from sed.core.metadata import MetaHandler
+from sed.diagnostics import grid_histogram
+from sed.io import to_h5
+from sed.io import to_nexus
+from sed.io import to_tiff
+from sed.loader import CopyTool
+from sed.loader import get_loader
+from sed.loader.mpes.loader import get_archiver_data
+from sed.loader.mpes.loader import MpesLoader
+
+N_CPU = psutil.cpu_count()
+
+
+
[docs]class SedProcessor: + """Processor class of sed. Contains wrapper functions defining a work flow for data + correction, calibration and binning. + + Args: + metadata (dict, optional): Dict of external Metadata. Defaults to None. + config (Union[dict, str], optional): Config dictionary or config file name. + Defaults to None. + dataframe (Union[pd.DataFrame, ddf.DataFrame], optional): dataframe to load + into the class. Defaults to None. + files (List[str], optional): List of files to pass to the loader defined in + the config. Defaults to None. + folder (str, optional): Folder containing files to pass to the loader + defined in the config. Defaults to None. + runs (Sequence[str], optional): List of run identifiers to pass to the loader + defined in the config. Defaults to None. + collect_metadata (bool): Option to collect metadata from files. + Defaults to False. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"] or False. + **kwds: Keyword arguments passed to the reader. + """ + + def __init__( + self, + metadata: dict = None, + config: Union[dict, str] = None, + dataframe: Union[pd.DataFrame, ddf.DataFrame] = None, + files: List[str] = None, + folder: str = None, + runs: Sequence[str] = None, + collect_metadata: bool = False, + verbose: bool = None, + **kwds, + ): + """Processor class of sed. Contains wrapper functions defining a work flow + for data correction, calibration, and binning. + + Args: + metadata (dict, optional): Dict of external Metadata. Defaults to None. + config (Union[dict, str], optional): Config dictionary or config file name. + Defaults to None. + dataframe (Union[pd.DataFrame, ddf.DataFrame], optional): dataframe to load + into the class. Defaults to None. + files (List[str], optional): List of files to pass to the loader defined in + the config. Defaults to None. + folder (str, optional): Folder containing files to pass to the loader + defined in the config. Defaults to None. + runs (Sequence[str], optional): List of run identifiers to pass to the loader + defined in the config. Defaults to None. + collect_metadata (bool, optional): Option to collect metadata from files. + Defaults to False. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"] or False. + **kwds: Keyword arguments passed to parse_config and to the reader. 
+ """ + config_kwds = { + key: value for key, value in kwds.items() if key in parse_config.__code__.co_varnames + } + for key in config_kwds.keys(): + del kwds[key] + self._config = parse_config(config, **config_kwds) + num_cores = self._config.get("binning", {}).get("num_cores", N_CPU - 1) + if num_cores >= N_CPU: + num_cores = N_CPU - 1 + self._config["binning"]["num_cores"] = num_cores + + if verbose is None: + self.verbose = self._config["core"].get("verbose", False) + else: + self.verbose = verbose + + self._dataframe: Union[pd.DataFrame, ddf.DataFrame] = None + self._timed_dataframe: Union[pd.DataFrame, ddf.DataFrame] = None + self._files: List[str] = [] + + self._binned: xr.DataArray = None + self._pre_binned: xr.DataArray = None + self._normalization_histogram: xr.DataArray = None + self._normalized: xr.DataArray = None + + self._attributes = MetaHandler(meta=metadata) + + loader_name = self._config["core"]["loader"] + self.loader = get_loader( + loader_name=loader_name, + config=self._config, + ) + + self.ec = EnergyCalibrator( + loader=get_loader( + loader_name=loader_name, + config=self._config, + ), + config=self._config, + ) + + self.mc = MomentumCorrector( + config=self._config, + ) + + self.dc = DelayCalibrator( + config=self._config, + ) + + self.use_copy_tool = self._config.get("core", {}).get( + "use_copy_tool", + False, + ) + if self.use_copy_tool: + try: + self.ct = CopyTool( + source=self._config["core"]["copy_tool_source"], + dest=self._config["core"]["copy_tool_dest"], + **self._config["core"].get("copy_tool_kwds", {}), + ) + except KeyError: + self.use_copy_tool = False + + # Load data if provided: + if dataframe is not None or files is not None or folder is not None or runs is not None: + self.load( + dataframe=dataframe, + metadata=metadata, + files=files, + folder=folder, + runs=runs, + collect_metadata=collect_metadata, + **kwds, + ) + + def __repr__(self): + if self._dataframe is None: + df_str = "Dataframe: No Data loaded" + else: + df_str = self._dataframe.__repr__() + pretty_str = df_str + "\n" + "Metadata: " + "\n" + self._attributes.__repr__() + return pretty_str + + def _repr_html_(self): + html = "<div>" + + if self._dataframe is None: + df_html = "Dataframe: No Data loaded" + else: + df_html = self._dataframe._repr_html_() + + html += f"<details><summary>Dataframe</summary>{df_html}</details>" + + # Add expandable section for attributes + html += "<details><summary>Metadata</summary>" + html += "<div style='padding-left: 10px;'>" + html += self._attributes._repr_html_() + html += "</div></details>" + + html += "</div>" + + return html + + ## Suggestion: + # @property + # def overview_panel(self): + # """Provides an overview panel with plots of different data attributes.""" + # self.view_event_histogram(dfpid=2, backend="matplotlib") + + @property + def dataframe(self) -> Union[pd.DataFrame, ddf.DataFrame]: + """Accessor to the underlying dataframe. + + Returns: + Union[pd.DataFrame, ddf.DataFrame]: Dataframe object. + """ + return self._dataframe + + @dataframe.setter + def dataframe(self, dataframe: Union[pd.DataFrame, ddf.DataFrame]): + """Setter for the underlying dataframe. + + Args: + dataframe (Union[pd.DataFrame, ddf.DataFrame]): The dataframe object to set. 
+ """ + if not isinstance(dataframe, (pd.DataFrame, ddf.DataFrame)) or not isinstance( + dataframe, + self._dataframe.__class__, + ): + raise ValueError( + "'dataframe' has to be a Pandas or Dask dataframe and has to be of the same kind " + "as the dataframe loaded into the SedProcessor!.\n" + f"Loaded type: {self._dataframe.__class__}, provided type: {dataframe}.", + ) + self._dataframe = dataframe + + @property + def timed_dataframe(self) -> Union[pd.DataFrame, ddf.DataFrame]: + """Accessor to the underlying timed_dataframe. + + Returns: + Union[pd.DataFrame, ddf.DataFrame]: Timed Dataframe object. + """ + return self._timed_dataframe + + @timed_dataframe.setter + def timed_dataframe(self, timed_dataframe: Union[pd.DataFrame, ddf.DataFrame]): + """Setter for the underlying timed dataframe. + + Args: + timed_dataframe (Union[pd.DataFrame, ddf.DataFrame]): The timed dataframe object to set + """ + if not isinstance(timed_dataframe, (pd.DataFrame, ddf.DataFrame)) or not isinstance( + timed_dataframe, + self._timed_dataframe.__class__, + ): + raise ValueError( + "'timed_dataframe' has to be a Pandas or Dask dataframe and has to be of the same " + "kind as the dataframe loaded into the SedProcessor!.\n" + f"Loaded type: {self._timed_dataframe.__class__}, " + f"provided type: {timed_dataframe}.", + ) + self._timed_dataframe = timed_dataframe + + @property + def attributes(self) -> MetaHandler: + """Accessor to the metadata dict. + + Returns: + MetaHandler: The metadata object + """ + return self._attributes + +
[docs] def add_attribute(self, attributes: dict, name: str, **kwds): + """Function to add element to the attributes dict. + + Args: + attributes (dict): The attributes dictionary object to add. + name (str): Key under which to add the dictionary to the attributes. + """ + self._attributes.add( + entry=attributes, + name=name, + **kwds, + )
+ + @property + def config(self) -> Dict[Any, Any]: + """Getter attribute for the config dictionary + + Returns: + Dict: The config dictionary. + """ + return self._config + + @property + def files(self) -> List[str]: + """Getter attribute for the list of files + + Returns: + List[str]: The list of loaded files + """ + return self._files + + @property + def binned(self) -> xr.DataArray: + """Getter attribute for the binned data array + + Returns: + xr.DataArray: The binned data array + """ + if self._binned is None: + raise ValueError("No binned data available, need to compute histogram first!") + return self._binned + + @property + def normalized(self) -> xr.DataArray: + """Getter attribute for the normalized data array + + Returns: + xr.DataArray: The normalized data array + """ + if self._normalized is None: + raise ValueError( + "No normalized data available, compute data with normalization enabled!", + ) + return self._normalized + + @property + def normalization_histogram(self) -> xr.DataArray: + """Getter attribute for the normalization histogram + + Returns: + xr.DataArray: The normalization histogram + """ + if self._normalization_histogram is None: + raise ValueError("No normalization histogram available, generate histogram first!") + return self._normalization_histogram + +
[docs]    def cpy(self, path: Union[str, List[str]]) -> Union[str, List[str]]:
+        """Function to mirror a list of files or a folder from a network drive to
+        local storage. Returns either the original path(s), or the path(s) of the
+        local copy. The option to use this functionality is set by
+        config["core"]["use_copy_tool"].
+
+        Args:
+            path (Union[str, List[str]]): Source path or path list.
+
+        Returns:
+            Union[str, List[str]]: Source or destination path or path list.
+        """
+        if self.use_copy_tool:
+            if isinstance(path, list):
+                path_out = []
+                for file in path:
+                    path_out.append(self.ct.copy(file))
+                return path_out
+
+            return self.ct.copy(path)
+
+        return path
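+    # Usage sketch (illustrative): the copy tool is activated purely via the
+    # config; the loader name and paths below are placeholder assumptions.
+    #
+    #     config = {
+    #         "core": {
+    #             "loader": "mpes",
+    #             "use_copy_tool": True,
+    #             "copy_tool_source": "/network/drive/data",
+    #             "copy_tool_dest": "/local/scratch/data",
+    #         },
+    #     }
+    #     sp = SedProcessor(config=config)
+    #     local_paths = sp.cpy(["/network/drive/data/run_01.h5"])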
+ +
[docs]    def load(
+        self,
+        dataframe: Union[pd.DataFrame, ddf.DataFrame] = None,
+        metadata: dict = None,
+        files: List[str] = None,
+        folder: str = None,
+        runs: Sequence[str] = None,
+        collect_metadata: bool = False,
+        **kwds,
+    ):
+        """Load tabular data of single events into the dataframe object in the class.
+
+        Args:
+            dataframe (Union[pd.DataFrame, ddf.DataFrame], optional): data in tabular
+                format. Accepts anything which can be interpreted by pd.DataFrame as
+                an input. Defaults to None.
+            metadata (dict, optional): Dict of external Metadata. Defaults to None.
+            files (List[str], optional): List of file paths to pass to the loader.
+                Defaults to None.
+            folder (str, optional): Folder path to pass to the loader.
+                Defaults to None.
+            runs (Sequence[str], optional): List of run identifiers to pass to the
+                loader. Defaults to None.
+            collect_metadata (bool, optional): Option for collecting metadata in the
+                reader. Defaults to False.
+            **kwds: Keyword parameters passed to the reader.
+
+        Raises:
+            ValueError: Raised if no valid input is provided.
+        """
+        if metadata is None:
+            metadata = {}
+        if dataframe is not None:
+            timed_dataframe = kwds.pop("timed_dataframe", None)
+        elif runs is not None:
+            # If runs are provided, we only use the copy tool if also a folder is provided.
+            # In that case, we copy the whole provided base folder tree, and pass the copied
+            # version to the loader as base folder to look for the runs.
+            if folder is not None:
+                dataframe, timed_dataframe, metadata = self.loader.read_dataframe(
+                    folders=cast(str, self.cpy(folder)),
+                    runs=runs,
+                    metadata=metadata,
+                    collect_metadata=collect_metadata,
+                    **kwds,
+                )
+            else:
+                dataframe, timed_dataframe, metadata = self.loader.read_dataframe(
+                    runs=runs,
+                    metadata=metadata,
+                    collect_metadata=collect_metadata,
+                    **kwds,
+                )
+
+        elif folder is not None:
+            dataframe, timed_dataframe, metadata = self.loader.read_dataframe(
+                folders=cast(str, self.cpy(folder)),
+                metadata=metadata,
+                collect_metadata=collect_metadata,
+                **kwds,
+            )
+        elif files is not None:
+            dataframe, timed_dataframe, metadata = self.loader.read_dataframe(
+                files=cast(List[str], self.cpy(files)),
+                metadata=metadata,
+                collect_metadata=collect_metadata,
+                **kwds,
+            )
+        else:
+            raise ValueError(
+                "Either 'dataframe', 'files', 'folder', or 'runs' needs to be provided!",
+            )
+
+        self._dataframe = dataframe
+        self._timed_dataframe = timed_dataframe
+        self._files = self.loader.files
+
+        for key in metadata:
+            self._attributes.add(
+                entry=metadata[key],
+                name=key,
+                duplicate_policy="merge",
+            )
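+    # Usage sketch (paths and run numbers are illustrative): the three loading
+    # modes accepted by load().
+    #
+    #     sp.load(folder="/data/2024/scan_042")            # load a whole folder
+    #     sp.load(files=["f_000.h5", "f_001.h5"])          # or explicit files
+    #     sp.load(runs=["44824"], folder="/data/2024")     # or run identifiers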
+ +
[docs]    def filter_column(
+        self,
+        column: str,
+        min_value: float = -np.inf,
+        max_value: float = np.inf,
+    ) -> None:
+        """Filter values in a column which are outside of a given range.
+
+        Args:
+            column (str): Name of the column to filter.
+            min_value (float, optional): Minimum value to keep. Defaults to -np.inf.
+            max_value (float, optional): Maximum value to keep. Defaults to np.inf.
+        """
+        if column != "index" and column not in self._dataframe.columns:
+            raise KeyError(f"Column {column} not found in dataframe!")
+        if min_value >= max_value:
+            raise ValueError("min_value has to be smaller than max_value!")
+        if self._dataframe is not None:
+            self._dataframe = apply_filter(
+                self._dataframe,
+                col=column,
+                lower_bound=min_value,
+                upper_bound=max_value,
+            )
+        if self._timed_dataframe is not None and column in self._timed_dataframe.columns:
+            self._timed_dataframe = apply_filter(
+                self._timed_dataframe,
+                column,
+                lower_bound=min_value,
+                upper_bound=max_value,
+            )
+        metadata = {
+            "filter": {
+                "column": column,
+                "min_value": min_value,
+                "max_value": max_value,
+            },
+        }
+        self._attributes.add(metadata, "filter", duplicate_policy="merge")
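+    # Usage sketch (column name and bounds are assumptions): keep only events
+    # with a detector X position inside a window.
+    #
+    #     sp.filter_column("X", min_value=100, max_value=1800)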
+ + # Momentum calibration workflow + # 1. Bin raw detector data for distortion correction +
[docs]    def bin_and_load_momentum_calibration(
+        self,
+        df_partitions: Union[int, Sequence[int]] = 100,
+        axes: List[str] = None,
+        bins: List[int] = None,
+        ranges: Sequence[Tuple[float, float]] = None,
+        plane: int = 0,
+        width: int = 5,
+        apply: bool = False,
+        **kwds,
+    ):
+        """1st step of the momentum correction workflow. Function to do an initial binning
+        of the dataframe loaded to the class, slice a plane from it using an
+        interactive view, and load it into the momentum corrector class.
+
+        Args:
+            df_partitions (Union[int, Sequence[int]], optional): Number of dataframe partitions
+                to use for the initial binning. Defaults to 100.
+            axes (List[str], optional): Axes to bin.
+                Defaults to config["momentum"]["axes"].
+            bins (List[int], optional): Bin numbers to use for binning.
+                Defaults to config["momentum"]["bins"].
+            ranges (Sequence[Tuple[float, float]], optional): Ranges to use for binning.
+                Defaults to config["momentum"]["ranges"].
+            plane (int, optional): Initial value for the plane slider. Defaults to 0.
+            width (int, optional): Initial value for the width slider. Defaults to 5.
+            apply (bool, optional): Option to directly apply the values and select the
+                slice. Defaults to False.
+            **kwds: Keyword arguments passed to the pre_binning function.
+        """
+        self._pre_binned = self.pre_binning(
+            df_partitions=df_partitions,
+            axes=axes,
+            bins=bins,
+            ranges=ranges,
+            **kwds,
+        )
+
+        self.mc.load_data(data=self._pre_binned)
+        self.mc.select_slicer(plane=plane, width=width, apply=apply)
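+    # Usage sketch (slider values are assumptions; without further arguments the
+    # binning parameters from config["momentum"] are used):
+    #
+    #     sp.bin_and_load_momentum_calibration(
+    #         df_partitions=100,
+    #         plane=33,
+    #         width=10,
+    #         apply=True,
+    #     )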
+ + # 2. Generate the spline warp correction from momentum features. + # Either autoselect features, or input features from view above. +
[docs]    def define_features(
+        self,
+        features: np.ndarray = None,
+        rotation_symmetry: int = 6,
+        auto_detect: bool = False,
+        include_center: bool = True,
+        apply: bool = False,
+        **kwds,
+    ):
+        """2. step of the distortion correction workflow: Define feature points in
+        momentum space. They can either be selected manually using a GUI tool, be
+        provided as a list of feature points, or be auto-generated using a
+        feature-detection algorithm.
+
+        Args:
+            features (np.ndarray, optional): np.ndarray of features. Defaults to None.
+            rotation_symmetry (int, optional): Number of rotational symmetry axes.
+                Defaults to 6.
+            auto_detect (bool, optional): Whether to auto-detect the features.
+                Defaults to False.
+            include_center (bool, optional): Option to include a point at the center
+                in the feature list. Defaults to True.
+            apply (bool, optional): Option to directly apply the values and select the
+                slice. Defaults to False.
+            **kwds: Keyword arguments for ``MomentumCorrector.feature_extract()`` and
+                ``MomentumCorrector.feature_select()``.
+        """
+        if auto_detect:  # automatic feature selection
+            sigma = kwds.pop("sigma", self._config["momentum"]["sigma"])
+            fwhm = kwds.pop("fwhm", self._config["momentum"]["fwhm"])
+            sigma_radius = kwds.pop(
+                "sigma_radius",
+                self._config["momentum"]["sigma_radius"],
+            )
+            self.mc.feature_extract(
+                sigma=sigma,
+                fwhm=fwhm,
+                sigma_radius=sigma_radius,
+                rotsym=rotation_symmetry,
+                **kwds,
+            )
+            features = self.mc.peaks
+
+        self.mc.feature_select(
+            rotsym=rotation_symmetry,
+            include_center=include_center,
+            features=features,
+            apply=apply,
+            **kwds,
+        )
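+    # Usage sketch (the pixel coordinates are made-up values): provide six
+    # Brillouin-zone features plus the center point manually.
+    #
+    #     features = np.array(
+    #         [[203, 341], [299, 386], [350, 299], [305, 205], [199, 184],
+    #          [153, 266], [249, 277]],
+    #     )
+    #     sp.define_features(features=features, rotation_symmetry=6, apply=True)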
+ + # 3. Generate the spline warp correction from momentum features. + # If no features have been selected before, use class defaults. +
[docs] def generate_splinewarp( + self, + use_center: bool = None, + verbose: bool = None, + **kwds, + ): + """3. Step of the distortion correction workflow: Generate the correction + function restoring the symmetry in the image using a splinewarp algorithm. + + Args: + use_center (bool, optional): Option to use the position of the + center point in the correction. Default is read from config, or set to True. + verbose (bool, optional): Option to print out diagnostic information. + Defaults to config["core"]["verbose"]. + **kwds: Keyword arguments for MomentumCorrector.spline_warp_estimate(). + """ + if verbose is None: + verbose = self.verbose + + self.mc.spline_warp_estimate(use_center=use_center, verbose=verbose, **kwds) + + if self.mc.slice is not None and verbose: + print("Original slice with reference features") + self.mc.view(annotated=True, backend="bokeh", crosshair=True) + + print("Corrected slice with target features") + self.mc.view( + image=self.mc.slice_corrected, + annotated=True, + points={"feats": self.mc.ptargs}, + backend="bokeh", + crosshair=True, + ) + + print("Original slice with target features") + self.mc.view( + image=self.mc.slice, + points={"feats": self.mc.ptargs}, + annotated=True, + backend="bokeh", + )
+ + # 3a. Save spline-warp parameters to config file. +
[docs] def save_splinewarp( + self, + filename: str = None, + overwrite: bool = False, + ): + """Save the generated spline-warp parameters to the folder config file. + + Args: + filename (str, optional): Filename of the config dictionary to save to. + Defaults to "sed_config.yaml" in the current folder. + overwrite (bool, optional): Option to overwrite the present dictionary. + Defaults to False. + """ + if filename is None: + filename = "sed_config.yaml" + if len(self.mc.correction) == 0: + raise ValueError("No momentum correction parameters to save!") + correction = {} + for key, value in self.mc.correction.items(): + if key in ["reference_points", "target_points", "cdeform_field", "rdeform_field"]: + continue + if key in ["use_center", "rotation_symmetry"]: + correction[key] = value + elif key in ["center_point", "ascale"]: + correction[key] = [float(i) for i in value] + elif key in ["outer_points", "feature_points"]: + correction[key] = [] + for point in value: + correction[key].append([float(i) for i in point]) + else: + correction[key] = float(value) + + if "creation_date" not in correction: + correction["creation_date"] = datetime.now().timestamp() + + config = { + "momentum": { + "correction": correction, + }, + } + save_config(config, filename, overwrite) + print(f'Saved momentum correction parameters to "{filename}".')
+ + # 4. Pose corrections. Provide interactive interface for correcting + # scaling, shift and rotation +
[docs]    def pose_adjustment(
+        self,
+        transformations: Dict[str, Any] = None,
+        apply: bool = False,
+        use_correction: bool = True,
+        reset: bool = True,
+        verbose: bool = None,
+        **kwds,
+    ):
+        """4. step of the distortion correction workflow: Generate an interactive panel
+        to adjust affine transformations that are applied to the image. Applies first
+        a scaling, next an x/y translation, and last a rotation around the center of
+        the image.
+
+        Args:
+            transformations (dict, optional): Dictionary with transformations.
+                Defaults to self.transformations or config["momentum"]["transformations"].
+            apply (bool, optional): Option to directly apply the provided
+                transformations. Defaults to False.
+            use_correction (bool, optional): Whether to use the spline warp correction
+                or not. Defaults to True.
+            reset (bool, optional): Option to reset the correction before transformation.
+                Defaults to True.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to config["core"]["verbose"].
+            **kwds: Keyword parameters defining defaults for the transformations:
+
+                - **scale** (float): Initial value of the scaling slider.
+                - **xtrans** (float): Initial value of the xtrans slider.
+                - **ytrans** (float): Initial value of the ytrans slider.
+                - **angle** (float): Initial value of the angle slider.
+        """
+        if verbose is None:
+            verbose = self.verbose
+
+        # Generate homography as default if no distortion correction has been applied
+        if self.mc.slice_corrected is None:
+            if self.mc.slice is None:
+                self.mc.slice = np.zeros(self._config["momentum"]["bins"][0:2])
+            self.mc.slice_corrected = self.mc.slice
+
+        if not use_correction:
+            self.mc.reset_deformation()
+
+        if self.mc.cdeform_field is None or self.mc.rdeform_field is None:
+            # Generate distortion correction from config values
+            self.mc.spline_warp_estimate(verbose=verbose)
+
+        self.mc.pose_adjustment(
+            transformations=transformations,
+            apply=apply,
+            reset=reset,
+            verbose=verbose,
+            **kwds,
+        )
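+    # Usage sketch (transformation values are assumptions): apply a fixed affine
+    # pose correction without touching the interactive sliders.
+    #
+    #     sp.pose_adjustment(
+    #         transformations={"scale": 1.03, "xtrans": 8, "ytrans": -4, "angle": -1.5},
+    #         apply=True,
+    #     )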
+ + # 4a. Save pose adjustment parameters to config file. +
[docs] def save_transformations( + self, + filename: str = None, + overwrite: bool = False, + ): + """Save the pose adjustment parameters to the folder config file. + + Args: + filename (str, optional): Filename of the config dictionary to save to. + Defaults to "sed_config.yaml" in the current folder. + overwrite (bool, optional): Option to overwrite the present dictionary. + Defaults to False. + """ + if filename is None: + filename = "sed_config.yaml" + if len(self.mc.transformations) == 0: + raise ValueError("No momentum transformation parameters to save!") + transformations = {} + for key, value in self.mc.transformations.items(): + transformations[key] = float(value) + + if "creation_date" not in transformations: + transformations["creation_date"] = datetime.now().timestamp() + + config = { + "momentum": { + "transformations": transformations, + }, + } + save_config(config, filename, overwrite) + print(f'Saved momentum transformation parameters to "{filename}".')
+ + # 5. Apply the momentum correction to the dataframe +
[docs]    def apply_momentum_correction(
+        self,
+        preview: bool = False,
+        verbose: bool = None,
+        **kwds,
+    ):
+        """Applies the distortion correction and (optional) pose adjustment to the
+        dataframe.
+
+        Args:
+            preview (bool, optional): Option to preview the first elements of the data frame.
+                Defaults to False.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to config["core"]["verbose"].
+            **kwds: Keyword parameters for ``MomentumCorrector.apply_corrections()``:
+
+                - **rdeform_field** (np.ndarray, optional): Row deformation field.
+                - **cdeform_field** (np.ndarray, optional): Column deformation field.
+                - **inv_dfield** (np.ndarray, optional): Inverse deformation field.
+
+        """
+        if verbose is None:
+            verbose = self.verbose
+
+        x_column = self._config["dataframe"]["x_column"]
+        y_column = self._config["dataframe"]["y_column"]
+
+        if self._dataframe is not None:
+            if verbose:
+                print("Adding corrected X/Y columns to dataframe:")
+            df, metadata = self.mc.apply_corrections(
+                df=self._dataframe,
+                verbose=verbose,
+                **kwds,
+            )
+            if (
+                self._timed_dataframe is not None
+                and x_column in self._timed_dataframe.columns
+                and y_column in self._timed_dataframe.columns
+            ):
+                tdf, _ = self.mc.apply_corrections(
+                    self._timed_dataframe,
+                    verbose=False,
+                    **kwds,
+                )
+
+            # Add Metadata
+            self._attributes.add(
+                metadata,
+                "momentum_correction",
+                duplicate_policy="merge",
+            )
+            self._dataframe = df
+            if (
+                self._timed_dataframe is not None
+                and x_column in self._timed_dataframe.columns
+                and y_column in self._timed_dataframe.columns
+            ):
+                self._timed_dataframe = tdf
+        else:
+            raise ValueError("No dataframe loaded!")
+        if preview:
+            print(self._dataframe.head(10))
+        else:
+            if verbose:
+                print(self._dataframe)
+ + # Momentum calibration work flow + # 1. Calculate momentum calibration +
[docs]    def calibrate_momentum_axes(
+        self,
+        point_a: Union[np.ndarray, List[int]] = None,
+        point_b: Union[np.ndarray, List[int]] = None,
+        k_distance: float = None,
+        k_coord_a: Union[np.ndarray, List[float]] = None,
+        k_coord_b: Union[np.ndarray, List[float]] = np.array([0.0, 0.0]),
+        equiscale: bool = True,
+        apply: bool = False,
+    ):
+        """1. step of the momentum calibration workflow: Calibrate momentum
+        axes using either provided pixel coordinates of a high-symmetry point and its
+        distance to the BZ center, or the k-coordinates of two points in the BZ
+        (depending on the equiscale option). Opens an interactive panel for selecting
+        the points.
+
+        Args:
+            point_a (Union[np.ndarray, List[int]]): Pixel coordinates of the first
+                point used for momentum calibration.
+            point_b (Union[np.ndarray, List[int]], optional): Pixel coordinates of the
+                second point used for momentum calibration.
+                Defaults to config["momentum"]["center_pixel"].
+            k_distance (float, optional): Momentum distance between point a and b.
+                Needs to be provided if no specific k-coordinates for the two points
+                are given. Defaults to None.
+            k_coord_a (Union[np.ndarray, List[float]], optional): Momentum coordinate
+                of the first point used for calibration. Used if equiscale is False.
+                Defaults to None.
+            k_coord_b (Union[np.ndarray, List[float]], optional): Momentum coordinate
+                of the second point used for calibration. Defaults to [0.0, 0.0].
+            equiscale (bool, optional): Option to use an equal scale along both the
+                kx and ky directions. If True, the distance between points a and b,
+                and the absolute position of point a are used for defining the scale.
+                If False, the scale is calculated from the k-positions of both points
+                a and b. Defaults to True.
+            apply (bool, optional): Option to directly store the momentum calibration
+                in the class. Defaults to False.
+        """
+        if point_b is None:
+            point_b = self._config["momentum"]["center_pixel"]
+
+        self.mc.select_k_range(
+            point_a=point_a,
+            point_b=point_b,
+            k_distance=k_distance,
+            k_coord_a=k_coord_a,
+            k_coord_b=k_coord_b,
+            equiscale=equiscale,
+            apply=apply,
+        )
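+    # Usage sketch (pixel coordinates and k-distance are assumptions): calibrate
+    # using one high-symmetry point and its known distance to the zone center.
+    #
+    #     sp.calibrate_momentum_axes(
+    #         point_a=[308, 345],
+    #         k_distance=1.28,   # e.g. a known high-symmetry distance in 1/Angstrom
+    #         apply=True,
+    #     )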
+ + # 1a. Save momentum calibration parameters to config file. +
[docs] def save_momentum_calibration( + self, + filename: str = None, + overwrite: bool = False, + ): + """Save the generated momentum calibration parameters to the folder config file. + + Args: + filename (str, optional): Filename of the config dictionary to save to. + Defaults to "sed_config.yaml" in the current folder. + overwrite (bool, optional): Option to overwrite the present dictionary. + Defaults to False. + """ + if filename is None: + filename = "sed_config.yaml" + if len(self.mc.calibration) == 0: + raise ValueError("No momentum calibration parameters to save!") + calibration = {} + for key, value in self.mc.calibration.items(): + if key in ["kx_axis", "ky_axis", "grid", "extent"]: + continue + + calibration[key] = float(value) + + if "creation_date" not in calibration: + calibration["creation_date"] = datetime.now().timestamp() + + config = {"momentum": {"calibration": calibration}} + save_config(config, filename, overwrite) + print(f"Saved momentum calibration parameters to {filename}")
+ + # 2. Apply correction and calibration to the dataframe +
[docs]    def apply_momentum_calibration(
+        self,
+        calibration: dict = None,
+        preview: bool = False,
+        verbose: bool = None,
+        **kwds,
+    ):
+        """2. step of the momentum calibration workflow: Apply the momentum
+        calibration stored in the class to the dataframe. If corrected X/Y axes exist,
+        these are used.
+
+        Args:
+            calibration (dict, optional): Optional dictionary with calibration data to
+                use. Defaults to None.
+            preview (bool, optional): Option to preview the first elements of the data frame.
+                Defaults to False.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to config["core"]["verbose"].
+            **kwds: Keyword args passed to ``MomentumCorrector.append_k_axis()``.
+        """
+        if verbose is None:
+            verbose = self.verbose
+
+        x_column = self._config["dataframe"]["x_column"]
+        y_column = self._config["dataframe"]["y_column"]
+
+        if self._dataframe is not None:
+            if verbose:
+                print("Adding kx/ky columns to dataframe:")
+            df, metadata = self.mc.append_k_axis(
+                df=self._dataframe,
+                calibration=calibration,
+                **kwds,
+            )
+            if (
+                self._timed_dataframe is not None
+                and x_column in self._timed_dataframe.columns
+                and y_column in self._timed_dataframe.columns
+            ):
+                tdf, _ = self.mc.append_k_axis(
+                    df=self._timed_dataframe,
+                    calibration=calibration,
+                    **kwds,
+                )
+
+            # Add Metadata
+            self._attributes.add(
+                metadata,
+                "momentum_calibration",
+                duplicate_policy="merge",
+            )
+            self._dataframe = df
+            if (
+                self._timed_dataframe is not None
+                and x_column in self._timed_dataframe.columns
+                and y_column in self._timed_dataframe.columns
+            ):
+                self._timed_dataframe = tdf
+        else:
+            raise ValueError("No dataframe loaded!")
+        if preview:
+            print(self._dataframe.head(10))
+        else:
+            if verbose:
+                print(self._dataframe)
+ + # Energy correction workflow + # 1. Adjust the energy correction parameters +
[docs] def adjust_energy_correction( + self, + correction_type: str = None, + amplitude: float = None, + center: Tuple[float, float] = None, + apply=False, + **kwds, + ): + """1. step of the energy correction workflow: Opens an interactive plot to + adjust the parameters for the TOF/energy correction. Also pre-bins the data if + they are not present yet. + + Args: + correction_type (str, optional): Type of correction to apply to the TOF + axis. Valid values are: + + - 'spherical' + - 'Lorentzian' + - 'Gaussian' + - 'Lorentzian_asymmetric' + + Defaults to config["energy"]["correction_type"]. + amplitude (float, optional): Amplitude of the correction. + Defaults to config["energy"]["correction"]["amplitude"]. + center (Tuple[float, float], optional): Center X/Y coordinates for the + correction. Defaults to config["energy"]["correction"]["center"]. + apply (bool, optional): Option to directly apply the provided or default + correction parameters. Defaults to False. + **kwds: Keyword parameters passed to ``EnergyCalibrator.adjust_energy_correction()``. + """ + if self._pre_binned is None: + print( + "Pre-binned data not present, binning using defaults from config...", + ) + self._pre_binned = self.pre_binning() + + self.ec.adjust_energy_correction( + self._pre_binned, + correction_type=correction_type, + amplitude=amplitude, + center=center, + apply=apply, + **kwds, + )
+ + # 1a. Save energy correction parameters to config file. +
[docs] def save_energy_correction( + self, + filename: str = None, + overwrite: bool = False, + ): + """Save the generated energy correction parameters to the folder config file. + + Args: + filename (str, optional): Filename of the config dictionary to save to. + Defaults to "sed_config.yaml" in the current folder. + overwrite (bool, optional): Option to overwrite the present dictionary. + Defaults to False. + """ + if filename is None: + filename = "sed_config.yaml" + if len(self.ec.correction) == 0: + raise ValueError("No energy correction parameters to save!") + correction = {} + for key, val in self.ec.correction.items(): + if key == "correction_type": + correction[key] = val + elif key == "center": + correction[key] = [float(i) for i in val] + else: + correction[key] = float(val) + + if "creation_date" not in correction: + correction["creation_date"] = datetime.now().timestamp() + + config = {"energy": {"correction": correction}} + save_config(config, filename, overwrite) + print(f"Saved energy correction parameters to {filename}")
+ + # 2. Apply energy correction to dataframe +
[docs]    def apply_energy_correction(
+        self,
+        correction: dict = None,
+        preview: bool = False,
+        verbose: bool = None,
+        **kwds,
+    ):
+        """2. step of the energy correction workflow: Apply the energy correction
+        parameters stored in the class to the dataframe.
+
+        Args:
+            correction (dict, optional): Dictionary containing the correction
+                parameters. Defaults to config["energy"]["correction"].
+            preview (bool, optional): Option to preview the first elements of the data frame.
+                Defaults to False.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to config["core"]["verbose"].
+            **kwds:
+                Keyword args passed to ``EnergyCalibrator.apply_energy_correction()``.
+        """
+        if verbose is None:
+            verbose = self.verbose
+
+        tof_column = self._config["dataframe"]["tof_column"]
+
+        if self._dataframe is not None:
+            if verbose:
+                print("Applying energy correction to dataframe...")
+            df, metadata = self.ec.apply_energy_correction(
+                df=self._dataframe,
+                correction=correction,
+                verbose=verbose,
+                **kwds,
+            )
+            if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns:
+                tdf, _ = self.ec.apply_energy_correction(
+                    df=self._timed_dataframe,
+                    correction=correction,
+                    verbose=False,
+                    **kwds,
+                )
+
+            # Add Metadata
+            self._attributes.add(
+                metadata,
+                "energy_correction",
+            )
+            self._dataframe = df
+            if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns:
+                self._timed_dataframe = tdf
+        else:
+            raise ValueError("No dataframe loaded!")
+        if preview:
+            print(self._dataframe.head(10))
+        else:
+            if verbose:
+                print(self._dataframe)
+ + # Energy calibrator workflow + # 1. Load and normalize data +
[docs]    def load_bias_series(
+        self,
+        binned_data: Union[xr.DataArray, Tuple[np.ndarray, np.ndarray, np.ndarray]] = None,
+        data_files: List[str] = None,
+        axes: List[str] = None,
+        bins: List = None,
+        ranges: Sequence[Tuple[float, float]] = None,
+        biases: np.ndarray = None,
+        bias_key: str = None,
+        normalize: bool = None,
+        span: int = None,
+        order: int = None,
+    ):
+        """1. step of the energy calibration workflow: Load and bin data from
+        single-event files, or load binned bias/TOF traces.
+
+        Args:
+            binned_data (Union[xr.DataArray, Tuple[np.ndarray, np.ndarray, np.ndarray]], optional):
+                Binned data. If provided as a DataArray, needs to contain the dimensions
+                config["dataframe"]["tof_column"] and config["dataframe"]["bias_column"]. If
+                provided as a tuple, needs to contain the elements tof, biases, traces.
+            data_files (List[str], optional): list of file paths to bin.
+            axes (List[str], optional): bin axes.
+                Defaults to config["dataframe"]["tof_column"].
+            bins (List, optional): number of bins.
+                Defaults to config["energy"]["bins"].
+            ranges (Sequence[Tuple[float, float]], optional): bin ranges.
+                Defaults to config["energy"]["ranges"].
+            biases (np.ndarray, optional): Bias voltages used. If missing, bias
+                voltages are extracted from the data files.
+            bias_key (str, optional): hdf5 path where bias values are stored.
+                Defaults to config["energy"]["bias_key"].
+            normalize (bool, optional): Option to normalize traces.
+                Defaults to config["energy"]["normalize"].
+            span (int, optional): span smoothing parameter of the LOESS method
+                (see ``scipy.signal.savgol_filter()``).
+                Defaults to config["energy"]["normalize_span"].
+            order (int, optional): order smoothing parameter of the LOESS method
+                (see ``scipy.signal.savgol_filter()``).
+                Defaults to config["energy"]["normalize_order"].
+        """
+        if binned_data is not None:
+            if isinstance(binned_data, xr.DataArray):
+                if (
+                    self._config["dataframe"]["tof_column"] not in binned_data.dims
+                    or self._config["dataframe"]["bias_column"] not in binned_data.dims
+                ):
+                    raise ValueError(
+                        "If binned_data is provided as an xarray, it needs to contain dimensions "
+                        f"'{self._config['dataframe']['tof_column']}' and "
+                        f"'{self._config['dataframe']['bias_column']}'!",
+                    )
+                tof = binned_data.coords[self._config["dataframe"]["tof_column"]].values
+                biases = binned_data.coords[self._config["dataframe"]["bias_column"]].values
+                traces = binned_data.values[:, :]
+            else:
+                try:
+                    (tof, biases, traces) = binned_data
+                except ValueError as exc:
+                    raise ValueError(
+                        "If binned_data is provided as tuple, it needs to contain "
+                        "(tof, biases, traces)!",
+                    ) from exc
+            self.ec.load_data(biases=biases, traces=traces, tof=tof)
+
+        elif data_files is not None:
+            self.ec.bin_data(
+                data_files=cast(List[str], self.cpy(data_files)),
+                axes=axes,
+                bins=bins,
+                ranges=ranges,
+                biases=biases,
+                bias_key=bias_key,
+            )
+
+        else:
+            raise ValueError("Either binned_data or data_files needs to be provided!")
+
+        if (normalize is not None and normalize is True) or (
+            normalize is None and self._config["energy"]["normalize"]
+        ):
+            if span is None:
+                span = self._config["energy"]["normalize_span"]
+            if order is None:
+                order = self._config["energy"]["normalize_order"]
+            self.ec.normalize(smooth=True, span=span, order=order)
+        self.ec.view(
+            traces=self.ec.traces_normed,
+            xaxis=self.ec.tof,
+            backend="bokeh",
+        )
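+    # Usage sketch (bias_files and the pre-binned arrays are assumed to exist):
+    # bin a set of bias-series files, or pass (tof, biases, traces) directly.
+    #
+    #     sp.load_bias_series(data_files=bias_files, normalize=True)
+    #     # or:
+    #     sp.load_bias_series(binned_data=(tof, biases, traces))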
+ + # 2. extract ranges and get peak positions +
[docs]    def find_bias_peaks(
+        self,
+        ranges: Union[List[Tuple], Tuple],
+        ref_id: int = 0,
+        infer_others: bool = True,
+        mode: str = "replace",
+        radius: int = None,
+        peak_window: int = None,
+        apply: bool = False,
+    ):
+        """2. step of the energy calibration workflow: Find a peak within a given range
+        for the indicated reference trace, and try to find the same peak for all
+        other traces. Uses fast_dtw to align the curves, which may perform poorly if
+        the shape of the curves changes qualitatively. Ideally, choose a reference
+        trace in the middle of the set, and don't choose the range too narrow around
+        the peak. Alternatively, a list of ranges for all traces can be provided.
+
+        Args:
+            ranges (Union[List[Tuple], Tuple]): Tuple of TOF values indicating a range.
+                Alternatively, a list of ranges for all traces can be given.
+            ref_id (int, optional): The id of the trace the range refers to.
+                Defaults to 0.
+            infer_others (bool, optional): Whether to determine the range for the other
+                traces. Defaults to True.
+            mode (str, optional): Whether to "add" or "replace" existing ranges.
+                Defaults to "replace".
+            radius (int, optional): Radius parameter for fast_dtw.
+                Defaults to config["energy"]["fastdtw_radius"].
+            peak_window (int, optional): peak_window parameter for the peak-detection
+                algorithm: number of points that have to behave monotonically around
+                a peak. Defaults to config["energy"]["peak_window"].
+            apply (bool, optional): Option to directly apply the provided parameters.
+                Defaults to False.
+        """
+        if radius is None:
+            radius = self._config["energy"]["fastdtw_radius"]
+        if peak_window is None:
+            peak_window = self._config["energy"]["peak_window"]
+        if not infer_others:
+            self.ec.add_ranges(
+                ranges=ranges,
+                ref_id=ref_id,
+                infer_others=infer_others,
+                mode=mode,
+                radius=radius,
+            )
+            print(self.ec.featranges)
+            try:
+                self.ec.feature_extract(peak_window=peak_window)
+                self.ec.view(
+                    traces=self.ec.traces_normed,
+                    segs=self.ec.featranges,
+                    xaxis=self.ec.tof,
+                    peaks=self.ec.peaks,
+                    backend="bokeh",
+                )
+            except IndexError:
+                print("Could not determine all peaks!")
+                raise
+        else:
+            # New adjustment tool
+            assert isinstance(ranges, tuple)
+            self.ec.adjust_ranges(
+                ranges=ranges,
+                ref_id=ref_id,
+                traces=self.ec.traces_normed,
+                infer_others=infer_others,
+                radius=radius,
+                peak_window=peak_window,
+                apply=apply,
+            )
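+    # Usage sketch (TOF range and reference id are assumptions): search a peak
+    # around one trace and infer the ranges for all others.
+    #
+    #     sp.find_bias_peaks(ranges=(64500, 66000), ref_id=5, apply=True)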
+ + # 3. Fit the energy calibration relation +
[docs]    def calibrate_energy_axis(
+        self,
+        ref_id: int,
+        ref_energy: float,
+        method: str = None,
+        energy_scale: str = None,
+        verbose: bool = None,
+        **kwds,
+    ):
+        """3. step of the energy calibration workflow: Calculate the calibration
+        function for the energy axis. Two approximations are implemented, a
+        (normally 3rd order) polynomial approximation, and a d^2/(t-t0)^2 relation.
+
+        Args:
+            ref_id (int): id of the trace at the bias where the reference energy is
+                given.
+            ref_energy (float): Absolute energy of the detected feature at the bias
+                of ref_id.
+            method (str, optional): Method for determining the energy calibration.
+
+                - **'lmfit'**: Energy calibration using lmfit and 1/t^2 form.
+                - **'lstsq'**, **'lsqr'**: Energy calibration using polynomial form.
+
+                Defaults to config["energy"]["calibration_method"].
+            energy_scale (str, optional): Direction of increasing energy scale.
+
+                - **'kinetic'**: increasing energy with decreasing TOF.
+                - **'binding'**: increasing energy with increasing TOF.
+
+                Defaults to config["energy"]["energy_scale"].
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to config["core"]["verbose"].
+            **kwds: Keyword parameters passed to ``EnergyCalibrator.calibrate()``.
+        """
+        if verbose is None:
+            verbose = self.verbose
+
+        if method is None:
+            method = self._config["energy"]["calibration_method"]
+
+        if energy_scale is None:
+            energy_scale = self._config["energy"]["energy_scale"]
+
+        self.ec.calibrate(
+            ref_id=ref_id,
+            ref_energy=ref_energy,
+            method=method,
+            energy_scale=energy_scale,
+            verbose=verbose,
+            **kwds,
+        )
+        if verbose:
+            print("Quality of Calibration:")
+            self.ec.view(
+                traces=self.ec.traces_normed,
+                xaxis=self.ec.calibration["axis"],
+                align=True,
+                energy_scale=energy_scale,
+                backend="bokeh",
+            )
+            print("E/TOF relationship:")
+            self.ec.view(
+                traces=self.ec.calibration["axis"][None, :],
+                xaxis=self.ec.tof,
+                backend="matplotlib",
+                show_legend=False,
+            )
+            if energy_scale == "kinetic":
+                plt.scatter(
+                    self.ec.peaks[:, 0],
+                    -(self.ec.biases - self.ec.biases[ref_id]) + ref_energy,
+                    s=50,
+                    c="k",
+                )
+            elif energy_scale == "binding":
+                plt.scatter(
+                    self.ec.peaks[:, 0],
+                    self.ec.biases - self.ec.biases[ref_id] + ref_energy,
+                    s=50,
+                    c="k",
+                )
+            else:
+                raise ValueError(
+                    f'energy_scale needs to be either "binding" or "kinetic", got {energy_scale}.',
+                )
+            plt.xlabel("Time-of-flight", fontsize=15)
+            plt.ylabel("Energy (eV)", fontsize=15)
+            plt.show()
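+    # Usage sketch (reference values are assumptions): fit the energy/TOF
+    # relation using the lmfit (1/t^2) method.
+    #
+    #     sp.calibrate_energy_axis(
+    #         ref_id=5,
+    #         ref_energy=-0.5,       # energy of the reference feature, eV
+    #         method="lmfit",
+    #         energy_scale="kinetic",
+    #     )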
+ + # 3a. Save energy calibration parameters to config file. +
[docs] def save_energy_calibration( + self, + filename: str = None, + overwrite: bool = False, + ): + """Save the generated energy calibration parameters to the folder config file. + + Args: + filename (str, optional): Filename of the config dictionary to save to. + Defaults to "sed_config.yaml" in the current folder. + overwrite (bool, optional): Option to overwrite the present dictionary. + Defaults to False. + """ + if filename is None: + filename = "sed_config.yaml" + if len(self.ec.calibration) == 0: + raise ValueError("No energy calibration parameters to save!") + calibration = {} + for key, value in self.ec.calibration.items(): + if key in ["axis", "refid", "Tmat", "bvec"]: + continue + if key == "energy_scale": + calibration[key] = value + elif key == "coeffs": + calibration[key] = [float(i) for i in value] + else: + calibration[key] = float(value) + + if "creation_date" not in calibration: + calibration["creation_date"] = datetime.now().timestamp() + + config = {"energy": {"calibration": calibration}} + save_config(config, filename, overwrite) + print(f'Saved energy calibration parameters to "{filename}".')
+ + # 4. Apply energy calibration to the dataframe +
[docs]    def append_energy_axis(
+        self,
+        calibration: dict = None,
+        preview: bool = False,
+        verbose: bool = None,
+        **kwds,
+    ):
+        """4. step of the energy calibration workflow: Apply the calibration function
+        to the dataframe. Two approximations are implemented, a (normally 3rd order)
+        polynomial approximation, and a d^2/(t-t0)^2 relation. A calibration dictionary
+        can be provided.
+
+        Args:
+            calibration (dict, optional): Calibration dict containing calibration
+                parameters. Overrides calibration from class or config.
+                Defaults to None.
+            preview (bool, optional): Option to preview the first elements of the data frame.
+                Defaults to False.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to config["core"]["verbose"].
+            **kwds:
+                Keyword args passed to ``EnergyCalibrator.append_energy_axis()``.
+        """
+        if verbose is None:
+            verbose = self.verbose
+
+        tof_column = self._config["dataframe"]["tof_column"]
+
+        if self._dataframe is not None:
+            if verbose:
+                print("Adding energy column to dataframe:")
+            df, metadata = self.ec.append_energy_axis(
+                df=self._dataframe,
+                calibration=calibration,
+                verbose=verbose,
+                **kwds,
+            )
+            if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns:
+                tdf, _ = self.ec.append_energy_axis(
+                    df=self._timed_dataframe,
+                    calibration=calibration,
+                    verbose=False,
+                    **kwds,
+                )
+
+            # Add Metadata
+            self._attributes.add(
+                metadata,
+                "energy_calibration",
+                duplicate_policy="merge",
+            )
+            self._dataframe = df
+            if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns:
+                self._timed_dataframe = tdf
+
+        else:
+            raise ValueError("No dataframe loaded!")
+        if preview:
+            print(self._dataframe.head(10))
+        else:
+            if verbose:
+                print(self._dataframe)
+ +
[docs]    def add_energy_offset(
+        self,
+        constant: float = None,
+        columns: Union[str, Sequence[str]] = None,
+        weights: Union[float, Sequence[float]] = None,
+        reductions: Union[str, Sequence[str]] = None,
+        preserve_mean: Union[bool, Sequence[bool]] = None,
+        preview: bool = False,
+        verbose: bool = None,
+    ) -> None:
+        """Shift the energy axis of the dataframe by a given amount.
+
+        Args:
+            constant (float, optional): The constant to shift the energy axis by.
+            columns (Union[str, Sequence[str]], optional): Name of the column(s) to apply the
+                shift from.
+            weights (Union[float, Sequence[float]], optional): weights to apply to the columns.
+                Can also be used to flip the sign (e.g. -1). Defaults to 1.
+            reductions (str, optional): The reduction to apply to the column. Should be an
+                available method of dask.dataframe.Series. For example "mean". In this case the
+                function is applied to the column to generate a single value for the whole
+                dataset. If None, the shift is applied per-dataframe-row. Defaults to None.
+                Currently only "mean" is supported.
+            preserve_mean (bool, optional): Whether to subtract the mean of the column before
+                applying the shift. Defaults to False.
+            preview (bool, optional): Option to preview the first elements of the data frame.
+                Defaults to False.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to config["core"]["verbose"].
+
+        Raises:
+            ValueError: If the energy column is not in the dataframe.
+        """
+        if verbose is None:
+            verbose = self.verbose
+
+        energy_column = self._config["dataframe"]["energy_column"]
+        if energy_column not in self._dataframe.columns:
+            raise ValueError(
+                f"Energy column {energy_column} not found in dataframe! "
+                "Run `append_energy_axis()` first.",
+            )
+        if self._dataframe is not None:
+            if verbose:
+                print("Adding energy offset to dataframe:")
+            df, metadata = self.ec.add_offsets(
+                df=self._dataframe,
+                constant=constant,
+                columns=columns,
+                energy_column=energy_column,
+                weights=weights,
+                reductions=reductions,
+                preserve_mean=preserve_mean,
+                verbose=verbose,
+            )
+            if self._timed_dataframe is not None and energy_column in self._timed_dataframe.columns:
+                tdf, _ = self.ec.add_offsets(
+                    df=self._timed_dataframe,
+                    constant=constant,
+                    columns=columns,
+                    energy_column=energy_column,
+                    weights=weights,
+                    reductions=reductions,
+                    preserve_mean=preserve_mean,
+                )
+
+            self._attributes.add(
+                metadata,
+                "add_energy_offset",
+                # TODO: allow only appending when no offset along this column(s) was applied
+                # TODO: clear memory of modifications if the energy axis is recalculated
+                duplicate_policy="append",
+            )
+            self._dataframe = df
+            if self._timed_dataframe is not None and energy_column in self._timed_dataframe.columns:
+                self._timed_dataframe = tdf
+        else:
+            raise ValueError("No dataframe loaded!")
+        if preview:
+            print(self._dataframe.head(10))
+        elif verbose:
+            print(self._dataframe)
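+    # Usage sketch (the column name and values are assumptions): shift the energy
+    # axis by a constant plus a sign-flipped photon-energy column.
+    #
+    #     sp.add_energy_offset(
+    #         constant=-31.5,
+    #         columns="monochromatorPhotonEnergy",
+    #         weights=-1,
+    #         preserve_mean=True,
+    #     )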
+ +
[docs]    def save_energy_offset(
+        self,
+        filename: str = None,
+        overwrite: bool = False,
+    ):
+        """Save the generated energy offset parameters to the folder config file.
+
+        Args:
+            filename (str, optional): Filename of the config dictionary to save to.
+                Defaults to "sed_config.yaml" in the current folder.
+            overwrite (bool, optional): Option to overwrite the present dictionary.
+                Defaults to False.
+        """
+        if filename is None:
+            filename = "sed_config.yaml"
+        if len(self.ec.offsets) == 0:
+            raise ValueError("No energy offset parameters to save!")
+
+        if "creation_date" not in self.ec.offsets.keys():
+            self.ec.offsets["creation_date"] = datetime.now().timestamp()
+
+        config = {"energy": {"offsets": self.ec.offsets}}
+        save_config(config, filename, overwrite)
+        print(f'Saved energy offset parameters to "{filename}".')
+ +
[docs]    def append_tof_ns_axis(
+        self,
+        preview: bool = False,
+        verbose: bool = None,
+        **kwds,
+    ):
+        """Convert time-of-flight channel steps to nanoseconds.
+
+        Args:
+            preview (bool, optional): Option to preview the first elements of the data frame.
+                Defaults to False.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to config["core"]["verbose"].
+            **kwds: additional arguments are passed to
+                ``EnergyCalibrator.append_tof_ns_axis()``:
+
+                - **tof_ns_column** (str): Name of the generated column containing the
+                  time-of-flight in nanoseconds.
+                  Defaults to config["dataframe"]["tof_ns_column"].
+        """
+        if verbose is None:
+            verbose = self.verbose
+
+        tof_column = self._config["dataframe"]["tof_column"]
+
+        if self._dataframe is not None:
+            if verbose:
+                print("Adding time-of-flight column in nanoseconds to dataframe:")
+            # TODO assert order of execution through metadata
+
+            df, metadata = self.ec.append_tof_ns_axis(
+                df=self._dataframe,
+                **kwds,
+            )
+            if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns:
+                tdf, _ = self.ec.append_tof_ns_axis(
+                    df=self._timed_dataframe,
+                    **kwds,
+                )
+
+            self._attributes.add(
+                metadata,
+                "tof_ns_conversion",
+                duplicate_policy="overwrite",
+            )
+            self._dataframe = df
+            if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns:
+                self._timed_dataframe = tdf
+        else:
+            raise ValueError("No dataframe loaded!")
+        if preview:
+            print(self._dataframe.head(10))
+        else:
+            if verbose:
+                print(self._dataframe)
+ +
[docs]    def align_dld_sectors(
+        self,
+        sector_delays: np.ndarray = None,
+        preview: bool = False,
+        verbose: bool = None,
+        **kwds,
+    ):
+        """Align the 8 sectors of the HEXTOF endstation.
+
+        Args:
+            sector_delays (np.ndarray, optional): Array containing the sector delays. Defaults to
+                config["dataframe"]["sector_delays"].
+            preview (bool, optional): Option to preview the first elements of the data frame.
+                Defaults to False.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to config["core"]["verbose"].
+            **kwds: additional arguments are passed to ``EnergyCalibrator.align_dld_sectors()``.
+        """
+        if verbose is None:
+            verbose = self.verbose
+
+        tof_column = self._config["dataframe"]["tof_column"]
+
+        if self._dataframe is not None:
+            if verbose:
+                print("Aligning 8 sectors of dataframe")
+            # TODO assert order of execution through metadata
+
+            df, metadata = self.ec.align_dld_sectors(
+                df=self._dataframe,
+                sector_delays=sector_delays,
+                **kwds,
+            )
+            if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns:
+                tdf, _ = self.ec.align_dld_sectors(
+                    df=self._timed_dataframe,
+                    sector_delays=sector_delays,
+                    **kwds,
+                )
+
+            self._attributes.add(
+                metadata,
+                "dld_sector_alignment",
+                duplicate_policy="raise",
+            )
+            self._dataframe = df
+            if self._timed_dataframe is not None and tof_column in self._timed_dataframe.columns:
+                self._timed_dataframe = tdf
+        else:
+            raise ValueError("No dataframe loaded!")
+        if preview:
+            print(self._dataframe.head(10))
+        else:
+            if verbose:
+                print(self._dataframe)
+ + # Delay calibration function +
[docs]    def calibrate_delay_axis(
+        self,
+        delay_range: Tuple[float, float] = None,
+        datafile: str = None,
+        preview: bool = False,
+        verbose: bool = None,
+        **kwds,
+    ):
+        """Append a delay column to the dataframe. Either provide delay ranges, or read
+        them from a file.
+
+        Args:
+            delay_range (Tuple[float, float], optional): The scanned delay range in
+                picoseconds. Defaults to None.
+            datafile (str, optional): The file from which to read the delay ranges.
+                Defaults to None.
+            preview (bool, optional): Option to preview the first elements of the data frame.
+                Defaults to False.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to config["core"]["verbose"].
+            **kwds: Keyword args passed to ``DelayCalibrator.append_delay_axis()``.
+        """
+        if verbose is None:
+            verbose = self.verbose
+
+        adc_column = self._config["dataframe"]["adc_column"]
+        if self._dataframe is not None and adc_column not in self._dataframe.columns:
+            raise ValueError(f"ADC column {adc_column} not found in dataframe, cannot calibrate!")
+
+        if self._dataframe is not None:
+            if verbose:
+                print("Adding delay column to dataframe:")
+
+            if delay_range is None and datafile is None:
+                if len(self.dc.calibration) == 0:
+                    try:
+                        datafile = self._files[0]
+                    except IndexError:
+                        print(
+                            "No datafile available, specify either 'datafile' or 'delay_range'",
+                        )
+                        raise
+
+            df, metadata = self.dc.append_delay_axis(
+                self._dataframe,
+                delay_range=delay_range,
+                datafile=datafile,
+                verbose=verbose,
+                **kwds,
+            )
+            if self._timed_dataframe is not None and adc_column in self._timed_dataframe.columns:
+                tdf, _ = self.dc.append_delay_axis(
+                    self._timed_dataframe,
+                    delay_range=delay_range,
+                    datafile=datafile,
+                    verbose=False,
+                    **kwds,
+                )
+
+            # Add Metadata
+            self._attributes.add(
+                metadata,
+                "delay_calibration",
+                duplicate_policy="overwrite",
+            )
+            self._dataframe = df
+            if self._timed_dataframe is not None and adc_column in self._timed_dataframe.columns:
+                self._timed_dataframe = tdf
+        else:
+            raise ValueError("No dataframe loaded!")
+        if preview:
+            print(self._dataframe.head(10))
+        else:
+            if verbose:
+                print(self._dataframe)
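+    # Usage sketch (range values are assumptions): calibrate the delay axis from
+    # an explicitly given scan range instead of reading it from a data file.
+    #
+    #     sp.calibrate_delay_axis(delay_range=(-500.0, 1500.0), preview=True)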
+ +
[docs]    def save_delay_calibration(
+        self,
+        filename: str = None,
+        overwrite: bool = False,
+    ) -> None:
+        """Save the generated delay calibration parameters to the folder config file.
+
+        Args:
+            filename (str, optional): Filename of the config dictionary to save to.
+                Defaults to "sed_config.yaml" in the current folder.
+            overwrite (bool, optional): Option to overwrite the present dictionary.
+                Defaults to False.
+        """
+        if filename is None:
+            filename = "sed_config.yaml"
+
+        if len(self.dc.calibration) == 0:
+            raise ValueError("No delay calibration parameters to save!")
+        calibration = {}
+        for key, value in self.dc.calibration.items():
+            if key == "datafile":
+                calibration[key] = value
+            elif key in ["adc_range", "delay_range", "delay_range_mm"]:
+                calibration[key] = [float(i) for i in value]
+            else:
+                calibration[key] = float(value)
+
+        if "creation_date" not in calibration:
+            calibration["creation_date"] = datetime.now().timestamp()
+
+        config = {
+            "delay": {
+                "calibration": calibration,
+            },
+        }
+        save_config(config, filename, overwrite)
+        print(f'Saved delay calibration parameters to "{filename}".')
+ +
[docs]    def add_delay_offset(
+        self,
+        constant: float = None,
+        flip_delay_axis: bool = None,
+        columns: Union[str, Sequence[str]] = None,
+        weights: Union[float, Sequence[float]] = 1.0,
+        reductions: Union[str, Sequence[str]] = None,
+        preserve_mean: Union[bool, Sequence[bool]] = False,
+        preview: bool = False,
+        verbose: bool = None,
+    ) -> None:
+        """Shift the delay axis of the dataframe by a constant or other columns.
+
+        Args:
+            constant (float, optional): The constant to shift the delay axis by.
+            flip_delay_axis (bool, optional): Option to reverse the direction of the delay axis.
+            columns (Union[str, Sequence[str]], optional): Name of the column(s) to apply the
+                shift from.
+            weights (Union[float, Sequence[float]], optional): weights to apply to the columns.
+                Can also be used to flip the sign (e.g. -1). Defaults to 1.
+            reductions (str, optional): The reduction to apply to the column. Should be an
+                available method of dask.dataframe.Series. For example "mean". In this case the
+                function is applied to the column to generate a single value for the whole
+                dataset. If None, the shift is applied per-dataframe-row. Defaults to None.
+                Currently only "mean" is supported.
+            preserve_mean (bool, optional): Whether to subtract the mean of the column before
+                applying the shift. Defaults to False.
+            preview (bool, optional): Option to preview the first elements of the data frame.
+                Defaults to False.
+            verbose (bool, optional): Option to print out diagnostic information.
+                Defaults to config["core"]["verbose"].
+
+        Raises:
+            ValueError: If the delay column is not in the dataframe.
+        """
+        if verbose is None:
+            verbose = self.verbose
+
+        delay_column = self._config["dataframe"]["delay_column"]
+        if delay_column not in self._dataframe.columns:
+            raise ValueError(f"Delay column {delay_column} not found in dataframe!")
+
+        if self._dataframe is not None:
+            if verbose:
+                print("Adding delay offset to dataframe:")
+            df, metadata = self.dc.add_offsets(
+                df=self._dataframe,
+                constant=constant,
+                flip_delay_axis=flip_delay_axis,
+                columns=columns,
+                delay_column=delay_column,
+                weights=weights,
+                reductions=reductions,
+                preserve_mean=preserve_mean,
+                verbose=verbose,
+            )
+            if self._timed_dataframe is not None and delay_column in self._timed_dataframe.columns:
+                tdf, _ = self.dc.add_offsets(
+                    df=self._timed_dataframe,
+                    constant=constant,
+                    flip_delay_axis=flip_delay_axis,
+                    columns=columns,
+                    delay_column=delay_column,
+                    weights=weights,
+                    reductions=reductions,
+                    preserve_mean=preserve_mean,
+                    verbose=False,
+                )
+
+            self._attributes.add(
+                metadata,
+                "delay_offset",
+                duplicate_policy="append",
+            )
+            self._dataframe = df
+            if self._timed_dataframe is not None and delay_column in self._timed_dataframe.columns:
+                self._timed_dataframe = tdf
+        else:
+            raise ValueError("No dataframe loaded!")
+        if preview:
+            print(self._dataframe.head(10))
+        else:
+            if verbose:
+                print(self._dataframe)
+ +
[docs]    def save_delay_offsets(
+        self,
+        filename: str = None,
+        overwrite: bool = False,
+    ) -> None:
+        """Save the generated delay offset parameters to the folder config file.
+
+        Args:
+            filename (str, optional): Filename of the config dictionary to save to.
+                Defaults to "sed_config.yaml" in the current folder.
+            overwrite (bool, optional): Option to overwrite the present dictionary.
+                Defaults to False.
+        """
+        if filename is None:
+            filename = "sed_config.yaml"
+        if len(self.dc.offsets) == 0:
+            raise ValueError("No delay offset parameters to save!")
+
+        if "creation_date" not in self.dc.offsets.keys():
+            self.dc.offsets["creation_date"] = datetime.now().timestamp()
+
+        config = {
+            "delay": {
+                "offsets": self.dc.offsets,
+            },
+        }
+        save_config(config, filename, overwrite)
+        print(f'Saved delay offset parameters to "{filename}".')
+ +
[docs]    def save_workflow_params(
+        self,
+        filename: str = None,
+        overwrite: bool = False,
+    ) -> None:
+        """Run all save-calibration-parameter methods.
+
+        Args:
+            filename (str, optional): Filename of the config dictionary to save to.
+                Defaults to "sed_config.yaml" in the current folder.
+            overwrite (bool, optional): Option to overwrite the present dictionary.
+                Defaults to False.
+        """
+        for method in [
+            self.save_splinewarp,
+            self.save_transformations,
+            self.save_momentum_calibration,
+            self.save_energy_correction,
+            self.save_energy_calibration,
+            self.save_energy_offset,
+            self.save_delay_calibration,
+            self.save_delay_offsets,
+        ]:
+            try:
+                method(filename, overwrite)
+            except (ValueError, AttributeError, KeyError):
+                pass
+ +
[docs] def add_jitter( + self, + cols: List[str] = None, + amps: Union[float, Sequence[float]] = None, + **kwds, + ): + """Add jitter to the selected dataframe columns. + + Args: + cols (List[str], optional): The columns onto which to apply jitter. + Defaults to config["dataframe"]["jitter_cols"]. + amps (Union[float, Sequence[float]], optional): Amplitude scalings for the + jittering noise. If one number is given, the same is used for all axes. + For uniform noise (default) it will cover the interval [-amp, +amp]. + Defaults to config["dataframe"]["jitter_amps"]. + **kwds: additional keyword arguments passed to ``apply_jitter``. + """ + if cols is None: + cols = self._config["dataframe"]["jitter_cols"] + for loc, col in enumerate(cols): + if col.startswith("@"): + cols[loc] = self._config["dataframe"].get(col.strip("@")) + + if amps is None: + amps = self._config["dataframe"]["jitter_amps"] + + self._dataframe = self._dataframe.map_partitions( + apply_jitter, + cols=cols, + cols_jittered=cols, + amps=amps, + **kwds, + ) + if self._timed_dataframe is not None: + cols_timed = cols.copy() + for col in cols: + if col not in self._timed_dataframe.columns: + cols_timed.remove(col) + + if cols_timed: + self._timed_dataframe = self._timed_dataframe.map_partitions( + apply_jitter, + cols=cols_timed, + cols_jittered=cols_timed, + ) + metadata = [] + for col in cols: + metadata.append(col) + # TODO: allow only appending if columns are not jittered yet + self._attributes.add(metadata, "jittering", duplicate_policy="append")
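A hedged sketch of a typical call; the column names are placeholders for whatever detector coordinates the loaded data actually uses:

```python
# By default, columns and amplitudes come from config["dataframe"]["jitter_cols"]
# and config["dataframe"]["jitter_amps"]; here they are given explicitly.
# Uniform noise in [-amp, +amp] is added per column to smear out discretization.
sp.add_jitter(cols=["X", "Y", "t"], amps=[0.5, 0.5, 0.25])
```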
+ +
[docs] def add_time_stamped_data(
+        self,
+        dest_column: str,
+        time_stamps: np.ndarray = None,
+        data: np.ndarray = None,
+        archiver_channel: str = None,
+        **kwds,
+    ):
+        """Add data in form of timestamp/value pairs to the dataframe, using interpolation
+        onto the timestamps in the dataframe. The time-stamped data can either be provided,
+        or fetched from an EPICS archiver instance.
+
+        Args:
+            dest_column (str): destination column name
+            time_stamps (np.ndarray, optional): Time stamps of the values to add. If omitted,
+                time stamps are retrieved from the EPICS archiver.
+            data (np.ndarray, optional): Values corresponding to the time stamps in time_stamps.
+                If omitted, data are retrieved from the EPICS archiver.
+            archiver_channel (str, optional): EPICS archiver channel from which to retrieve data.
+                Either this or data and time_stamps have to be present.
+            **kwds: additional keyword arguments passed to ``add_time_stamped_data``.
+        """
+        time_stamp_column = kwds.pop(
+            "time_stamp_column",
+            self._config["dataframe"].get("time_stamp_alias", ""),
+        )
+
+        if time_stamps is None and data is None:
+            if archiver_channel is None:
+                raise ValueError(
+                    "Either archiver_channel or both time_stamps and data have to be present!",
+                )
+            if self.loader.__name__ != "mpes":
+                raise NotImplementedError(
+                    "This function is currently only implemented for the mpes loader!",
+                )
+            ts_from, ts_to = cast(MpesLoader, self.loader).get_start_and_end_time()
+            # get channel data with +-5 seconds safety margin
+            time_stamps, data = get_archiver_data(
+                archiver_url=self._config["metadata"].get("archiver_url", ""),
+                archiver_channel=archiver_channel,
+                ts_from=ts_from - 5,
+                ts_to=ts_to + 5,
+            )
+
+        self._dataframe = add_time_stamped_data(
+            self._dataframe,
+            time_stamps=time_stamps,
+            data=data,
+            dest_column=dest_column,
+            time_stamp_column=time_stamp_column,
+            **kwds,
+        )
+        if self._timed_dataframe is not None:
+            if time_stamp_column in self._timed_dataframe:
+                self._timed_dataframe = add_time_stamped_data(
+                    self._timed_dataframe,
+                    time_stamps=time_stamps,
+                    data=data,
+                    dest_column=dest_column,
+                    time_stamp_column=time_stamp_column,
+                    **kwds,
+                )
+        metadata: List[Any] = []
+        metadata.append(dest_column)
+        metadata.append(time_stamps)
+        metadata.append(data)
+        self._attributes.add(metadata, "time_stamped_data", duplicate_policy="append")
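A hedged sketch with explicitly provided arrays; the values and the destination column name are made up, and a time-stamp column (config["dataframe"]["time_stamp_alias"]) is assumed to be present in the dataframe:

```python
import numpy as np

# Attach a slowly varying quantity (here a fictitious sample temperature)
# to every event by interpolating onto the per-event time stamps.
time_stamps = np.array([1.0e9, 1.0e9 + 60, 1.0e9 + 120])  # epoch seconds
temperatures = np.array([300.0, 299.5, 299.1])
sp.add_time_stamped_data(
    dest_column="temperature",
    time_stamps=time_stamps,
    data=temperatures,
)
```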
+ +
[docs] def pre_binning(
+        self,
+        df_partitions: Union[int, Sequence[int]] = 100,
+        axes: List[str] = None,
+        bins: List[int] = None,
+        ranges: Sequence[Tuple[float, float]] = None,
+        **kwds,
+    ) -> xr.DataArray:
+        """Function to do an initial binning of the dataframe loaded to the class.
+
+        Args:
+            df_partitions (Union[int, Sequence[int]], optional): Number of dataframe partitions
+                to use for the initial binning. Defaults to 100.
+            axes (List[str], optional): Axes to bin.
+                Defaults to config["momentum"]["axes"].
+            bins (List[int], optional): Bin numbers to use for binning.
+                Defaults to config["momentum"]["bins"].
+            ranges (Sequence[Tuple[float, float]], optional): Ranges to use for binning.
+                Defaults to config["momentum"]["ranges"].
+            **kwds: Keyword arguments passed to ``compute``.
+
+        Returns:
+            xr.DataArray: pre-binned data-array.
+        """
+        if axes is None:
+            axes = self._config["momentum"]["axes"]
+        for loc, axis in enumerate(axes):
+            if axis.startswith("@"):
+                axes[loc] = self._config["dataframe"].get(axis.strip("@"))
+
+        if bins is None:
+            bins = self._config["momentum"]["bins"]
+        if ranges is None:
+            ranges_ = list(self._config["momentum"]["ranges"])
+            ranges_[2] = np.asarray(ranges_[2]) / 2 ** (
+                self._config["dataframe"]["tof_binning"] - 1
+            )
+            ranges = [cast(Tuple[float, float], tuple(v)) for v in ranges_]
+
+        assert self._dataframe is not None, "dataframe needs to be loaded first!"
+
+        return self.compute(
+            bins=bins,
+            axes=axes,
+            ranges=ranges,
+            df_partitions=df_partitions,
+            **kwds,
+        )
+ +
[docs] def compute(
+        self,
+        bins: Union[
+            int,
+            dict,
+            tuple,
+            List[int],
+            List[np.ndarray],
+            List[tuple],
+        ] = 100,
+        axes: Union[str, Sequence[str]] = None,
+        ranges: Sequence[Tuple[float, float]] = None,
+        normalize_to_acquisition_time: Union[bool, str] = False,
+        **kwds,
+    ) -> xr.DataArray:
+        """Compute the histogram along the given dimensions.
+
+        Args:
+            bins (int, dict, tuple, List[int], List[np.ndarray], List[tuple], optional):
+                Definition of the bins. Can be any of the following cases:
+
+                - an integer describing the number of bins for all dimensions
+                - a tuple of 3 numbers describing start, end and step of the binning
+                  range
+                - a np.ndarray defining the bin edges
+                - a list (NOT a tuple) of any of the above (int, tuple or np.ndarray)
+                - a dictionary made of the axes as keys and any of the above as values.
+
+                This takes priority over the axes and range arguments. Defaults to 100.
+            axes (Union[str, Sequence[str]], optional): The names of the axes (columns)
+                on which to calculate the histogram. The order will be the order of the
+                dimensions in the resulting array. Defaults to None.
+            ranges (Sequence[Tuple[float, float]], optional): list of tuples containing
+                the start and end point of the binning range. Defaults to None.
+            normalize_to_acquisition_time (Union[bool, str]): Option to normalize the
+                result to the acquisition time. If a "slow" axis was scanned, providing
+                the name of the scanned axis will compute and apply the corresponding
+                normalization histogram. Defaults to False.
+            **kwds: Keyword arguments:
+
+                - **hist_mode**: Histogram calculation method. "numpy" or "numba". See
+                  ``bin_dataframe`` for details. Defaults to
+                  config["binning"]["hist_mode"].
+                - **mode**: Defines how the results from each partition are combined.
+                  "fast", "lean" or "legacy". See ``bin_dataframe`` for details.
+                  Defaults to config["binning"]["mode"].
+                - **pbar**: Option to show the tqdm progress bar. Defaults to
+                  config["binning"]["pbar"].
+                - **n_cores**: Number of CPU cores to use for parallelization.
+                  Defaults to config["binning"]["num_cores"] or N_CPU-1.
+                - **threads_per_worker**: Limit the number of threads that
+                  multiprocessing can spawn per binning thread. Defaults to
+                  config["binning"]["threads_per_worker"].
+                - **threadpool_api**: The API to use for multiprocessing. "blas",
+                  "openmp" or None. See ``threadpool_limit`` for details. Defaults to
+                  config["binning"]["threadpool_API"].
+                - **df_partitions**: A sequence of dataframe partitions, or the
+                  number of the dataframe partitions to use. Defaults to all partitions.
+                - **filter**: A sequence of dictionaries with entries "col", "lower_bound",
+                  "upper_bound" to apply as filter to the dataframe before binning. The
+                  dataframe in the class remains unmodified by this.
+
+                Additional kwds are passed to ``bin_dataframe``.
+
+        Raises:
+            AssertionError: Raised when no dataframe has been loaded.
+
+        Returns:
+            xr.DataArray: The result of the n-dimensional binning represented in an
+            xarray object, combining the data with the axes.
+        """
+        assert self._dataframe is not None, "dataframe needs to be loaded first!"
+
+        hist_mode = kwds.pop("hist_mode", self._config["binning"]["hist_mode"])
+        mode = kwds.pop("mode", self._config["binning"]["mode"])
+        pbar = kwds.pop("pbar", self._config["binning"]["pbar"])
+        num_cores = kwds.pop("num_cores", self._config["binning"]["num_cores"])
+        threads_per_worker = kwds.pop(
+            "threads_per_worker",
+            self._config["binning"]["threads_per_worker"],
+        )
+        threadpool_api = kwds.pop(
+            "threadpool_API",
+            self._config["binning"]["threadpool_API"],
+        )
+        df_partitions: Union[int, Sequence[int]] = kwds.pop("df_partitions", None)
+        if isinstance(df_partitions, int):
+            df_partitions = list(range(0, min(df_partitions, self._dataframe.npartitions)))
+        if df_partitions is not None:
+            dataframe = self._dataframe.partitions[df_partitions]
+        else:
+            dataframe = self._dataframe
+
+        filter_params = kwds.pop("filter", None)
+        if filter_params is not None:
+            try:
+                for param in filter_params:
+                    if "col" not in param:
+                        raise ValueError(
+                            "'col' needs to be defined for each filter entry! ",
+                            f"Not present in {param}.",
+                        )
+                    assert set(param.keys()).issubset({"col", "lower_bound", "upper_bound"})
+                    dataframe = apply_filter(dataframe, **param)
+            except AssertionError as exc:
+                # report only the keys that are actually invalid ("col" is allowed)
+                invalid_keys = set(param.keys()) - {"col", "lower_bound", "upper_bound"}
+                raise ValueError(
+                    "Only 'col', 'lower_bound' and 'upper_bound' allowed as filter entries. ",
+                    f"Parameters {invalid_keys} not valid in {param}.",
+                ) from exc
+
+        self._binned = bin_dataframe(
+            df=dataframe,
+            bins=bins,
+            axes=axes,
+            ranges=ranges,
+            hist_mode=hist_mode,
+            mode=mode,
+            pbar=pbar,
+            n_cores=num_cores,
+            threads_per_worker=threads_per_worker,
+            threadpool_api=threadpool_api,
+            **kwds,
+        )
+
+        for dim in self._binned.dims:
+            try:
+                self._binned[dim].attrs["unit"] = self._config["dataframe"]["units"][dim]
+            except KeyError:
+                pass
+
+        self._binned.attrs["units"] = "counts"
+        self._binned.attrs["long_name"] = "photoelectron counts"
+        self._binned.attrs["metadata"] = self._attributes.metadata
+
+        if normalize_to_acquisition_time:
+            if isinstance(normalize_to_acquisition_time, str):
+                axis = normalize_to_acquisition_time
+                print(
+                    f"Calculate normalization histogram for axis '{axis}'...",
+                )
+                self._normalization_histogram = self.get_normalization_histogram(
+                    axis=axis,
+                    df_partitions=df_partitions,
+                )
+                # if the axes are named correctly, xarray figures out the normalization correctly
+                self._normalized = self._binned / self._normalization_histogram
+                self._attributes.add(
+                    self._normalization_histogram.values,
+                    name="normalization_histogram",
+                    duplicate_policy="overwrite",
+                )
+            else:
+                acquisition_time = self.loader.get_elapsed_time(
+                    fids=df_partitions,
+                )
+                if acquisition_time > 0:
+                    self._normalized = self._binned / acquisition_time
+                self._attributes.add(
+                    acquisition_time,
+                    name="normalization_histogram",
+                    duplicate_policy="overwrite",
+                )
+
+            self._normalized.attrs["units"] = "counts/second"
+            self._normalized.attrs["long_name"] = "photoelectron counts per second"
+            self._normalized.attrs["metadata"] = self._attributes.metadata
+
+            return self._normalized
+
+        return self._binned
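As a hedged illustration of the bin definitions accepted by ``compute`` (the axis names and ranges are placeholders, and `sp` is the assumed processor instance):

```python
# dict form: axis name -> bin spec; the tuple is (start, end, step),
# as described in the docstring above
res = sp.compute(bins={"kx": 100, "ky": 100, "energy": (-3.0, 1.0, 0.01)})

# list form: bins, axes and ranges given separately, in matching order
res = sp.compute(
    bins=[100, 100, 200],
    axes=["kx", "ky", "energy"],
    ranges=[(-2.0, 2.0), (-2.0, 2.0), (-3.0, 1.0)],
    df_partitions=50,  # restrict the binning to the first 50 partitions
)
```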
+ +
[docs] def get_normalization_histogram(
+        self,
+        axis: str = "delay",
+        use_time_stamps: bool = False,
+        **kwds,
+    ) -> xr.DataArray:
+        """Generates a normalization histogram from the timed dataframe. Optionally,
+        use the TimeStamps column instead.
+
+        Args:
+            axis (str, optional): The axis for which to compute the histogram.
+                Defaults to "delay".
+            use_time_stamps (bool, optional): Use the TimeStamps column of the
+                dataframe, rather than the timed dataframe. Defaults to False.
+            **kwds: Keyword arguments:
+
+                - **df_partitions**: A sequence of dataframe partitions, or the
+                  number of the dataframe partitions to use. Defaults to all partitions.
+
+        Raises:
+            ValueError: Raised if no data are binned.
+            ValueError: Raised if 'axis' not in binned coordinates.
+            ValueError: Raised if config["dataframe"]["time_stamp_alias"] not found
+                in the dataframe.
+
+        Returns:
+            xr.DataArray: The computed normalization histogram (in TimeStamp units
+            per bin).
+        """
+        if self._binned is None:
+            raise ValueError("Need to bin data first!")
+        if axis not in self._binned.coords:
+            raise ValueError(f"Axis '{axis}' not found in binned data!")
+
+        df_partitions: Union[int, Sequence[int]] = kwds.pop("df_partitions", None)
+        if isinstance(df_partitions, int):
+            df_partitions = list(range(0, min(df_partitions, self._dataframe.npartitions)))
+        if use_time_stamps or self._timed_dataframe is None:
+            if df_partitions is not None:
+                self._normalization_histogram = normalization_histogram_from_timestamps(
+                    self._dataframe.partitions[df_partitions],
+                    axis,
+                    self._binned.coords[axis].values,
+                    self._config["dataframe"]["time_stamp_alias"],
+                )
+            else:
+                self._normalization_histogram = normalization_histogram_from_timestamps(
+                    self._dataframe,
+                    axis,
+                    self._binned.coords[axis].values,
+                    self._config["dataframe"]["time_stamp_alias"],
+                )
+        else:
+            if df_partitions is not None:
+                self._normalization_histogram = normalization_histogram_from_timed_dataframe(
+                    self._timed_dataframe.partitions[df_partitions],
+                    axis,
+                    self._binned.coords[axis].values,
+                    self._config["dataframe"]["timed_dataframe_unit_time"],
+                )
+            else:
+                self._normalization_histogram = normalization_histogram_from_timed_dataframe(
+                    self._timed_dataframe,
+                    axis,
+                    self._binned.coords[axis].values,
+                    self._config["dataframe"]["timed_dataframe_unit_time"],
+                )
+
+        return self._normalization_histogram
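A hedged sketch of the manual normalization workflow built on this method; the scanned "delay" axis and the bin settings are assumptions:

```python
binned = sp.compute(
    bins=[100, 200],
    axes=["delay", "energy"],
    ranges=[(-1.0, 1.0), (-3.0, 1.0)],
)
hist = sp.get_normalization_histogram(axis="delay")
normalized = binned / hist  # xarray aligns the division along the shared "delay" axis
```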
+ +
[docs] def view_event_histogram(
+        self,
+        dfpid: int,
+        ncol: int = 2,
+        bins: Sequence[int] = None,
+        axes: Sequence[str] = None,
+        ranges: Sequence[Tuple[float, float]] = None,
+        backend: str = "bokeh",
+        legend: bool = True,
+        histkwds: dict = None,
+        legkwds: dict = None,
+        **kwds,
+    ):
+        """Plot individual histograms of specified dimensions (axes) from a selected
+        dataframe partition.
+
+        Args:
+            dfpid (int): Number of the data frame partition to look at.
+            ncol (int, optional): Number of columns in the plot grid. Defaults to 2.
+            bins (Sequence[int], optional): Number of bins to use for the specified
+                axes. Defaults to config["histogram"]["bins"].
+            axes (Sequence[str], optional): Names of the axes to display.
+                Defaults to config["histogram"]["axes"].
+            ranges (Sequence[Tuple[float, float]], optional): Value ranges of all
+                specified axes. Defaults to config["histogram"]["ranges"].
+            backend (str, optional): Backend of the plotting library
+                ('matplotlib' or 'bokeh'). Defaults to "bokeh".
+            legend (bool, optional): Option to include a legend in the histogram plots.
+                Defaults to True.
+            histkwds (dict, optional): Keyword arguments for histograms
+                (see ``matplotlib.pyplot.hist()``). Defaults to {}.
+            legkwds (dict, optional): Keyword arguments for legend
+                (see ``matplotlib.pyplot.legend()``). Defaults to {}.
+            **kwds: Extra keyword arguments passed to
+                ``sed.diagnostics.grid_histogram()``.
+
+        Raises:
+            TypeError: Raised when the input values are not of the correct type.
+        """
+        if bins is None:
+            bins = self._config["histogram"]["bins"]
+        if axes is None:
+            axes = self._config["histogram"]["axes"]
+        axes = list(axes)
+        for loc, axis in enumerate(axes):
+            if axis.startswith("@"):
+                axes[loc] = self._config["dataframe"].get(axis.strip("@"))
+        if ranges is None:
+            ranges = list(self._config["histogram"]["ranges"])
+            for loc, axis in enumerate(axes):
+                if axis == self._config["dataframe"]["tof_column"]:
+                    ranges[loc] = np.asarray(ranges[loc]) / 2 ** (
+                        self._config["dataframe"]["tof_binning"] - 1
+                    )
+                elif axis == self._config["dataframe"]["adc_column"]:
+                    ranges[loc] = np.asarray(ranges[loc]) / 2 ** (
+                        self._config["dataframe"]["adc_binning"] - 1
+                    )
+
+        input_types = map(type, [axes, bins, ranges])
+        allowed_types = [list, tuple]
+
+        df = self._dataframe
+
+        if not set(input_types).issubset(allowed_types):
+            raise TypeError(
+                "Inputs of axes, bins, ranges need to be list or tuple!",
+            )
+
+        # Read out the values for the specified groups
+        group_dict_dd = {}
+        dfpart = df.get_partition(dfpid)
+        cols = dfpart.columns
+        for ax in axes:
+            group_dict_dd[ax] = dfpart.values[:, cols.get_loc(ax)]
+        group_dict = ddf.compute(group_dict_dd)[0]
+
+        # Plot multiple histograms in a grid
+        grid_histogram(
+            group_dict,
+            ncol=ncol,
+            rvs=axes,
+            rvbins=bins,
+            rvranges=ranges,
+            backend=backend,
+            legend=legend,
+            histkwds=histkwds,
+            legkwds=legkwds,
+            **kwds,
+        )
+ +
[docs] def save(
+        self,
+        faddr: str,
+        **kwds,
+    ):
+        """Saves the binned data to the provided path and filename.
+
+        Args:
+            faddr (str): Path and name of the file to write. Its extension determines
+                the file type to write. Valid file types are:
+
+                - "*.tiff", "*.tif": Saves a TIFF stack.
+                - "*.h5", "*.hdf5": Saves an HDF5 file.
+                - "*.nxs", "*.nexus": Saves a NeXus file.
+
+            **kwds: Keyword arguments, which are passed to the writer functions:
+                For TIFF writing:
+
+                - **alias_dict**: Dictionary of dimension aliases to use.
+
+                For HDF5 writing:
+
+                - **mode**: hdf5 read/write mode. Defaults to "w".
+
+                For NeXus:
+
+                - **reader**: Name of the pynxtools reader to use.
+                  Defaults to config["nexus"]["reader"]
+                - **definition**: NeXus application definition to use for saving.
+                  Must be supported by the used ``reader``. Defaults to
+                  config["nexus"]["definition"]
+                - **input_files**: A list of input files to pass to the reader.
+                  Defaults to config["nexus"]["input_files"]
+                - **eln_data**: An electronic-lab-notebook file in '.yaml' format
+                  to add to the list of files to pass to the reader.
+        """
+        if self._binned is None:
+            raise NameError("Need to bin data first!")
+
+        if self._normalized is not None:
+            data = self._normalized
+        else:
+            data = self._binned
+
+        extension = pathlib.Path(faddr).suffix
+
+        if extension in (".tif", ".tiff"):
+            to_tiff(
+                data=data,
+                faddr=faddr,
+                **kwds,
+            )
+        elif extension in (".h5", ".hdf5"):
+            to_h5(
+                data=data,
+                faddr=faddr,
+                **kwds,
+            )
+        elif extension in (".nxs", ".nexus"):
+            try:
+                reader = kwds.pop("reader", self._config["nexus"]["reader"])
+                definition = kwds.pop(
+                    "definition",
+                    self._config["nexus"]["definition"],
+                )
+                input_files = kwds.pop(
+                    "input_files",
+                    self._config["nexus"]["input_files"],
+                )
+            except KeyError as exc:
+                raise ValueError(
+                    "The nexus reader, definition and input files need to be provided!",
+                ) from exc
+
+            if isinstance(input_files, str):
+                input_files = [input_files]
+
+            if "eln_data" in kwds:
+                input_files.append(kwds.pop("eln_data"))
+
+            to_nexus(
+                data=data,
+                faddr=faddr,
+                reader=reader,
+                definition=definition,
+                input_files=input_files,
+                **kwds,
+            )
+
+        else:
+            raise NotImplementedError(
+                f"Unrecognized file format: {extension}.",
+            )
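A hedged sketch of the three writers; the NeXus reader, definition, and config file names are illustrative and must match the installed pynxtools setup (normally they would come from the config):

```python
sp.save("binned.h5")      # HDF5; pass mode="a" to append to an existing file
sp.save("binned.tiff")    # ImageJ-compatible TIFF stack
sp.save(
    "binned.nxs",
    reader="mpes",
    definition="NXmpes",
    input_files=["mpes_config.json"],
)
```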
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/dataset/dataset.html b/sed/2.1.0/_modules/sed/dataset/dataset.html
new file mode 100644
index 0000000..05a4a62
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/dataset/dataset.html
@@ -0,0 +1,905 @@

Source code for sed.dataset.dataset

+"""This module provides a Dataset class to download and extract datasets from web.
+These datasets are defined in a JSON file. The Dataset class implements these features
+Easy API:
+from sed.dataset import datasets
+datasets.get("NAME")
+"""
+from __future__ import annotations
+
+import os
+import shutil
+import zipfile
+from datetime import datetime
+
+import requests
+from tqdm.auto import tqdm
+
+from sed.core.config import load_config
+from sed.core.config import parse_config
+from sed.core.config import save_config
+from sed.core.config import USER_CONFIG_PATH
+from sed.core.logging import setup_logging
+
+
+# Configure logging
+logger = setup_logging(__name__)
+
+
+
[docs]class DatasetsManager: + """Class to manage adding and removing datasets from the JSON file.""" + + NAME = "datasets" + FILENAME = NAME + ".json" + json_path = {} + json_path["user"] = os.path.join(USER_CONFIG_PATH, FILENAME) + json_path["module"] = os.path.join(os.path.dirname(__file__), FILENAME) + json_path["folder"] = "./" + FILENAME + +
[docs] @staticmethod + def load_datasets_dict() -> dict: + """ + Loads the datasets configuration dictionary from the user's datasets JSON file. + + If the file does not exist, it copies the default datasets JSON file from the module + directory to the user's datasets directory. + + Returns: + dict: The datasets dict loaded from the user's datasets JSON file. + """ + if not os.path.exists(DatasetsManager.json_path["user"]): + shutil.copy(DatasetsManager.json_path["module"], DatasetsManager.json_path["user"]) + + return parse_config( + folder_config=DatasetsManager.json_path["folder"], + system_config=DatasetsManager.json_path["user"], + default_config=DatasetsManager.json_path["module"], + verbose=False, + )
+ +
[docs] @staticmethod + def add(data_name: str, info: dict, levels: list = ["user"]): + """ + Adds a new dataset to the datasets JSON file. + + Args: + data_name (str): Name of the dataset. + info (dict): Information about the dataset. + levels (list): List of levels to add the dataset to. Default is ["user"]. + """ + for level in levels: + path = DatasetsManager.json_path[level] + json_dict = load_config(path) if os.path.exists(path) else {} + # if data_name already exists, throw error + if data_name in json_dict: + error_message = f"Dataset {data_name} already exists in {level} datasets.json." + logger.error(error_message) + raise ValueError(error_message) + json_dict[data_name] = info + save_config(json_dict, path) + logger.info(f"Added {data_name} dataset to {level} datasets.json")
+ +
[docs] @staticmethod + def remove(data_name: str, levels: list = ["user"]): + """ + Removes a dataset from the datasets JSON file. + + Args: + data_name (str): Name of the dataset. + levels (list): List of levels to remove the dataset from. Default is ["user"]. + """ + for level in levels: + path = DatasetsManager.json_path[level] + if os.path.exists(path): + json_dict = load_config(path) + del json_dict[data_name] + save_config(json_dict, path, overwrite=True) + logger.info(f"Removed {data_name} dataset from {level} datasets.json")
+ + +
[docs]class Dataset: + def __init__(self): + self._datasets: dict = DatasetsManager.load_datasets_dict() + self._dir: str = None + self._subdirs: list[str] = None + self._data_name: str = None + self._state: dict = None + self.subdirs: list[str] = None + self.dir: str = None + + @property + def available(self) -> list: + """ + Returns a list of available datasets. + + Returns: + list: List of available datasets. + """ + self._datasets = DatasetsManager.load_datasets_dict() + # remove Test from available datasets + return [dataset for dataset in self._datasets if dataset != "Test"] + + def _check_dataset_availability(self) -> dict: + """ + Checks if the specified dataset is available in the predefined list of datasets. + + Returns: + dict: The dataset information if available. + + Raises: + ValueError: If the dataset is not found in the predefined list. + """ + if self._data_name not in self._datasets: + error_message = ( + f"Data '{self._data_name}' is not available for fetching.\n" + f"Available datasets are: {self.available}" + ) + logger.error(error_message) + raise ValueError(error_message) + return self._datasets.get(self._data_name) + + @property + def data_name(self) -> str: + """Get the data name.""" + return self._data_name + + @data_name.setter + def data_name(self, value: str): + """Set the data name and update the state.""" + self._data_name = value + self._datasets = DatasetsManager.load_datasets_dict() + self._state = self._check_dataset_availability() + self._subdirs = self._state.get("subdirs", []) + + @property + def existing_data_paths(self) -> list: + """Get paths where dataset exists.""" + if not self._data_name: + raise ValueError("Data name not set.") + return self._state.get("data_path", []) + + def _set_data_dir( + self, + root_dir: str, + use_existing: bool, + ): + """ + Determines and sets the data path for a dataset. If a data path is not provided, + it uses the existing data path or creates a new one. It also notifies the user + if the specified data path differs from an existing data path. + + Args: + root_dir (str): The desired path where the dataset should be stored. + use_existing (bool): Whether to use the existing data path. + """ + existing_data_path = self.existing_data_paths[0] if self.existing_data_paths else None + if use_existing and existing_data_path: + if existing_data_path != root_dir: + logger.info( + f"Not downloading {self._data_name} data as it already exists " + f'at "{existing_data_path}".\n' + "Set 'use_existing' to False if you want to download to a new location.", + ) + dir_ = existing_data_path + path_source = "existing" + else: + if not root_dir: + root_dir = os.getcwd() + path_source = "default" + else: + path_source = "specified" + dir_ = os.path.join(root_dir, DatasetsManager.NAME, self._data_name) + + self._dir = os.path.abspath(dir_) + logger.info(f'Using {path_source} data path for "{self._data_name}": "{self._dir}"') + + if not os.path.exists(self._dir): + os.makedirs(self._dir) + logger.info(f"Created new directory at {self._dir}") + + def _get_file_list(self, ignore_zip: bool = True) -> list[str]: + """ + Returns a list of file paths in the directory and its subdirectories. + + Args: + ignore_zip (bool): Whether to ignore ZIP files. Default is True. + + Returns: + List[str]: List of file paths in the directory and its subdirectories. 
+ """ + file_paths = [] + + # Walk through directory and its subdirectories + for root, _, files in os.walk(self._dir): + for file in files: + # Ignore hidden files and ZIP files if ignore_zip is True + if not file.startswith(".") and (not ignore_zip or not file.endswith(".zip")): + file_path = os.path.join(root, file) + relative_path = os.path.relpath(file_path, self._dir) + file_paths.append(relative_path) + + return file_paths + + def _download_data( + self, + data_url: str, + chunk_size: int = 1024 * 32, + ): + """ + Downloads data from the specified URL. + + Args: + data_url (str): URL of the data. + chunk_size (int): Size of the chunks to download. Default is 32 KB. + """ + zip_file_path = os.path.join(self._dir, f"{self._data_name}.zip") + + if os.path.exists(zip_file_path): + existing_file_size = os.path.getsize(zip_file_path) + else: + existing_file_size = 0 + + headers = {"Range": f"bytes={existing_file_size}-"} + response = requests.get(data_url, headers=headers, stream=True) + + if response.status_code == 416: # Range not satisfiable, file is already fully downloaded + logger.info(f"{self._data_name} data is already fully downloaded.") + + total_length = int(response.headers.get("content-length", 0)) + if response.status_code == 206: # Partial content + total_size = existing_file_size + total_length + else: + total_size = total_length if total_length > 0 else None + + mode = "ab" if existing_file_size > 0 else "wb" + with open(zip_file_path, mode) as f, tqdm( + total=total_size, + initial=existing_file_size, + unit="B", + unit_scale=True, + unit_divisor=1024, + disable=total_size is None, + ) as pbar: + for chunk in response.iter_content(chunk_size=chunk_size): + if chunk: # filter out keep-alive new chunks + f.write(chunk) + pbar.update(len(chunk)) + + logger.info("Download complete.") + + def _extract_data(self, remove_zip: bool = True) -> None: + """ + Extracts data from a ZIP file. + """ + zip_file_path = os.path.join(self._dir, f"{self._data_name}.zip") + + extracted_files = set() + total_files = 0 + + # Check if any subdirectory already contains files + with zipfile.ZipFile(zip_file_path, "r") as zip_ref: + total_files = len(zip_ref.infolist()) + for file in zip_ref.infolist(): + extracted_file_path = os.path.join(self._dir, file.filename) + if ( + os.path.exists(extracted_file_path) + and os.path.getsize(extracted_file_path) == file.file_size + ): + extracted_files.add(file.filename) + + if len(extracted_files) == total_files: + logger.info(f"{self._data_name} data is already fully extracted.") + return + + logger.info(f"Extracting {self._data_name} data...") + with zipfile.ZipFile(zip_file_path, "r") as zip_ref: + with tqdm(total=total_files, unit="file") as pbar: + for file in zip_ref.infolist(): + if file.filename in extracted_files: + pbar.update(1) + continue + zip_ref.extract(file, self._dir) + # Preserve original timestamp + timestamp = datetime(*file.date_time).timestamp() + os.utime(os.path.join(self._dir, file.filename), (timestamp, timestamp)) + pbar.update(1) + logger.info(f"{self._data_name} data extracted successfully.") + + if remove_zip: + os.remove(zip_file_path) + logger.info(f"Removed {self._data_name}.zip file.") + + def _rearrange_data(self) -> None: + """ + Moves files to the main directory if specified. 
+ """ + for subdir in self._subdirs: + source_path = os.path.join(self._dir, subdir) + if os.path.isdir(source_path): + logger.info(f"Rearranging files in {subdir}.") + + # Count the total number of files to move + total_files = sum(len(files) for _, _, files in os.walk(source_path)) + + with tqdm(total=total_files, unit="file") as pbar: + for root, _, files in os.walk(source_path): + for file in files: + shutil.move(os.path.join(root, file), os.path.join(self._dir, file)) + pbar.update(1) + + logger.info("File movement complete.") + shutil.rmtree(source_path) + else: + error_message = f"Subdirectory {subdir} not found." + logger.error(error_message) + raise FileNotFoundError(error_message) + self._subdirs = [] + logger.info("Rearranging complete.") + +
[docs] def get( + self, + data_name: str, + **kwargs, + ): + """ + Fetches the specified data and extracts it to the given data path. + + Args: + data_name (str): Name of the data to fetch. + root_dir (str): Path where the data should be stored. Default is the current directory. + use_existing (bool): Whether to use the existing data path. Default is True. + remove_zip (bool): Whether to remove the ZIP file after extraction. Default is True. + ignore_zip (bool): Whether to ignore ZIP files when listing files. Default is True. + """ + self.data_name = data_name # sets self._data_name and self._state + + self._set_data_dir( + kwargs.get("root_dir", None), + kwargs.get("use_existing", True), + ) + files_in_dir = self._get_file_list(kwargs.get("ignore_zip", True)) + file_list: list = self._state.get("files", []) + + # if all files are present, skip download + if file_list and all(file in files_in_dir for file in file_list): + logger.info(f"{self._data_name} data is already present.") + else: + url: str = self._state.get("url") + self._download_data(url) + self._extract_data(kwargs.get("remove_zip", True)) + if self._state.get("rearrange_files", False): + self._rearrange_data() + + # Update datasets JSON + self._state["files"] = self._get_file_list() + existing = set(self.existing_data_paths) + existing.add(self._dir) + self._state["data_path"] = list(existing) + save_config({self._data_name: self._state}, DatasetsManager.json_path["user"]) + + self.subdirs = [os.path.join(self._dir, subdir) for subdir in self._subdirs] + self.dir = self._dir
+ +
[docs] def remove(self, data_name: str, instance: str = "all"): + """ + Removes directories of all or defined instances of the specified dataset. + + Args: + data_name (str): Name of the dataset. + instance (str): Name of the instance to remove. Default is "all". + """ + self.data_name = data_name + existing = self.existing_data_paths + if not existing: + logger.info(f"{data_name} data is not present.") + return + + if instance == "all": + for path in existing: + if os.path.exists(path): + shutil.rmtree(path) + logger.info(f"Removed {path}") + existing.clear() + else: + if instance not in existing: + logger.info(f"{instance} instance of {data_name} data is not present.") + return + if os.path.exists(instance): + shutil.rmtree(instance) + logger.info(f"Removed {instance}") + existing.remove(instance) + + # Update datasets JSON + self._state["data_path"] = existing + save_config({self._data_name: self._state}, DatasetsManager.json_path["user"])
+ + +dataset = Dataset() +
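A hedged usage sketch of the module-level `dataset` instance defined above; the dataset name and target directory are examples, and the name must be one of `dataset.available`:

```python
from sed.dataset import dataset

print(dataset.available)                         # names defined in datasets.json
dataset.get("WSe2", root_dir="./datasets_demo")  # download and extract
print(dataset.dir)                               # absolute path of the extracted data
print(dataset.subdirs)                           # dataset-defined subdirectories, if any

dataset.remove("WSe2")                           # delete all downloaded instances again
```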
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/diagnostics.html b/sed/2.1.0/_modules/sed/diagnostics.html
new file mode 100644
index 0000000..7925ad7
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/diagnostics.html
@@ -0,0 +1,650 @@

Source code for sed.diagnostics

+"""This module contains diagnostic output functions for the sed module
+
+"""
+from typing import Sequence
+from typing import Tuple
+
+import bokeh.plotting as pbk
+import matplotlib.pyplot as plt
+import numpy as np
+from bokeh.io import output_notebook
+from bokeh.layouts import gridplot
+
+
+
[docs]def plot_single_hist( + histvals: np.ndarray, + edges: np.ndarray, + legend: str = None, + **kwds, +) -> pbk.figure: + """Bokeh-based plotting of a single histogram with legend and tooltips. + + Args: + histvals (np.ndarray): Histogram counts (e.g. vertical axis). + edges (np.ndarray): Histogram edge values (e.g. horizontal axis). + legend (str, optional): Text for the plot legend. Defaults to None. + **kwds: Keyword arguments for ``bokeh.plotting.figure().quad()``. + + Returns: + pbk.figure: An instance of 'bokeh.plotting.figure' as a plot handle. + """ + ttp = kwds.pop("tooltip", [("(x, y)", "($x, $y)")]) + + fig = pbk.figure(background_fill_color="white", tooltips=ttp) + fig.quad( + top=histvals, + bottom=0, + left=edges[:-1], + right=edges[1:], + line_color="white", + alpha=0.8, + legend_label=legend, + **kwds, + ) + + fig.y_range.start = 0 # type: ignore + fig.legend.location = "top_right" + fig.grid.grid_line_color = "lightgrey" + + return fig
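A small hedged example of feeding this helper from ``np.histogram`` output (the data are synthetic):

```python
import numpy as np

vals = np.random.normal(size=10_000)
histvals, edges = np.histogram(vals, bins=100, range=(-4, 4))
fig = plot_single_hist(histvals, edges, legend="noise")
pbk.show(fig)  # pbk is bokeh.plotting, imported at the top of this module
```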
+ + +
[docs]def grid_histogram( + dct: dict, + ncol: int, + rvs: Sequence, + rvbins: Sequence, + rvranges: Sequence[Tuple[float, float]], + backend: str = "bokeh", + legend: bool = True, + histkwds: dict = None, + legkwds: dict = None, + **kwds, +): + """Grid plot of multiple 1D histograms. + + Args: + dct (dict): Dictionary containing the name and values of the random variables. + ncol (int): Number of columns in the plot grid. + rvs (Sequence): List of names for the random variables (rvs). + rvbins (Sequence): Bin values for all random variables. + rvranges (Sequence[Tuple[float, float]]): Value ranges of all random variables. + backend (str, optional): Backend for making the plot ('matplotlib' or 'bokeh'). + Defaults to "bokeh". + legend (bool, optional): Option to include a legend in each histogram plot. + Defaults to True. + histkwds (dict, optional): Keyword arguments for histogram plots. + Defaults to None. + legkwds (dict, optional): Keyword arguments for legends. Defaults to None. + **kwds: Additional keyword arguments. + """ + if histkwds is None: + histkwds = {} + if legkwds is None: + legkwds = {} + + figsz = kwds.pop("figsize", (14, 8)) + + if backend == "matplotlib": + nrv = len(rvs) + nrow = int(np.ceil(nrv / ncol)) + histtype = kwds.pop("histtype", "step") + + fig, ax = plt.subplots(nrow, ncol, figsize=figsz) + otherax = ax.copy() + for i, zipped in enumerate(zip(rvs, rvbins, rvranges)): + # Make each histogram plot + rvname, rvbin, rvrg = zipped + try: + axind = np.unravel_index(i, (nrow, ncol)) + ax[axind].hist( + dct[rvname], + bins=rvbin, + range=rvrg, + label=rvname, + histtype=histtype, + **histkwds, + ) + if legend: + ax[axind].legend(fontsize=15, **legkwds) + + otherax[axind] = None + + except IndexError: + ax[i].hist( + dct[rvname], + bins=rvbin, + range=rvrg, + label=rvname, + histtype=histtype, + **histkwds, + ) + if legend: + ax[i].legend(fontsize=15, **legkwds) + + otherax[i] = None + + for oax in otherax.flatten(): + if oax is not None: + fig.delaxes(oax) + + elif backend == "bokeh": + output_notebook(hide_banner=True) + + plots = [] + for i, zipped in enumerate(zip(rvs, rvbins, rvranges)): + rvname, rvbin, rvrg = zipped + histvals, edges = np.histogram(dct[rvname], bins=rvbin, range=rvrg) + + if legend: + plots.append( + plot_single_hist( + histvals, + edges, + legend=rvname, + **histkwds, + ), + ) + else: + plots.append( + plot_single_hist(histvals, edges, legend=None, **histkwds), + ) + + # Make grid plot + pbk.show( + gridplot( + plots, # type: ignore + ncols=ncol, + width=figsz[0] * 30, + height=figsz[1] * 28, + ), + )
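A hedged sketch comparing two synthetic random variables with the matplotlib backend:

```python
dct = {
    "X": np.random.uniform(0, 10, size=100_000),
    "Y": np.random.normal(5, 1, size=100_000),
}
grid_histogram(
    dct,
    ncol=2,
    rvs=["X", "Y"],
    rvbins=[100, 100],
    rvranges=[(0.0, 10.0), (0.0, 10.0)],
    backend="matplotlib",
)
```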
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/io/hdf5.html b/sed/2.1.0/_modules/sed/io/hdf5.html
new file mode 100644
index 0000000..684d486
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/io/hdf5.html
@@ -0,0 +1,679 @@

Source code for sed.io.hdf5

+"""This module contains hdf5 file input/output functions for the sed.io module
+
+"""
+from typing import Union
+
+import h5py
+import numpy as np
+import xarray as xr
+
+
+def recursive_write_metadata(h5group: h5py.Group, node: dict):
+    """Recurses through a python dictionary and writes it into an hdf5 file.
+
+    Args:
+        h5group (h5py.Group): hdf5 group element where to store the current dict node
+            to.
+        node (dict): dictionary node to store
+
+    Raises:
+        Warning: warns if elements have been converted into strings for saving.
+        ValueError: Raised when elements cannot be saved even as strings.
+    """
+    for key, item in node.items():
+        if isinstance(
+            item,
+            (
+                np.ndarray,
+                np.int64,
+                np.float64,
+                str,
+                bytes,
+                int,
+                float,
+                list,
+            ),
+        ):
+            try:
+                h5group.create_dataset(key, data=item)
+            except TypeError:
+                h5group.create_dataset(key, data=str(item))
+                print(f"Saved {key} as string.")
+        elif isinstance(item, dict):
+            group = h5group.create_group(key)
+            recursive_write_metadata(group, item)
+        else:
+            try:
+                h5group.create_dataset(key, data=str(item))
+                print(f"Saved {key} as string.")
+            except BaseException as exc:
+                raise ValueError(
+                    f"Unknown error occurred, cannot save {item} of type {type(item)}.",
+                ) from exc
+
+
+def recursive_parse_metadata(
+    node: Union[h5py.Group, h5py.Dataset],
+) -> dict:
+    """Recurses through an hdf5 file, and parse it into a dictionary.
+
+    Args:
+        node (Union[h5py.Group, h5py.Dataset]): hdf5 group or dataset to parse into
+            dictionary.
+
+    Returns:
+        dict: Dictionary of elements in the hdf5 path contained in node
+    """
+    if isinstance(node, h5py.Group):
+        dictionary = {}
+        for key, value in node.items():
+            dictionary[key] = recursive_parse_metadata(value)
+
+    else:
+        entry = node[...]
+        try:
+            dictionary = entry.item()
+            if isinstance(dictionary, (bytes, bytearray)):
+                dictionary = dictionary.decode()
+        except ValueError:
+            dictionary = entry
+
+    return dictionary
+
+
+
[docs]def to_h5(data: xr.DataArray, faddr: str, mode: str = "w"): + """Save xarray formatted data to hdf5 + + Args: + data (xr.DataArray): input data + faddr (str): complete file name (including path) + mode (str, optional): hdf5 read/write mode. Defaults to "w". + + Raises: + Warning: subfunction warns if elements have been converted into strings for + saving. + """ + with h5py.File(faddr, mode) as h5_file: + print(f"saving data to {faddr}") + + # Saving data, make a single dataset + dataset = h5_file.create_dataset("binned/BinnedData", data=data.data) + try: + dataset.attrs["units"] = data.attrs["units"] + dataset.attrs["long_name"] = data.attrs["long_name"] + except KeyError: + pass + + # Saving axes + axes_group = h5_file.create_group("axes") + axes_number = 0 + for bin_name in data.dims: + axis = axes_group.create_dataset( + f"ax{axes_number}", + data=data.coords[bin_name], + ) + axis.attrs["name"] = bin_name + try: + axis.attrs["unit"] = data.coords[bin_name].attrs["unit"] + except KeyError: + pass + axes_number += 1 + + if "metadata" in data.attrs and isinstance( + data.attrs["metadata"], + dict, + ): + meta_group = h5_file.create_group("metadata") + + recursive_write_metadata(meta_group, data.attrs["metadata"]) + + print("Saving complete!")
+ + +
[docs]def load_h5(faddr: str, mode: str = "r") -> xr.DataArray:
+    """Read xarray data from formatted hdf5 file
+
+    Args:
+        faddr (str): complete file name (including path)
+        mode (str, optional): hdf5 read/write mode. Defaults to "r".
+
+    Raises:
+        ValueError: Raised if data or axes are not found in the file.
+
+    Returns:
+        xr.DataArray: output xarray data
+    """
+    with h5py.File(faddr, mode) as h5_file:
+        # Reading data array
+        try:
+            data = np.asarray(h5_file["binned"]["BinnedData"])
+        except KeyError as exc:
+            raise ValueError(
+                f"Wrong Data Format, the BinnedData were not found. The error was {exc}.",
+            ) from exc
+
+        # Reading the axes
+        bin_axes = []
+        bin_names = []
+
+        try:
+            for axis in h5_file["axes"]:
+                bin_axes.append(h5_file["axes"][axis])
+                bin_names.append(h5_file["axes"][axis].attrs["name"])
+        except KeyError as exc:
+            raise ValueError(
+                f"Wrong Data Format, the axes were not found. The error was {exc}.",
+            ) from exc
+
+        # load metadata
+        metadata = None
+        if "metadata" in h5_file:
+            metadata = recursive_parse_metadata(h5_file["metadata"])
+
+        coords = {}
+        for name, vals in zip(bin_names, bin_axes):
+            coords[name] = vals
+
+        xarray = xr.DataArray(data, dims=bin_names, coords=coords)
+
+        try:
+            for axis in range(len(bin_axes)):
+                xarray[bin_names[axis]].attrs["unit"] = h5_file["axes"][f"ax{axis}"].attrs["unit"]
+            xarray.attrs["units"] = h5_file["binned"]["BinnedData"].attrs["units"]
+            xarray.attrs["long_name"] = h5_file["binned"]["BinnedData"].attrs["long_name"]
+        except (KeyError, TypeError):
+            pass
+
+        if metadata is not None:
+            xarray.attrs["metadata"] = metadata
+
+        return xarray
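A hedged round-trip sketch for the two functions above; the array contents, dimension names, and metadata are made up:

```python
import numpy as np
import xarray as xr

arr = xr.DataArray(
    np.random.rand(10, 20),
    dims=["kx", "energy"],
    coords={"kx": np.linspace(-1, 1, 10), "energy": np.linspace(-2, 0, 20)},
)
arr.attrs["metadata"] = {"sample": {"name": "test"}}  # written recursively to /metadata
to_h5(arr, "binned.h5")
restored = load_h5("binned.h5")
```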
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/io/nexus.html b/sed/2.1.0/_modules/sed/io/nexus.html
new file mode 100644
index 0000000..55aadfe
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/io/nexus.html
@@ -0,0 +1,536 @@

Source code for sed.io.nexus

+"""This module contains NuXus file input/output functions for the sed.io module.
+The conversion is based on the pynxtools from the FAIRmat NFDI consortium.
+For details, see https://github.com/nomad-coe/nomad-parser-nexus
+
+"""
+from typing import Sequence
+from typing import Union
+
+import xarray as xr
+from pynxtools.dataconverter.convert import convert
+
+
+
[docs]def to_nexus(
+    data: xr.DataArray,
+    faddr: str,
+    reader: str,
+    definition: str,
+    input_files: Union[str, Sequence[str]],
+    **kwds,
+):
+    """Saves the xarray provided to a NeXus file at faddr, using the provided reader,
+    NeXus definition and configuration file.
+
+    Args:
+        data (xr.DataArray): The data to save, containing metadata definitions in
+            data._attrs["metadata"].
+        faddr (str): The file path to save to.
+        reader (str): The name of the NeXus reader to use.
+        definition (str): The NeXus definition to use.
+        input_files (Union[str, Sequence[str]]): The file path or paths to the additional
+            files to use.
+        **kwds: Keyword arguments for ``pynxtools.dataconverter.convert.convert()``.
+    """
+
+    if isinstance(input_files, str):
+        input_files = tuple([input_files])
+    else:
+        input_files = tuple(input_files)
+
+    convert(
+        input_file=input_files,
+        objects=(data,),  # one-element tuple; the bare (data) used before was not a tuple
+        reader=reader,
+        nxdl=definition,
+        output=faddr,
+        **kwds,
+    )
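A hedged call sketch; `binned_array` is a placeholder for an xr.DataArray carrying attrs["metadata"], and the reader, definition, and config file names are assumptions that must match an installed pynxtools setup:

```python
to_nexus(
    data=binned_array,
    faddr="result.nxs",
    reader="mpes",
    definition="NXmpes",
    input_files=["mpes_config.json"],
)
```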
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/io/tiff.html b/sed/2.1.0/_modules/sed/io/tiff.html
new file mode 100644
index 0000000..e7f7bba
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/io/tiff.html
@@ -0,0 +1,709 @@

Source code for sed.io.tiff

+"""This module contains tiff file input/output functions for the sed.io module
+
+"""
+from pathlib import Path
+from typing import Sequence
+from typing import Union
+
+import numpy as np
+import tifffile
+import xarray as xr
+
+
+_IMAGEJ_DIMS_ORDER = "TZCYXS"
+_IMAGEJ_DIMS_ALIAS = {
+    "T": [
+        "delayStage",
+        "pumpProbeTime",
+        "time",
+        "delay",
+        "T",
+    ],
+    "Z": [
+        "dldTime",
+        "t",
+        "energy",
+        "e",
+        "E",
+        "binding_energy",
+        "energies",
+        "binding_energies",
+    ],
+    "C": ["C"],
+    "Y": ["dldPosY", "ky", "y", "ypos", "Y"],
+    "X": ["dldPosX", "kx", "x", "xpos", "X"],
+    "S": ["S"],
+}
+
+
+
[docs]def to_tiff(
+    data: Union[xr.DataArray, np.ndarray],
+    faddr: Union[Path, str],
+    alias_dict: dict = None,
+):
+    """Save an array as a .tiff stack compatible with ImageJ
+
+    Args:
+        data (Union[xr.DataArray, np.ndarray]): data to be saved. If a np.ndarray,
+            the order is retained. If it is an xarray.DataArray, the order is inferred
+            from alias_dict instead. ImageJ likes tiff files with axis order as
+            TZCYXS. Therefore, best axis order in input should be: Time, Energy,
+            posY, posX. The channels 'C' and 'S' are automatically added and can
+            be ignored.
+        faddr (Union[Path, str]): full path and name of file to save.
+        alias_dict (dict, optional): name pairs for correct axis ordering. Keys should
+            be any of T,Z,C,Y,X,S. The corresponding value should be a dimension of the
+            xarray or the dimension number if a numpy array. This is used to sort the
+            data in the correct order for imagej standards. If None it tries to guess
+            the order from the name of the axes or assumes T,Z,C,Y,X,S order for numpy
+            arrays. Defaults to None.
+
+    Raises:
+        AttributeError: if more than one axis corresponds to a single dimension
+        NotImplementedError: if data is not 2,3 or 4 dimensional
+        TypeError: if data is not a np.ndarray or an xarray.DataArray
+    """
+    out: Union[np.ndarray, xr.DataArray] = None
+    if isinstance(data, np.ndarray):
+        # TODO: add sorting by dictionary keys
+        dim_expansions = {2: [0, 1, 2, 5], 3: [0, 2, 5], 4: [2, 5]}
+        dims = {
+            2: ["x", "y"],
+            3: ["x", "y", "energy"],
+            4: ["x", "y", "energy", "delay"],
+        }
+        try:
+            out = np.expand_dims(data, dim_expansions[data.ndim])
+        except KeyError as exc:
+            raise NotImplementedError(
+                f"Only 2-3-4D arrays supported when data is a {type(data)}",
+            ) from exc
+
+        dims_order = dims[data.ndim]
+
+    elif isinstance(data, xr.DataArray):
+        dims_order = _fill_missing_dims(list(data.dims), alias_dict=alias_dict)
+        out = data.expand_dims(
+            {dim: 1 for dim in dims_order if dim not in data.dims},
+        )
+        out = out.transpose(*dims_order)
+    else:
+        # type(data), not data.type: plain ndarrays have no .type attribute
+        raise TypeError(f"Cannot handle data of type {type(data)}")
+
+    faddr = Path(faddr).with_suffix(".tiff")
+
+    tifffile.imwrite(faddr, out.astype(np.float32), imagej=True)
+
+    print(f"Successfully saved {faddr}\n Axes order: {dims_order}")
+
+
+def _sort_dims_for_imagej(dims: Sequence, alias_dict: dict = None) -> list:
+    """Guess the order of the dimensions from the alias dictionary.
+
+    Args:
+        dims (Sequence): the list of dimensions to sort
+        alias_dict (dict, optional): name pairs for correct axis ordering. Keys should
+            be any of T,Z,C,Y,X,S. The corresponding value should be a dimension of the
+            xarray or the dimension number if a numpy array. This is used to sort the
+            data in the correct order for imagej standards. If None it tries to guess
+            the order from the name of the axes or assumes T,Z,C,Y,X,S order for numpy
+            arrays. Defaults to None.
+
+    Raises:
+        ValueError: for duplicate entries for a single imagej dimension
+        NameError: when a dimension cannot be found in the alias dictionary
+
+    Returns:
+        list: List of sorted dimension names.
+    """
+    order = _fill_missing_dims(dims=dims, alias_dict=alias_dict)
+    return [d for d in order if d in dims]
+
+
+def _fill_missing_dims(dims: Sequence, alias_dict: dict = None) -> list:
+    """Fill in the missing dimensions from the alias dictionary.
+
+    Args:
+        dims (Sequence): the list of dimensions that are provided
+        alias_dict (dict, optional): name pairs for correct axis ordering. Keys should
+            be any of T,Z,C,Y,X,S. The corresponding value should be a dimension of the
+            xarray or the dimension number if a numpy array. This is used to sort the
+            data in the correct order for imagej standards. If None it tries to guess
+            the order from the name of the axes or assumes T,Z,C,Y,X,S order for numpy
+            arrays. Defaults to None.
+
+    Raises:
+        ValueError: for duplicate entries for a single imagej dimension
+        NameError: when a dimension cannot be found in the alias dictionary
+
+    Returns:
+        list: augmented list of TIFF dimensions.
+    """
+    order: list = []
+    # overwrite the default values with the provided dict
+    if alias_dict is None:
+        alias_dict = {}
+    else:
+        for k, v in alias_dict.items():
+            assert k in _IMAGEJ_DIMS_ORDER, f"keys must all be one of {_IMAGEJ_DIMS_ORDER}"
+            if not isinstance(v, (list, tuple)):
+                alias_dict[k] = [v]
+
+    alias_dict = {**_IMAGEJ_DIMS_ALIAS, **alias_dict}
+    added_dims = 0
+    for imgj_dim in _IMAGEJ_DIMS_ORDER:
+        found_one = False
+        for dim in dims:
+            if dim in alias_dict[imgj_dim]:
+                if found_one:
+                    raise ValueError(
+                        f"Duplicate entries for {imgj_dim}: {dim} and {order[-1]} ",
+                    )
+                order.append(dim)
+                found_one = True
+        if not found_one:
+            order.append(imgj_dim)
+            added_dims += 1
+    if len(order) != len(dims) + added_dims:
+        raise NameError(
+            f"Could not interpret dimensions {[d for d in dims if d not in order]}",
+        )
+    return order
+
+
[docs]def load_tiff(
+    faddr: Union[str, Path],
+    coords: dict = None,
+    dims: Sequence = None,
+    attrs: dict = None,
+) -> xr.DataArray:
+    """Loads a tiff stack to an xarray.
+
+    The .tiff format does not retain information on the axes, so these need to
+    be manually added with the axes argument. Otherwise, this returns the data
+    only as np.ndarray.
+
+    Args:
+        faddr (Union[str, Path]): Path to file to load.
+        coords (dict, optional): The axes describing the data, following the tiff
+            stack order. Defaults to None.
+        dims (Sequence, optional): the order of the coordinates provided, considering
+            the data is ordered as TZCYXS. If None (default) it infers the order from
+            the order of the coords dictionary.
+        attrs (dict, optional): dictionary to add as attributes to the
+            xarray.DataArray. Defaults to None.
+
+    Returns:
+        xr.DataArray: an xarray representing the data loaded from the .tiff file
+    """
+    data = tifffile.imread(faddr)
+
+    if coords is None:
+        coords = {
+            k.replace("_", ""): np.linspace(0, n, n)
+            for k, n in zip(
+                _IMAGEJ_DIMS_ORDER,
+                data.shape,
+            )
+            if n > 1
+        }
+
+    data = data.squeeze()
+
+    if dims is None:
+        dims = list(coords.keys())
+
+    assert data.ndim == len(dims), (
+        f"Data dimension {data.ndim} must coincide with the number of coordinates "
+        f"{len(coords)} and dimensions {len(dims)} provided."
+    )
+    return xr.DataArray(data=data, coords=coords, dims=dims, attrs=attrs)
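A hedged round-trip sketch; since TIFF stores no axis information, the coordinates must be supplied again on loading, and they come back in ImageJ TZCYXS order (here y before x):

```python
import numpy as np
import xarray as xr

arr = xr.DataArray(
    np.random.rand(50, 60),
    dims=["x", "y"],
    coords={"x": np.arange(50), "y": np.arange(60)},
)
to_tiff(arr, "stack.tiff")
restored = load_tiff(
    "stack.tiff",
    coords={"y": np.arange(60.0), "x": np.arange(50.0)},  # TZCYXS order: y, then x
)
```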
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/loader/base/loader.html b/sed/2.1.0/_modules/sed/loader/base/loader.html
new file mode 100644
index 0000000..47627c3
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/loader/base/loader.html
@@ -0,0 +1,673 @@

Source code for sed.loader.base.loader

+"""The abstract class off of which to implement loaders."""
+import os
+from abc import ABC
+from abc import abstractmethod
+from copy import deepcopy
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+import dask.dataframe as ddf
+import numpy as np
+
+from sed.loader.utils import gather_files
+
+
+
[docs]class BaseLoader(ABC):
+    """
+    The abstract base class from which loaders are implemented.
+
+    The reader's folder name is the identifier.
+    For this BaseLoader with filename base/loader.py the ID becomes 'base'.
+
+    Args:
+        config (dict, optional): Config dictionary. Defaults to None.
+    """
+
+    # pylint: disable=too-few-public-methods
+
+    __name__ = "BaseLoader"
+
+    supported_file_types: List[str] = []
+
+    def __init__(
+        self,
+        config: dict = None,
+    ):
+        self._config = config if config is not None else {}
+
+        self.files: List[str] = []
+        self.runs: List[str] = []
+        self.metadata: Dict[Any, Any] = {}
+
[docs] @abstractmethod
+    def read_dataframe(
+        self,
+        files: Union[str, Sequence[str]] = None,
+        folders: Union[str, Sequence[str]] = None,
+        runs: Union[str, Sequence[str]] = None,
+        ftype: str = None,
+        metadata: dict = None,
+        collect_metadata: bool = False,
+        **kwds,
+    ) -> Tuple[ddf.DataFrame, ddf.DataFrame, dict]:
+        """Reads data from given files, folder, or runs and returns a dask dataframe
+        and corresponding metadata.
+
+        Args:
+            files (Union[str, Sequence[str]], optional): File path(s) to process.
+                Defaults to None.
+            folders (Union[str, Sequence[str]], optional): Path to folder(s) where files
+                are stored. Path has priority such that if it's specified, the specified
+                files will be ignored. Defaults to None.
+            runs (Union[str, Sequence[str]], optional): Run identifier(s). Corresponding
+                files will be located in the location provided by ``folders``. Takes
+                precedence over ``files`` and ``folders``. Defaults to None.
+            ftype (str, optional): File type to read ('parquet', 'json', 'csv', etc).
+                If a folder path is given, all files with the specified extension are
+                read into the dataframe in the reading order. Defaults to None.
+            metadata (dict, optional): Manual metadata dictionary. Auto-generated
+                metadata will be added to it. Defaults to None.
+            collect_metadata (bool): Option to collect metadata from files. Requires
+                a valid config dict. Defaults to False.
+            **kwds: keyword arguments. See description in respective loader.
+
+        Returns:
+            Tuple[ddf.DataFrame, ddf.DataFrame, dict]: Dask dataframe, timed dataframe,
+            and metadata read from specified files.
+        """
+
+        if metadata is None:
+            metadata = {}
+
+        if runs is not None:
+            if isinstance(runs, (str, int)):
+                runs = [runs]
+            self.runs = list(runs)
+            files = []
+            for run in runs:
+                files.extend(self.get_files_from_run_id(run, folders, **kwds))
+
+        elif folders is not None:
+            if isinstance(folders, str):
+                folders = [folders]
+            files = []
+            for folder in folders:
+                folder = os.path.realpath(folder)
+                files.extend(
+                    gather_files(
+                        folder=folder,
+                        extension=ftype,
+                        file_sorting=True,
+                        **kwds,
+                    ),
+                )
+
+        elif files is None:
+            raise ValueError(
+                "Either folder, file paths, or runs should be provided!",
+            )
+
+        if files is not None:
+            if isinstance(files, str):
+                files = [files]
+            files = [os.path.realpath(file) for file in files]
+            self.files = files
+
+        self.metadata = deepcopy(metadata)
+
+        if not files:
+            raise FileNotFoundError("No valid files or runs found!")
+
+        return None, None, None
+ +
[docs]    @abstractmethod
+    def get_files_from_run_id(
+        self,
+        run_id: str,
+        folders: Union[str, Sequence[str]] = None,
+        extension: str = None,
+        **kwds,
+    ) -> List[str]:
+        """Locate the files for a given run identifier.
+
+        Args:
+            run_id (str): The run identifier to locate.
+            folders (Union[str, Sequence[str]], optional): The directory(ies) where the raw
+                data is located. Defaults to None.
+            extension (str, optional): The file extension. Defaults to None.
+            kwds: Keyword arguments
+
+        Returns:
+            List[str]: List of files for the given run.
+        """
+        raise NotImplementedError
+ +
[docs]    @abstractmethod
+    def get_count_rate(
+        self,
+        fids: Sequence[int] = None,
+        **kwds,
+    ) -> Tuple[np.ndarray, np.ndarray]:
+        """Create count rate data for the files specified in ``fids``.
+
+        Args:
+            fids (Sequence[int], optional): The file ids to include.
+                Defaults to the list of all file ids.
+            kwds: Keyword arguments
+
+        Returns:
+            Tuple[np.ndarray, np.ndarray]: Arrays containing count rate and seconds
+            into the scan.
+        """
+        return None, None
+ +
[docs]    @abstractmethod
+    def get_elapsed_time(self, fids: Sequence[int] = None, **kwds) -> float:
+        """Return the elapsed time in the files specified in ``fids``.
+
+        Args:
+            fids (Sequence[int], optional): The file ids to include.
+                Defaults to the list of all file ids.
+            kwds: Keyword arguments
+
+        Returns:
+            float: The elapsed time in the files in seconds.
+        """
+        return None
+ + +LOADER = BaseLoader +
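For orientation, a concrete loader only needs to subclass BaseLoader, implement the four abstract methods, and expose a module-level LOADER variable (which is what sed.loader.loader_interface picks up). A minimal sketch with an invented loader name and deliberately trivial method bodies; this is not an actual sed loader:

import dask.dataframe as ddf
import pandas as pd

from sed.loader.base.loader import BaseLoader


class MyLoader(BaseLoader):
    """Hypothetical loader, for illustration only."""

    __name__ = "my_loader"
    supported_file_types = ["csv"]

    def read_dataframe(self, files=None, folders=None, runs=None, ftype="csv",
                       metadata=None, collect_metadata=False, **kwds):
        # Let the base class resolve folders/files into self.files
        super().read_dataframe(files=files, folders=folders, ftype=ftype, metadata=metadata)
        df = ddf.from_pandas(pd.concat(map(pd.read_csv, self.files)), npartitions=1)
        return df, None, self.metadata

    def get_files_from_run_id(self, run_id, folders=None, extension=None, **kwds):
        raise NotImplementedError

    def get_count_rate(self, fids=None, **kwds):
        return None, None

    def get_elapsed_time(self, fids=None, **kwds):
        return None


LOADER = MyLoader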
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/loader/flash/loader.html b/sed/2.1.0/_modules/sed/loader/flash/loader.html
new file mode 100644
index 0000000..1910283
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/loader/flash/loader.html
@@ -0,0 +1,1424 @@

Source code for sed.loader.flash.loader

+"""
+This module implements the flash data loader.
+This loader currently supports HEXTOF, WESPE, and instruments with a similar structure.
+The raw hdf5 data is combined and saved into buffer files and loaded as a dask dataframe.
+The dataframe is an amalgamation of all h5 files for a combination of runs, where the NaNs are
+automatically forward filled across different files.
+This can then be saved as a parquet file for out-of-sed processing and read back in to access other
+sed functionality.
+"""
+import time
+from functools import reduce
+from pathlib import Path
+from typing import List
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+import dask.dataframe as dd
+import h5py
+import numpy as np
+import pyarrow.parquet as pq
+from joblib import delayed
+from joblib import Parallel
+from natsort import natsorted
+from pandas import DataFrame
+from pandas import MultiIndex
+from pandas import Series
+
+from sed.core import dfops
+from sed.loader.base.loader import BaseLoader
+from sed.loader.flash.metadata import MetadataRetriever
+from sed.loader.utils import parse_h5_keys
+from sed.loader.utils import split_dld_time_from_sector_id
+
+
+
[docs]class FlashLoader(BaseLoader):
+    """
+    The class generates multi-indexed, multidimensional pandas dataframes from the new
+    FLASH data format, resolved by macrobunches, microbunches, and electrons.
+    Only the read_dataframe method (inherited and implemented here) is accessed by
+    other modules.
+    """
+
+    __name__ = "flash"
+
+    supported_file_types = ["h5"]
+
+    def __init__(self, config: dict) -> None:
+        super().__init__(config=config)
+        self.multi_index = ["trainId", "pulseId", "electronId"]
+        self.index_per_electron: MultiIndex = None
+        self.index_per_pulse: MultiIndex = None
+        self.failed_files_error: List[str] = []
+
[docs] def initialize_paths(self) -> Tuple[List[Path], Path]: + """ + Initializes the paths based on the configuration. + + Returns: + Tuple[List[Path], Path]: A tuple containing a list of raw data directories + paths and the parquet data directory path. + + Raises: + ValueError: If required values are missing from the configuration. + FileNotFoundError: If the raw data directories are not found. + """ + # Parses to locate the raw beamtime directory from config file + if "paths" in self._config["core"]: + data_raw_dir = [ + Path(self._config["core"]["paths"].get("data_raw_dir", "")), + ] + data_parquet_dir = Path( + self._config["core"]["paths"].get("data_parquet_dir", ""), + ) + + else: + try: + beamtime_id = self._config["core"]["beamtime_id"] + year = self._config["core"]["year"] + daq = self._config["dataframe"]["daq"] + except KeyError as exc: + raise ValueError( + "The beamtime_id, year and daq are required.", + ) from exc + + beamtime_dir = Path( + self._config["dataframe"]["beamtime_dir"][self._config["core"]["beamline"]], + ) + beamtime_dir = beamtime_dir.joinpath(f"{year}/data/{beamtime_id}/") + + # Use pathlib walk to reach the raw data directory + data_raw_dir = [] + raw_path = beamtime_dir.joinpath("raw") + + for path in raw_path.glob("**/*"): + if path.is_dir(): + dir_name = path.name + if dir_name.startswith("express-") or dir_name.startswith( + "online-", + ): + data_raw_dir.append(path.joinpath(daq)) + elif dir_name == daq.upper(): + data_raw_dir.append(path) + + if not data_raw_dir: + raise FileNotFoundError("Raw data directories not found.") + + parquet_path = "processed/parquet" + data_parquet_dir = beamtime_dir.joinpath(parquet_path) + + data_parquet_dir.mkdir(parents=True, exist_ok=True) + + return data_raw_dir, data_parquet_dir
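For orientation, a sketch of the two config layouts this method understands. All values below are placeholders, and any keys beyond the ones read above may differ per beamline config:

# Option 1: explicit paths (takes precedence)
config_with_paths = {
    "core": {
        "paths": {
            "data_raw_dir": "/path/to/raw",
            "data_parquet_dir": "/path/to/parquet",
        },
    },
}

# Option 2: paths derived from beamtime metadata
config_from_beamtime = {
    "core": {"beamtime_id": "11111111", "year": "2023", "beamline": "pg2"},
    "dataframe": {
        "daq": "fl1user3",
        "beamtime_dir": {"pg2": "/asap3/flash/gpfs/pg2/"},
    },
}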
+ +
[docs] def get_files_from_run_id( + self, + run_id: str, + folders: Union[str, Sequence[str]] = None, + extension: str = "h5", + **kwds, + ) -> List[str]: + """Returns a list of filenames for a given run located in the specified directory + for the specified data acquisition (daq). + + Args: + run_id (str): The run identifier to locate. + folders (Union[str, Sequence[str]], optional): The directory(ies) where the raw + data is located. Defaults to config["core"]["base_folder"]. + extension (str, optional): The file extension. Defaults to "h5". + kwds: Keyword arguments: + - daq (str): The data acquisition identifier. + + Returns: + List[str]: A list of path strings representing the collected file names. + + Raises: + FileNotFoundError: If no files are found for the given run in the directory. + """ + # Define the stream name prefixes based on the data acquisition identifier + stream_name_prefixes = self._config["dataframe"]["stream_name_prefixes"] + + if folders is None: + folders = self._config["core"]["base_folder"] + + if isinstance(folders, str): + folders = [folders] + + daq = kwds.pop("daq", self._config.get("dataframe", {}).get("daq")) + + # Generate the file patterns to search for in the directory + file_pattern = f"{stream_name_prefixes[daq]}_run{run_id}_*." + extension + + files: List[Path] = [] + # Use pathlib to search for matching files in each directory + for folder in folders: + files.extend( + natsorted( + Path(folder).glob(file_pattern), + key=lambda filename: str(filename).rsplit("_", maxsplit=1)[-1], + ), + ) + + # Check if any files are found + if not files: + raise FileNotFoundError( + f"No files found for run {run_id} in directory {str(folders)}", + ) + + # Return the list of found files + return [str(file.resolve()) for file in files]
+ + @property + def available_channels(self) -> List: + """Returns the channel names that are available for use, + excluding pulseId, defined by the json file""" + available_channels = list(self._config["dataframe"]["channels"].keys()) + available_channels.remove("pulseId") + return available_channels + +
[docs] def get_channels(self, formats: Union[str, List[str]] = "", index: bool = False) -> List[str]: + """ + Returns a list of channels associated with the specified format(s). + + Args: + formats (Union[str, List[str]]): The desired format(s) + ('per_pulse', 'per_electron', 'per_train', 'all'). + index (bool): If True, includes channels from the multi_index. + + Returns: + List[str]: A list of channels with the specified format(s). + """ + # If 'formats' is a single string, convert it to a list for uniform processing. + if isinstance(formats, str): + formats = [formats] + + # If 'formats' is a string "all", gather all possible formats. + if formats == ["all"]: + channels = self.get_channels(["per_pulse", "per_train", "per_electron"], index) + return channels + + channels = [] + for format_ in formats: + # Gather channels based on the specified format(s). + channels.extend( + key + for key in self.available_channels + if self._config["dataframe"]["channels"][key]["format"] == format_ + and key != "dldAux" + ) + # Include 'dldAuxChannels' if the format is 'per_pulse'. + if format_ == "per_pulse": + channels.extend( + self._config["dataframe"]["channels"]["dldAux"]["dldAuxChannels"].keys(), + ) + + # Include channels from multi_index if 'index' is True. + if index: + channels.extend(self.multi_index) + + return channels
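A self-contained toy call, with invented channel names; only the key structure of the config matters here (note that a "dldAux" entry must be present for the 'per_pulse' format, as the method reads its "dldAuxChannels" unconditionally):

from sed.loader.flash.loader import FlashLoader

config = {
    "dataframe": {
        "channels": {
            "pulseId": {"format": "per_electron", "group_name": "/g/pulseid/"},
            "dldPosX": {"format": "per_electron", "group_name": "/g/dldx/"},
            "bam": {"format": "per_pulse", "group_name": "/g/bam/"},
            "dldAux": {
                "format": "per_pulse",
                "group_name": "/g/aux/",
                "dldAuxChannels": {"sampleBias": 0},
            },
        },
    },
}
loader = FlashLoader(config=config)
print(loader.get_channels("per_electron"))           # ['dldPosX']
print(loader.get_channels("per_pulse", index=True))  # ['bam', 'sampleBias', 'trainId', 'pulseId', 'electronId']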
+ +
[docs] def reset_multi_index(self) -> None: + """Resets the index per pulse and electron""" + self.index_per_electron = None + self.index_per_pulse = None
+ +
[docs]    def create_multi_index_per_electron(self, h5_file: h5py.File) -> None:
+        """
+        Creates an index per electron using pulseId, for usage with the electron-
+        resolved pandas DataFrame.
+
+        Args:
+            h5_file (h5py.File): The HDF5 file object.
+
+        Notes:
+            - This method relies on the 'pulseId' channel to determine
+              the macrobunch IDs.
+            - It creates a MultiIndex with trainId, pulseId, and electronId
+              as the index levels.
+        """
+
+        # Macrobunch IDs obtained from the pulseId channel
+        [train_id, np_array] = self.create_numpy_array_per_channel(
+            h5_file,
+            "pulseId",
+        )
+
+        # Create a series with the macrobunches as index and
+        # microbunches as values
+        macrobunches = (
+            Series(
+                (np_array[i] for i in train_id.index),
+                name="pulseId",
+                index=train_id,
+            )
+            - self._config["dataframe"]["ubid_offset"]
+        )
+
+        # Explode the series to get all microbunch values per macrobunch,
+        # remove NaN values and convert to type int
+        microbunches = macrobunches.explode().dropna().astype(int)
+
+        # Create temporary index values
+        index_temp = MultiIndex.from_arrays(
+            (microbunches.index, microbunches.values),
+            names=["trainId", "pulseId"],
+        )
+
+        # Calculate the electron counts per pulseId; unique() preserves the
+        # order of appearance
+        electron_counts = index_temp.value_counts()[index_temp.unique()].values
+
+        # Series object for indexing with electrons
+        electrons = (
+            Series(
+                [np.arange(electron_counts[i]) for i in range(electron_counts.size)],
+            )
+            .explode()
+            .astype(int)
+        )
+
+        # Create a pandas MultiIndex using the exploded datasets
+        self.index_per_electron = MultiIndex.from_arrays(
+            (microbunches.index, microbunches.values, electrons),
+            names=self.multi_index,
+        )
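The explode-based index construction above can be hard to visualize; here is a standalone toy reproduction of the technique with two trains (invented toy data, no ubid_offset subtraction):

import numpy as np
from pandas import MultiIndex, Series

train_id = Series([100, 101], name="trainId")
# one array of pulse IDs per train; ragged lengths are allowed
pulse_arrays = [np.array([0, 0, 1]), np.array([2])]

macrobunches = Series(pulse_arrays, name="pulseId", index=train_id)
microbunches = macrobunches.explode().dropna().astype(int)

index_temp = MultiIndex.from_arrays(
    (microbunches.index, microbunches.values),
    names=["trainId", "pulseId"],
)
# count electrons per (trainId, pulseId) pair, preserving order of appearance
electron_counts = index_temp.value_counts()[index_temp.unique()].values
electrons = (
    Series([np.arange(electron_counts[i]) for i in range(electron_counts.size)])
    .explode()
    .astype(int)
)
index_per_electron = MultiIndex.from_arrays(
    (microbunches.index, microbunches.values, electrons),
    names=["trainId", "pulseId", "electronId"],
)
print(index_per_electron)
# → (100, 0, 0), (100, 0, 1), (100, 1, 0), (101, 2, 0)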
+ +
[docs] def create_multi_index_per_pulse( + self, + train_id: Series, + np_array: np.ndarray, + ) -> None: + """ + Creates an index per pulse using a pulse resolved channel's macrobunch ID, for usage with + the pulse resolved pandas DataFrame. + + Args: + train_id (Series): The train ID Series. + np_array (np.ndarray): The numpy array containing the pulse resolved data. + + Notes: + - This method creates a MultiIndex with trainId and pulseId as the index levels. + """ + + # Create a pandas MultiIndex, useful for comparing electron and + # pulse resolved dataframes + self.index_per_pulse = MultiIndex.from_product( + (train_id, np.arange(0, np_array.shape[1])), + names=["trainId", "pulseId"], + )
+ +
[docs] def create_numpy_array_per_channel( + self, + h5_file: h5py.File, + channel: str, + ) -> Tuple[Series, np.ndarray]: + """ + Returns a numpy array for a given channel name for a given file. + + Args: + h5_file (h5py.File): The h5py file object. + channel (str): The name of the channel. + + Returns: + Tuple[Series, np.ndarray]: A tuple containing the train ID Series and the numpy array + for the channel's data. + + """ + # Get the data from the necessary h5 file and channel + group = h5_file[self._config["dataframe"]["channels"][channel]["group_name"]] + + channel_dict = self._config["dataframe"]["channels"][channel] # channel parameters + + train_id = Series(group["index"], name="trainId") # macrobunch + + # unpacks the timeStamp or value + if channel == "timeStamp": + np_array = group["time"][()] + else: + np_array = group["value"][()] + + # Use predefined axis and slice from the json file + # to choose correct dimension for necessary channel + if "slice" in channel_dict: + np_array = np.take( + np_array, + channel_dict["slice"], + axis=1, + ) + return train_id, np_array
+ +
[docs] def create_dataframe_per_electron( + self, + np_array: np.ndarray, + train_id: Series, + channel: str, + ) -> DataFrame: + """ + Returns a pandas DataFrame for a given channel name of type [per electron]. + + Args: + np_array (np.ndarray): The numpy array containing the channel data. + train_id (Series): The train ID Series. + channel (str): The name of the channel. + + Returns: + DataFrame: The pandas DataFrame for the channel's data. + + Notes: + The microbunch resolved data is exploded and converted to a DataFrame. The MultiIndex + is set, and the NaN values are dropped, alongside the pulseId = 0 (meaningless). + + """ + return ( + Series((np_array[i] for i in train_id.index), name=channel) + .explode() + .dropna() + .to_frame() + .set_index(self.index_per_electron) + .drop( + index=np.arange(-self._config["dataframe"]["ubid_offset"], 0), + level=1, + errors="ignore", + ) + )
+ +
[docs] def create_dataframe_per_pulse( + self, + np_array: np.ndarray, + train_id: Series, + channel: str, + channel_dict: dict, + ) -> DataFrame: + """ + Returns a pandas DataFrame for a given channel name of type [per pulse]. + + Args: + np_array (np.ndarray): The numpy array containing the channel data. + train_id (Series): The train ID Series. + channel (str): The name of the channel. + channel_dict (dict): The dictionary containing channel parameters. + + Returns: + DataFrame: The pandas DataFrame for the channel's data. + + Notes: + - For auxiliary channels, the macrobunch resolved data is repeated 499 times to be + compared to electron resolved data for each auxiliary channel. The data is then + converted to a multicolumn DataFrame. + - For all other pulse resolved channels, the macrobunch resolved data is exploded + to a DataFrame and the MultiIndex is set. + + """ + + # Special case for auxiliary channels + if channel == "dldAux": + # Checks the channel dictionary for correct slices and creates a multicolumn DataFrame + data_frames = ( + Series( + (np_array[i, value] for i in train_id.index), + name=key, + index=train_id, + ).to_frame() + for key, value in channel_dict["dldAuxChannels"].items() + ) + + # Multiindex set and combined dataframe returned + data = reduce(DataFrame.combine_first, data_frames) + + # For all other pulse resolved channels + else: + # Macrobunch resolved data is exploded to a DataFrame and the MultiIndex is set + + # Creates the index_per_pulse for the given channel + self.create_multi_index_per_pulse(train_id, np_array) + data = ( + Series((np_array[i] for i in train_id.index), name=channel) + .explode() + .to_frame() + .set_index(self.index_per_pulse) + ) + + return data
+ +
[docs] def create_dataframe_per_train( + self, + np_array: np.ndarray, + train_id: Series, + channel: str, + ) -> DataFrame: + """ + Returns a pandas DataFrame for a given channel name of type [per train]. + + Args: + np_array (np.ndarray): The numpy array containing the channel data. + train_id (Series): The train ID Series. + channel (str): The name of the channel. + + Returns: + DataFrame: The pandas DataFrame for the channel's data. + """ + return ( + Series((np_array[i] for i in train_id.index), name=channel) + .to_frame() + .set_index(train_id) + )
+ +
[docs]    def create_dataframe_per_channel(
+        self,
+        h5_file: h5py.File,
+        channel: str,
+    ) -> Union[Series, DataFrame]:
+        """
+        Returns a pandas DataFrame for a given channel name from a given file.
+
+        This method takes an h5py.File object `h5_file` and a channel name `channel`, and returns
+        a pandas DataFrame containing the data for that channel from the file. The format of the
+        DataFrame depends on the channel's format specified in the configuration.
+
+        Args:
+            h5_file (h5py.File): The h5py.File object representing the HDF5 file.
+            channel (str): The name of the channel.
+
+        Returns:
+            Union[Series, DataFrame]: A pandas Series or DataFrame representing the channel's data.
+
+        Raises:
+            ValueError: If the channel has an undefined format.
+
+        """
+        [train_id, np_array] = self.create_numpy_array_per_channel(
+            h5_file,
+            channel,
+        )  # numpy array created
+        channel_dict = self._config["dataframe"]["channels"][channel]  # channel parameters
+
+        # If np_array is size zero, fill with NaNs
+        if np_array.size == 0:
+            # Fill the np_array with NaN values of the same shape as train_id
+            np_array = np.full_like(train_id, np.nan, dtype=np.double)
+            # Create a Series using np_array, with train_id as the index
+            data = Series(
+                (np_array[i] for i in train_id.index),
+                name=channel,
+                index=train_id,
+            )
+
+        # Electron resolved data is treated here
+        if channel_dict["format"] == "per_electron":
+            # If index_per_electron is None, create it for the given file
+            if self.index_per_electron is None:
+                self.create_multi_index_per_electron(h5_file)
+
+            # Create a DataFrame for electron-resolved data
+            data = self.create_dataframe_per_electron(
+                np_array,
+                train_id,
+                channel,
+            )
+
+        # Pulse resolved data is treated here
+        elif channel_dict["format"] == "per_pulse":
+            # Create a DataFrame for pulse-resolved data
+            data = self.create_dataframe_per_pulse(
+                np_array,
+                train_id,
+                channel,
+                channel_dict,
+            )
+
+        # Train resolved data is treated here
+        elif channel_dict["format"] == "per_train":
+            # Create a DataFrame for train-resolved data
+            data = self.create_dataframe_per_train(np_array, train_id, channel)
+
+        else:
+            raise ValueError(
+                channel
+                + " has an undefined format. Available formats are "
+                + "per_pulse, per_electron and per_train",
+            )
+
+        return data
+ +
[docs] def concatenate_channels( + self, + h5_file: h5py.File, + ) -> DataFrame: + """ + Concatenates the channels from the provided h5py.File into a pandas DataFrame. + + This method takes an h5py.File object `h5_file` and concatenates the channels present in + the file into a single pandas DataFrame. The concatenation is performed based on the + available channels specified in the configuration. + + Args: + h5_file (h5py.File): The h5py.File object representing the HDF5 file. + + Returns: + DataFrame: A concatenated pandas DataFrame containing the channels. + + Raises: + ValueError: If the group_name for any channel does not exist in the file. + + """ + all_keys = parse_h5_keys(h5_file) # Parses all channels present + + # Check for if the provided group_name actually exists in the file + for channel in self._config["dataframe"]["channels"]: + if channel == "timeStamp": + group_name = self._config["dataframe"]["channels"][channel]["group_name"] + "time" + else: + group_name = self._config["dataframe"]["channels"][channel]["group_name"] + "value" + + if group_name not in all_keys: + raise ValueError( + f"The group_name for channel {channel} does not exist.", + ) + + # Create a generator expression to generate data frames for each channel + data_frames = ( + self.create_dataframe_per_channel(h5_file, each) for each in self.available_channels + ) + + # Use the reduce function to join the data frames into a single DataFrame + return reduce( + lambda left, right: left.join(right, how="outer"), + data_frames, + )
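The reduce + outer-join idiom used above is plain pandas; a toy illustration with invented frames:

from functools import reduce

import pandas as pd

frames = [
    pd.DataFrame({"a": [1, 2]}, index=[0, 1]),
    pd.DataFrame({"b": [3.0]}, index=[1]),
]
combined = reduce(lambda left, right: left.join(right, how="outer"), frames)
#    a    b
# 0  1  NaN
# 1  2  3.0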
+ +
[docs] def create_dataframe_per_file( + self, + file_path: Path, + ) -> DataFrame: + """ + Create pandas DataFrames for the given file. + + This method loads an HDF5 file specified by `file_path` and constructs a pandas DataFrame + from the datasets within the file. The order of datasets in the DataFrames is the opposite + of the order specified by channel names. + + Args: + file_path (Path): Path to the input HDF5 file. + + Returns: + DataFrame: pandas DataFrame + + """ + # Loads h5 file and creates a dataframe + with h5py.File(file_path, "r") as h5_file: + self.reset_multi_index() # Reset MultiIndexes for next file + df = self.concatenate_channels(h5_file) + df = df.dropna(subset=self._config["dataframe"].get("tof_column", "dldTimeSteps")) + # correct the 3 bit shift which encodes the detector ID in the 8s time + if self._config["dataframe"].get("split_sector_id_from_dld_time", False): + df = split_dld_time_from_sector_id(df, config=self._config) + return df
+ +
[docs]    def create_buffer_file(self, h5_path: Path, parquet_path: Path) -> Union[bool, Exception]:
+        """
+        Converts an HDF5 file to Parquet format to create a buffer file.
+
+        This method uses the `create_dataframe_per_file` method to create a dataframe from an
+        individual HDF5 file. The resulting dataframe is then saved to a Parquet file.
+
+        Args:
+            h5_path (Path): Path to the input HDF5 file.
+            parquet_path (Path): Path to the output Parquet file.
+
+        Returns:
+            Union[bool, Exception]: The exception if the conversion failed, None otherwise.
+            Failures are also recorded in ``failed_files_error``.
+        """
+        try:
+            (
+                self.create_dataframe_per_file(h5_path)
+                .reset_index(level=self.multi_index)
+                .to_parquet(parquet_path, index=False)
+            )
+        except Exception as exc:  # pylint: disable=broad-except
+            self.failed_files_error.append(f"{parquet_path}: {type(exc)} {exc}")
+            return exc
+        return None
+ +
[docs]    def buffer_file_handler(
+        self,
+        data_parquet_dir: Path,
+        detector: str,
+        force_recreate: bool,
+    ) -> Tuple[List[Path], List, List]:
+        """
+        Handles the conversion of buffer files (h5 to parquet) and returns the filenames.
+
+        Args:
+            data_parquet_dir (Path): Directory where the parquet files will be stored.
+            detector (str): Detector name.
+            force_recreate (bool): Forces recreation of buffer files.
+
+        Returns:
+            Tuple[List[Path], List, List]: Three lists, one for
+            parquet file paths, one for metadata and one for schema.
+
+        Raises:
+            FileNotFoundError: If the conversion fails for any files or no data is available.
+        """
+
+        # Create the directory for buffer parquet files
+        buffer_file_dir = data_parquet_dir.joinpath("buffer")
+        buffer_file_dir.mkdir(parents=True, exist_ok=True)
+
+        # Create two separate lists for h5 and parquet file paths
+        h5_filenames = [Path(file) for file in self.files]
+        parquet_filenames = [
+            buffer_file_dir.joinpath(Path(file).stem + detector) for file in self.files
+        ]
+        existing_parquet_filenames = [file for file in parquet_filenames if file.exists()]
+
+        # Raise an error if no data is available for conversion
+        if len(h5_filenames) == 0:
+            raise ValueError("No data available. Probably failed reading all h5 files")
+
+        if not force_recreate:
+            # Check if the available channels match the schema of the existing parquet files
+            parquet_schemas = [pq.read_schema(file) for file in existing_parquet_filenames]
+            config_schema = set(self.get_channels(formats="all", index=True))
+            if self._config["dataframe"].get("split_sector_id_from_dld_time", False):
+                config_schema.add(self._config["dataframe"].get("sector_id_column", False))
+
+            for i, schema in enumerate(parquet_schemas):
+                schema_set = set(schema.names)
+                if schema_set != config_schema:
+                    missing_in_parquet = config_schema - schema_set
+                    missing_in_config = schema_set - config_schema
+
+                    missing_in_parquet_str = (
+                        f"Missing in parquet: {missing_in_parquet}" if missing_in_parquet else ""
+                    )
+                    missing_in_config_str = (
+                        f"Missing in config: {missing_in_config}" if missing_in_config else ""
+                    )
+
+                    raise ValueError(
+                        "The available channels do not match the schema of file",
+                        f"{existing_parquet_filenames[i]}",
+                        f"{missing_in_parquet_str}",
+                        f"{missing_in_config_str}",
+                        "Please check the configuration file or set force_recreate to True.",
+                    )
+
+        # Choose files to read
+        files_to_read = [
+            (h5_path, parquet_path)
+            for h5_path, parquet_path in zip(h5_filenames, parquet_filenames)
+            if force_recreate or not parquet_path.exists()
+        ]
+
+        print(f"Reading files: {len(files_to_read)} new files of {len(h5_filenames)} total.")
+
+        # Initialize the indices for create_buffer_file conversion
+        self.reset_multi_index()
+
+        # Convert the remaining h5 files to parquet in parallel if there are any
+        if len(files_to_read) > 0:
+            error = Parallel(n_jobs=len(files_to_read), verbose=10)(
+                delayed(self.create_buffer_file)(h5_path, parquet_path)
+                for h5_path, parquet_path in files_to_read
+            )
+            if any(error):
+                raise RuntimeError(f"Conversion failed for some files. {error}")
+
+        # Raise an error if the conversion failed for any files
+        # TODO: merge this and the previous error trackings
+        if self.failed_files_error:
+            raise FileNotFoundError(
+                "Conversion failed for the following files:\n" + "\n".join(self.failed_files_error),
+            )
+
+        print("All files converted successfully!")
+
+        # read all parquet metadata and schema
+        metadata = [pq.read_metadata(file) for file in parquet_filenames]
+        schema = [pq.read_schema(file) for file in parquet_filenames]
+
+        return parquet_filenames, metadata, schema
+ +
[docs] def parquet_handler( + self, + data_parquet_dir: Path, + detector: str = "", + parquet_path: Path = None, + converted: bool = False, + load_parquet: bool = False, + save_parquet: bool = False, + force_recreate: bool = False, + ) -> Tuple[dd.DataFrame, dd.DataFrame]: + """ + Handles loading and saving of parquet files based on the provided parameters. + + Args: + data_parquet_dir (Path): Directory where the parquet files are located. + detector (str, optional): Adds a identifier for parquets to distinguish multidetector + systems. + parquet_path (str, optional): Path to the combined parquet file. + converted (bool, optional): True if data is augmented by adding additional columns + externally and saved into converted folder. + load_parquet (bool, optional): Loads the entire parquet into the dd dataframe. + save_parquet (bool, optional): Saves the entire dataframe into a parquet. + force_recreate (bool, optional): Forces recreation of buffer file. + Returns: + tuple: A tuple containing two dataframes: + - dataframe_electron: Dataframe containing the loaded/augmented electron data. + - dataframe_pulse: Dataframe containing the loaded/augmented timed data. + + Raises: + FileNotFoundError: If the requested parquet file is not found. + + """ + + # Construct the parquet path if not provided + if parquet_path is None: + parquet_name = "_".join(str(run) for run in self.runs) + parquet_dir = data_parquet_dir.joinpath("converted") if converted else data_parquet_dir + + parquet_path = parquet_dir.joinpath( + "run_" + parquet_name + detector, + ).with_suffix(".parquet") + + # Check if load_parquet is flagged and then load the file if it exists + if load_parquet: + try: + dataframe_electron = dd.read_parquet(parquet_path) + dataframe_pulse = dataframe_electron + except Exception as exc: + raise FileNotFoundError( + "The final parquet for this run(s) does not exist yet. " + "If it is in another location, please provide the path as parquet_path.", + ) from exc + + else: + # Obtain the parquet filenames, metadata and schema from the method + # which handles buffer file creation/reading + filenames, metadata, _ = self.buffer_file_handler( + data_parquet_dir, + detector, + force_recreate, + ) + + # Read all parquet files into one dataframe using dask + dataframe = dd.read_parquet(filenames, calculate_divisions=True) + + # Channels to fill NaN values + channels: List[str] = self.get_channels(["per_pulse", "per_train"]) + + overlap = min(file.num_rows for file in metadata) + + print("Filling nan values...") + dataframe = dfops.forward_fill_lazy( + df=dataframe, + columns=channels, + before=overlap, + iterations=self._config["dataframe"].get("forward_fill_iterations", 2), + ) + # Remove the NaNs from per_electron channels + dataframe_electron = dataframe.dropna( + subset=self.get_channels(["per_electron"]), + ) + dataframe_pulse = dataframe[ + self.multi_index + self.get_channels(["per_pulse", "per_train"]) + ] + dataframe_pulse = dataframe_pulse[ + (dataframe_pulse["electronId"] == 0) | (np.isnan(dataframe_pulse["electronId"])) + ] + + # Save the dataframe as parquet if requested + if save_parquet: + dataframe_electron.compute().reset_index(drop=True).to_parquet(parquet_path) + print("Combined parquet file saved.") + + return dataframe_electron, dataframe_pulse
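In serial pandas terms, the forward fill applied above corresponds to ffill() on the sparse per-pulse and per-train columns; the lazy variant additionally carries ``before=overlap`` rows across dask partitions so fills can cross file boundaries. A toy pandas analogue, with invented column names:

import numpy as np
import pandas as pd

df = pd.DataFrame(
    {
        "bam": [4.2, np.nan, np.nan, 4.3, np.nan],  # sparse per-pulse channel
        "dldPosX": [10, 11, 12, 13, 14],            # dense per-electron channel
    },
)
df["bam"] = df["bam"].ffill()  # → [4.2, 4.2, 4.2, 4.3, 4.3]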
+ +
[docs]    def parse_metadata(self, scicat_token: str = None) -> dict:
+        """Uses the MetadataRetriever class to fetch metadata from scicat for each run.
+
+        Args:
+            scicat_token (str, optional): The scicat token to use for fetching metadata.
+
+        Returns:
+            dict: Metadata dictionary
+        """
+        metadata_retriever = MetadataRetriever(self._config["metadata"], scicat_token)
+        metadata = metadata_retriever.get_metadata(
+            beamtime_id=self._config["core"]["beamtime_id"],
+            runs=self.runs,
+            metadata=self.metadata,
+        )
+
+        return metadata
+ +
[docs] def get_count_rate( + self, + fids: Sequence[int] = None, # noqa: ARG002 + **kwds, # noqa: ARG002 + ): + return None, None
+ +
[docs] def get_elapsed_time(self, fids=None, **kwds): # noqa: ARG002 + return None
+ +
[docs]    def read_dataframe(
+        self,
+        files: Union[str, Sequence[str]] = None,
+        folders: Union[str, Sequence[str]] = None,
+        runs: Union[str, Sequence[str]] = None,
+        ftype: str = "h5",
+        metadata: dict = None,
+        collect_metadata: bool = False,
+        **kwds,
+    ) -> Tuple[dd.DataFrame, dd.DataFrame, dict]:
+        """
+        Read express data from the DAQ, generating a parquet in between.
+
+        Args:
+            files (Union[str, Sequence[str]], optional): File path(s) to process.
+                Defaults to None.
+            folders (Union[str, Sequence[str]], optional): Path to folder(s) where files are
+                stored. Takes precedence over ``files``. Defaults to None.
+            runs (Union[str, Sequence[str]], optional): Run identifier(s). Corresponding files will
+                be located in the location provided by ``folders``. Takes precedence over
+                ``files`` and ``folders``. Defaults to None.
+            ftype (str, optional): The file extension type. Defaults to "h5".
+            metadata (dict, optional): Additional metadata. Defaults to None.
+            collect_metadata (bool, optional): Whether to collect metadata. Defaults to False.
+
+        Returns:
+            Tuple[dd.DataFrame, dd.DataFrame, dict]: A tuple containing the electron dataframe,
+            the timed dataframe, and the metadata.
+
+        Raises:
+            ValueError: If neither 'runs' nor 'files'/'data_raw_dir' is provided.
+            FileNotFoundError: If the conversion fails for some files or no data is available.
+        """
+        t0 = time.time()
+
+        data_raw_dir, data_parquet_dir = self.initialize_paths()
+
+        # Prepare a list of names for the runs to read and parquets to write
+        if runs is not None:
+            files = []
+            if isinstance(runs, (str, int)):
+                runs = [runs]
+            for run in runs:
+                run_files = self.get_files_from_run_id(
+                    run_id=run,
+                    folders=[str(folder.resolve()) for folder in data_raw_dir],
+                    extension=ftype,
+                    daq=self._config["dataframe"]["daq"],
+                )
+                files.extend(run_files)
+            self.runs = list(runs)
+            super().read_dataframe(files=files, ftype=ftype)
+
+        else:
+            # This call takes care of files and folders. As we have converted runs into files
+            # already, they are just stored in the class by this call.
+            super().read_dataframe(
+                files=files,
+                folders=folders,
+                ftype=ftype,
+                metadata=metadata,
+            )
+
+        df, df_timed = self.parquet_handler(data_parquet_dir, **kwds)
+
+        metadata = self.parse_metadata(**kwds) if collect_metadata else {}
+        print(f"loading complete in {time.time() - t0: .2f} s")
+
+        return df, df_timed, metadata
+ + +LOADER = FlashLoader +
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/loader/flash/metadata.html b/sed/2.1.0/_modules/sed/loader/flash/metadata.html
new file mode 100644
index 0000000..45e121e
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/loader/flash/metadata.html
@@ -0,0 +1,615 @@

Source code for sed.loader.flash.metadata

+"""
+The module provides a MetadataRetriever class for retrieving metadata
+from a SciCat instance based on beamtime and run IDs.
+"""
+
+import warnings
+from typing import Dict
+from typing import Optional
+
+import requests
+
+
+
[docs]class MetadataRetriever:
+    """
+    A class for retrieving metadata from a SciCat instance based
+    on beamtime and run IDs.
+    """
+
+    def __init__(self, metadata_config: Dict, scicat_token: str = None) -> None:
+        """
+        Initializes the MetadataRetriever class.
+
+        Args:
+            metadata_config (dict): A dict containing at least the URL, and optionally
+                the token, for the SciCat instance.
+            scicat_token (str, optional): The token to use for fetching metadata.
+                Overrides the token from the config.
+        """
+        self.token = metadata_config.get("scicat_token", None)
+        if scicat_token:
+            self.token = scicat_token
+        self.url = metadata_config.get("scicat_url", None)
+
+        if not self.token or not self.url:
+            raise ValueError("No URL or token provided for fetching metadata from scicat.")
+
+        self.headers = {
+            "Content-Type": "application/json",
+            "Accept": "application/json",
+        }
+
[docs] def get_metadata( + self, + beamtime_id: str, + runs: list, + metadata: Optional[Dict] = None, + ) -> Dict: + """ + Retrieves metadata for a given beamtime ID and list of runs. + + Args: + beamtime_id (str): The ID of the beamtime. + runs (list): A list of run IDs. + metadata (Dict, optional): The existing metadata dictionary. + Defaults to None. + + Returns: + Dict: The updated metadata dictionary. + + Raises: + Exception: If the request to retrieve metadata fails. + """ + # If metadata is not provided, initialize it as an empty dictionary + if metadata is None: + metadata = {} + + # Iterate over the list of runs + for run in runs: + pid = f"{beamtime_id}/{run}" + # Retrieve metadata for each run and update the overall metadata dictionary + metadata_run = self._get_metadata_per_run(pid) + metadata.update( + metadata_run, + ) # TODO: Not correct for multiple runs + + return metadata
+
+    def _get_metadata_per_run(self, pid: str) -> Dict:
+        """
+        Retrieves metadata for a specific run based on the PID.
+
+        Args:
+            pid (str): The PID of the run.
+
+        Returns:
+            Dict: The retrieved metadata.
+
+        Raises:
+            Exception: If the request to retrieve metadata fails.
+        """
+        headers2 = dict(self.headers)
+        headers2["Authorization"] = "Bearer {}".format(self.token)
+
+        try:
+            dataset_response = requests.get(
+                self._create_new_dataset_url(pid),
+                headers=headers2,
+                timeout=10,
+            )
+            dataset_response.raise_for_status()
+            # Check if the response is an empty object, because the URL is wrong for
+            # older SciCat implementations
+            if not dataset_response.content:
+                dataset_response = requests.get(
+                    self._create_old_dataset_url(pid), headers=headers2, timeout=10
+                )
+            # If the dataset request is successful, return the retrieved metadata
+            # as a JSON object
+            return dataset_response.json()
+        except requests.exceptions.RequestException as exception:
+            # If the request fails, issue a warning and skip this run
+            warnings.warn(f"Failed to retrieve metadata for PID {pid}: {str(exception)}")
+            return {}  # Return an empty dictionary for this run
+
+    def _create_old_dataset_url(self, pid: str) -> str:
+        return "{burl}/{url}/%2F{npid}".format(
+            burl=self.url, url="Datasets", npid=self._reformat_pid(pid)
+        )
+
+    def _create_new_dataset_url(self, pid: str) -> str:
+        return "{burl}/{url}/{npid}".format(
+            burl=self.url, url="Datasets", npid=self._reformat_pid(pid)
+        )
+
+    def _reformat_pid(self, pid: str) -> str:
+        """SciCat adds a pid-prefix + "/" but at DESY prefix = "" """
+        return (pid).replace("/", "%2F")
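A hypothetical usage sketch; the URL, token, and IDs are placeholders:

from sed.loader.flash.metadata import MetadataRetriever

retriever = MetadataRetriever(
    {"scicat_url": "https://scicat.example.org", "scicat_token": "<token>"},
)
metadata = retriever.get_metadata(beamtime_id="11111111", runs=["43878"])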
+
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/loader/generic/loader.html b/sed/2.1.0/_modules/sed/loader/generic/loader.html
new file mode 100644
index 0000000..b8377e3
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/loader/generic/loader.html
@@ -0,0 +1,649 @@

Source code for sed.loader.generic.loader

+"""
+module sed.loader.generic, code for loading files of various types (parquet, json, csv)
+into a dask dataframe.
+Mostly ported from https://github.com/mpes-kit/mpes.
+@author: L. Rettig
+"""
+from typing import List
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+import dask.dataframe as ddf
+import numpy as np
+
+from sed.loader.base.loader import BaseLoader
+
+
+
[docs]class GenericLoader(BaseLoader): + """Dask implementation of the Loader. Reads from various file types using the + utilities of Dask. + + Args: + config (dict, optional): Config dictionary. Defaults to None. + meta_handler (MetaHandler, optional): MetaHandler object. Defaults to None. + """ + + __name__ = "generic" + + supported_file_types = ["parquet", "csv", "json"] + +
[docs]    def read_dataframe(
+        self,
+        files: Union[str, Sequence[str]] = None,
+        folders: Union[str, Sequence[str]] = None,
+        runs: Union[str, Sequence[str]] = None,
+        ftype: str = "parquet",
+        metadata: dict = None,
+        collect_metadata: bool = False,
+        **kwds,
+    ) -> Tuple[ddf.DataFrame, ddf.DataFrame, dict]:
+        """Read stored files from a folder into a dataframe.
+
+        Args:
+            files (Union[str, Sequence[str]], optional): File path(s) to process.
+                Defaults to None.
+            folders (Union[str, Sequence[str]], optional): Path to folder(s) where files
+                are stored. Takes precedence over ``files``: if folders are specified,
+                the ``files`` argument is ignored. Defaults to None.
+            runs (Union[str, Sequence[str]], optional): Run identifier(s). Corresponding
+                files will be located in the location provided by ``folders``. Takes
+                precedence over ``files`` and ``folders``. Defaults to None.
+            ftype (str, optional): File type to read ('parquet', 'json', 'csv', etc).
+                If a folder path is given, all files with the specified extension are
+                read into the dataframe in the reading order. Defaults to "parquet".
+            metadata (dict, optional): Manual metadata dictionary. Auto-generated
+                metadata will be added to it. Defaults to None.
+            collect_metadata (bool): Option to collect metadata from files. Requires
+                a valid config dict. Defaults to False.
+            **kwds: keyword arguments. See the keyword arguments for the specific file
+                parser in the ``dask.dataframe`` module.
+
+        Raises:
+            ValueError: Raised if neither files nor folder provided.
+            FileNotFoundError: Raised if the files or folder cannot be found.
+            ValueError: Raised if the file type is not supported.
+
+        Returns:
+            Tuple[ddf.DataFrame, ddf.DataFrame, dict]: Dask dataframe, timed dataframe
+            (None for this loader), and metadata read from specified files.
+        """
+        # pylint: disable=duplicate-code
+        super().read_dataframe(
+            files=files,
+            folders=folders,
+            runs=runs,
+            ftype=ftype,
+            metadata=metadata,
+        )
+
+        if not self.files:
+            raise FileNotFoundError("No valid files found!")
+
+        if collect_metadata:
+            # TODO implementation
+            self.metadata = self.metadata
+
+        if ftype == "parquet":
+            return (ddf.read_parquet(self.files, **kwds), None, self.metadata)
+
+        if ftype == "json":
+            return (ddf.read_json(self.files, **kwds), None, self.metadata)
+
+        if ftype == "csv":
+            return (ddf.read_csv(self.files, **kwds), None, self.metadata)
+
+        try:
+            return (ddf.read_table(self.files, **kwds), None, self.metadata)
+        except (TypeError, ValueError, NotImplementedError) as exc:
+            raise ValueError(
+                "The file format cannot be understood!",
+            ) from exc
+ +
[docs]    def get_files_from_run_id(
+        self,
+        run_id: str,  # noqa: ARG002
+        folders: Union[str, Sequence[str]] = None,  # noqa: ARG002
+        extension: str = None,  # noqa: ARG002
+        **kwds,  # noqa: ARG002
+    ) -> List[str]:
+        """Locate the files for a given run identifier.
+
+        Args:
+            run_id (str): The run identifier to locate.
+            folders (Union[str, Sequence[str]], optional): The directory(ies) where the raw
+                data is located. Defaults to None.
+            extension (str, optional): The file extension. Defaults to None.
+            kwds: Keyword arguments
+
+        Returns:
+            List[str]: List of files for the given run.
+        """
+        raise NotImplementedError
+ +
[docs]    def get_count_rate(
+        self,
+        fids: Sequence[int] = None,  # noqa: ARG002
+        **kwds,  # noqa: ARG002
+    ) -> Tuple[np.ndarray, np.ndarray]:
+        """Create count rate data for the files specified in ``fids``.
+
+        Args:
+            fids (Sequence[int], optional): The file ids to include.
+                Defaults to the list of all file ids.
+            kwds: Keyword arguments
+
+        Returns:
+            Tuple[np.ndarray, np.ndarray]: Arrays containing count rate and seconds
+            into the scan.
+        """
+        # TODO
+        return None, None
+ +
[docs]    def get_elapsed_time(
+        self,
+        fids: Sequence[int] = None,  # noqa: ARG002
+        **kwds,  # noqa: ARG002
+    ) -> float:
+        """Return the elapsed time in the files specified in ``fids``.
+
+        Args:
+            fids (Sequence[int], optional): The file ids to include.
+                Defaults to the list of all file ids.
+            kwds: Keyword arguments
+
+        Returns:
+            float: The elapsed time in the files in seconds.
+        """
+        return None
+ + +LOADER = GenericLoader +
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/loader/loader_interface.html b/sed/2.1.0/_modules/sed/loader/loader_interface.html
new file mode 100644
index 0000000..9b178d0
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/loader/loader_interface.html
@@ -0,0 +1,553 @@

Source code for sed.loader.loader_interface

+"""Interface to select a specified loader
+"""
+import glob
+import importlib.util
+import os
+from typing import List
+
+from sed.loader.base.loader import BaseLoader
+
+
+
[docs]def get_loader(
+    loader_name: str,
+    config: dict = None,
+) -> BaseLoader:
+    """Helper function to get the loader object from its given name.
+
+    Args:
+        loader_name (str): Name of the loader
+        config (dict, optional): Configuration dictionary. Defaults to None.
+
+    Raises:
+        ValueError: Raised if the loader cannot be found.
+
+    Returns:
+        BaseLoader: The loader object.
+    """
+
+    if config is None:
+        config = {}
+
+    path_prefix = f"{os.path.dirname(__file__)}{os.sep}" if os.path.dirname(__file__) else ""
+    path = os.path.join(path_prefix, loader_name, "loader.py")
+    if not os.path.exists(path):
+        error_str = f"Invalid loader {loader_name}. Available loaders are: ["
+        for loader in get_names_of_all_loaders():
+            error_str += f"{loader}, "
+        error_str += "]."
+        raise ValueError(error_str)
+
+    spec = importlib.util.spec_from_file_location("loader.py", path)
+    module = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(module)
+    return module.LOADER(config=config)
+ + +
[docs]def get_names_of_all_loaders() -> List[str]: + """Helper function to populate a list of all available loaders. + + Returns: + List[str]: List of all detected loader names. + """ + path_prefix = f"{os.path.dirname(__file__)}{os.sep}" if os.path.dirname(__file__) else "" + files = glob.glob(os.path.join(path_prefix, "*", "loader.py")) + all_loaders = [] + for file in files: + if f"{os.sep}base{os.sep}" not in file: + index_of_loaders_folder_name = file.rindex( + f"loader{os.sep}", + ) + len(f"loader{os.sep}") + index_of_last_path_sep = file.rindex(os.sep) + all_loaders.append( + file[index_of_loaders_folder_name:index_of_last_path_sep], + ) + return all_loaders
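A hypothetical usage sketch of the two helpers; the file name is a placeholder:

from sed.loader.loader_interface import get_loader, get_names_of_all_loaders

print(get_names_of_all_loaders())  # e.g. ['flash', 'generic', 'mpes', 'sxp']
loader = get_loader("generic", config={})
df, _, metadata = loader.read_dataframe(files=["events.parquet"], ftype="parquet")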
+
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/loader/mirrorutil.html b/sed/2.1.0/_modules/sed/loader/mirrorutil.html
new file mode 100644
index 0000000..56471aa
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/loader/mirrorutil.html
@@ -0,0 +1,850 @@

Source code for sed.loader.mirrorutil

+"""
+module sed.loader.mirrorutil, code for transparently mirroring file system trees to a
+second (local) location. This speeds up binning of data stored on network drives
+tremendously.
+Mostly ported from https://github.com/mpes-kit/mpes.
+@author: L. Rettig
+"""
+import errno
+import os
+import shutil
+from datetime import datetime
+from typing import List
+
+import dask as d
+from dask.diagnostics import ProgressBar
+
+
+
[docs]class CopyTool:
+    """File collecting and sorting class.
+
+    Args:
+        source (str): Source path for the copy tool.
+        dest (str): Destination path for the copy tool.
+    """
+
+    def __init__(
+        self,
+        source: str,
+        dest: str,
+        **kwds,
+    ):
+        self.source = source
+        self.dest = dest
+        self.safety_margin = kwds.pop(
+            "safetyMargin",
+            1 * 2**30,
+        )  # Default 1 GB safety margin
+        self.gid = kwds.pop("gid", 5050)
+        self.scheduler = kwds.pop("scheduler", "threads")
+
+        # Default to 25 concurrent copy tasks
+        self.ntasks = int(kwds.pop("ntasks", 25))
+
[docs] def copy( + self, + source: str, + force_copy: bool = False, + **compute_kwds, + ) -> str: + """Local file copying method. + + Args: + source (str): source path + force_copy (bool, optional): re-copy all files. Defaults to False. + + Raises: + FileNotFoundError: Raised if the source path is not found or empty. + OSError: Raised if the target disk is full. + + Returns: + str: Path of the copied source directory mapped into the target tree + """ + + if not os.path.exists(source): + raise FileNotFoundError("Source not found!") + + filenames = [] + dirnames = [] + + if os.path.isfile(source): + # Single file + sdir = os.path.dirname(os.path.realpath(source)) + ddir = get_target_dir( + sdir, + self.source, + self.dest, + gid=self.gid, + mode=0o775, + create=True, + ) + filenames.append(os.path.realpath(source)) + + elif os.path.isdir(source): + sdir = os.path.realpath(source) + ddir = get_target_dir( + sdir, + self.source, + self.dest, + gid=self.gid, + mode=0o775, + create=True, + ) + # dirs.append(sdir) + for path, dirs, files in os.walk(sdir): + for file in files: + filenames.append(os.path.join(path, file)) + for directory in dirs: + dirnames.append(os.path.join(path, directory)) + + if not filenames: + raise FileNotFoundError("No files found at path!") + + # actual copy loop + # Check space left + size_src = 0 + size_dst = 0 + for sfile in filenames: + size_src += os.path.getsize(sfile) + if os.path.exists(sfile.replace(sdir, ddir)): + size_dst += os.path.getsize(sfile.replace(sdir, ddir)) + if size_src == 0 and not force_copy: + # nothing to copy, just return directory + return ddir + free = shutil.disk_usage(ddir).free + if size_src - size_dst > free - self.safety_margin: + raise OSError( + errno.ENOSPC, + f"Target disk full, only {free / 2**30} GB free, " + + f"but {(size_src - size_dst) / 2**30} GB needed!", + ) + + # make directories + for directory in dirnames: + dest_dir = directory.replace(sdir, ddir) + mymakedirs(dest_dir, gid=self.gid, mode=0o775) + + copy_tasks = [] # Core-level jobs + for src_file in filenames: + dest_file = src_file.replace(sdir, ddir) + size_src = os.path.getsize(src_file) + if os.path.exists(dest_file): + size_dst = os.path.getsize(dest_file) + else: + size_dst = 0 + if not os.path.exists(dest_file) or size_dst != size_src or force_copy: + if os.path.exists(dest_file): + # delete existing file, to fix permission issue + copy_tasks.append( + d.delayed(mycopy)( + src_file, + dest_file, + gid=self.gid, + mode=0o664, + replace=True, + ), + ) + else: + copy_tasks.append( + d.delayed(mycopy)( + src_file, + dest_file, + gid=self.gid, + mode=0o664, + ), + ) + + # run the copy tasks + if len(copy_tasks) > 0: + print("Copy Files...") + with ProgressBar(): + d.compute( + *copy_tasks, + scheduler=self.scheduler, + num_workers=self.ntasks, + **compute_kwds, + ) + print("Copy finished!") + + if os.path.isdir(source): + return ddir + + return dest_file
+ +
[docs]    def size(self, sdir: str) -> int:
+        """Calculate the total size of the files under a source directory.
+
+        Args:
+            sdir (str): Path to source directory
+
+        Returns:
+            int: Size of files in source path
+        """
+
+        size = 0
+        for path, dirs, filenames in os.walk(  # pylint: disable=W0612
+            sdir,
+        ):
+            for sfile in filenames:
+                size += os.path.getsize(os.path.join(path, sfile))
+
+        return size
+ +
[docs]    def cleanup_oldest_scan(
+        self,
+        force: bool = False,
+        report: bool = False,
+    ):
+        """Remove scans in old directories. Looks for the directory with the oldest
+        ctime and queries the user to confirm its deletion.
+
+        Args:
+            force (bool, optional): Forces to automatically remove the oldest scan.
+                Defaults to False.
+            report (bool, optional): Print a report with all directories in dest,
+                sorted by age. Defaults to False.
+
+        Raises:
+            FileNotFoundError: Raised if no scans to remove are found.
+        """
+
+        # get list of all scan directories (leaf directories)
+        scan_dirs = []
+        for root, dirs, files in os.walk(  # pylint: disable=W0612
+            self.dest,
+        ):
+            if not dirs:
+                scan_dirs.append(root)
+
+        scan_dirs = sorted(scan_dirs, key=os.path.getctime)
+        if report:
+            print(
+                "Last accessed            Size      Path",
+            )
+            total_size = 0
+            for scan in scan_dirs:
+                size = 0
+                for path, dirs, filenames in os.walk(  # pylint: disable=W0612
+                    scan,
+                ):
+                    for sfile in filenames:
+                        size += os.path.getsize(os.path.join(path, sfile))
+                total_size += size
+                if size > 0:
+                    print(
+                        f"{datetime.fromtimestamp(os.path.getctime(scan))}, ",
+                        f"{(size/2**30):.2f} GB, {scan}",
+                    )
+            print(f"Total size: {(total_size/2**30):.2f} GB.")
+        oldest_scan = None
+        for scan in scan_dirs:
+            size = 0
+            for path, dirs, filenames in os.walk(  # pylint: disable=W0612
+                scan,
+            ):
+                for sfile in filenames:
+                    size += os.path.getsize(os.path.join(path, sfile))
+            if size > 0:
+                oldest_scan = scan
+                break
+        if oldest_scan is None:
+            raise FileNotFoundError("No scan with data found to remove!")
+
+        print(
+            f'Oldest scan is "{oldest_scan}", removing it will free ',
+            f"{(size/2**30):.2f} GB space.",
+        )
+        if force:
+            proceed = "y"
+        else:
+            print("Proceed (y/n)?")
+            proceed = input()
+        if proceed == "y":
+            shutil.rmtree(oldest_scan)
+            print("Removed successfully!")
+        else:
+            print("Aborted.")
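A hypothetical usage sketch; paths are placeholders:

from sed.loader.mirrorutil import CopyTool

ct = CopyTool(source="/network/share/data", dest="/local/scratch/data")
# Mirror one scan directory into the local tree; returns the mapped local path
mirrored_dir = ct.copy("/network/share/data/scan_0001")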
+ + +# private Functions +
[docs]def get_target_dir(
+    sdir: str,
+    source: str,
+    dest: str,
+    gid: int,
+    mode: int,
+    create: bool = False,
+) -> str:
+    """Retrieve the target directory.
+
+    Args:
+        sdir (str): Source directory to copy
+        source (str): source root path
+        dest (str): destination root path
+        gid (int): Group id
+        mode (int): Unix mode
+        create (bool, optional): Whether to create directories. Defaults to False.
+
+    Raises:
+        NotADirectoryError: Raised if sdir is not a directory
+        ValueError: Raised if sdir is not inside of source
+
+    Returns:
+        str: The mapped target directory inside dest
+    """
+
+    if not os.path.isdir(sdir):
+        raise NotADirectoryError("Only works for directories!")
+
+    dirs = []
+    head, tail = os.path.split(sdir)
+    dirs.append(tail)
+    while not os.path.samefile(head, source):
+        if os.path.samefile(head, "/"):
+            raise ValueError("sdir needs to be inside of source!")
+
+        head, tail = os.path.split(head)
+        dirs.append(tail)
+
+    dirs.reverse()
+    ddir = dest
+    for directory in dirs:
+        ddir = os.path.join(ddir, directory)
+        if create and not os.path.exists(ddir):
+            mymakedirs(ddir, mode, gid)
+    return ddir
+ + +# replacement for os.makedirs, which is independent of umask +
[docs]def mymakedirs(path: str, mode: int, gid: int) -> List[str]:
+    """Creates a directory path iteratively from its root
+
+    Args:
+        path (str): Path of the directory to create
+        mode (int): Unix access mode of created directories
+        gid (int): Group id of created directories
+
+    Returns:
+        List[str]: Paths of the created directories
+    """
+
+    if not path or os.path.exists(path):
+        return []
+    head, tail = os.path.split(path)  # pylint: disable=W0612
+    res = mymakedirs(head, mode, gid)
+    os.mkdir(path)
+    os.chmod(path, mode)
+    os.chown(path, -1, gid)
+    res.append(path)
+    return res
+ + +
[docs]def mycopy(source: str, dest: str, gid: int, mode: int, replace: bool = False):
+    """Copy function with an option to delete the target file first (to take ownership).
+
+    Args:
+        source (str): Path to the source file
+        dest (str): Path to the destination file
+        gid (int): Group id to be set for the destination file
+        mode (int): Unix access mode to be set for the destination file
+        replace (bool, optional): Option to replace an existing file.
+            Defaults to False.
+    """
+
+    if replace:
+        if os.path.exists(dest):
+            os.remove(dest)
+    shutil.copy2(source, dest)
+    # fix permissions and group ownership:
+    os.chown(dest, -1, gid)
+    os.chmod(dest, mode)
+
+ +
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/loader/mpes/loader.html b/sed/2.1.0/_modules/sed/loader/mpes/loader.html
new file mode 100644
index 0000000..8440390
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/loader/mpes/loader.html
@@ -0,0 +1,1454 @@

Source code for sed.loader.mpes.loader

+"""
+module sed.loader.mpes, code for loading hdf5 files delayed into a dask dataframe.
+Mostly ported from https://github.com/mpes-kit/mpes.
+@author: L. Rettig
+"""
+import datetime
+import glob
+import json
+import os
+from typing import Dict
+from typing import List
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+from urllib.error import HTTPError
+from urllib.error import URLError
+from urllib.request import urlopen
+
+import dask
+import dask.array as da
+import dask.dataframe as ddf
+import h5py
+import numpy as np
+import scipy.interpolate as sint
+from natsort import natsorted
+
+from sed.loader.base.loader import BaseLoader
+
+
+
[docs]def hdf5_to_dataframe( + files: Sequence[str], + group_names: Sequence[str] = None, + alias_dict: Dict[str, str] = None, + time_stamps: bool = False, + time_stamp_alias: str = "timeStamps", + ms_markers_group: str = "msMarkers", + first_event_time_stamp_key: str = "FirstEventTimeStamp", + **kwds, +) -> ddf.DataFrame: + """Function to read a selection of hdf5-files, and generate a delayed dask + dataframe from provided groups in the files. Optionally, aliases can be defined. + + Args: + files (List[str]): A list of the file paths to load. + group_names (List[str], optional): hdf5 group names to load. Defaults to load + all groups containing "Stream" + alias_dict (Dict[str, str], optional): Dictionary of aliases for the dataframe + columns. Keys are the hdf5 groupnames, and values the aliases. If an alias + is not found, its group name is used. Defaults to read the attribute + "Name" from each group. + time_stamps (bool, optional): Option to calculate time stamps. Defaults to + False. + time_stamp_alias (str): Alias name for the timestamp column. + Defaults to "timeStamps". + ms_markers_group (str): h5 column containing timestamp information. + Defaults to "msMarkers". + first_event_time_stamp_key (str): h5 attribute containing the start + timestamp of a file. Defaults to "FirstEventTimeStamp". + + Returns: + ddf.DataFrame: The delayed Dask DataFrame + """ + if group_names is None: + group_names = [] + if alias_dict is None: + alias_dict = {} + + # Read a file to parse the file structure + test_fid = kwds.pop("test_fid", 0) + test_proc = h5py.File(files[test_fid]) + if group_names == []: + group_names, alias_dict = get_groups_and_aliases( + h5file=test_proc, + search_pattern="Stream", + ) + + column_names = [alias_dict.get(group, group) for group in group_names] + + if time_stamps: + column_names.append(time_stamp_alias) + + test_array = hdf5_to_array( + h5file=test_proc, + group_names=group_names, + time_stamps=time_stamps, + ms_markers_group=ms_markers_group, + first_event_time_stamp_key=first_event_time_stamp_key, + ) + + # Delay-read all files + arrays = [ + da.from_delayed( + dask.delayed(hdf5_to_array)( + h5file=h5py.File(f), + group_names=group_names, + time_stamps=time_stamps, + ms_markers_group=ms_markers_group, + first_event_time_stamp_key=first_event_time_stamp_key, + ), + dtype=test_array.dtype, + shape=(test_array.shape[0], np.nan), + ) + for f in files + ] + array_stack = da.concatenate(arrays, axis=1).T + + return ddf.from_dask_array(array_stack, columns=column_names)
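A hypothetical call; file names, group names, and aliases are placeholders:

from sed.loader.mpes.loader import hdf5_to_dataframe

df = hdf5_to_dataframe(
    files=["scan_0001.h5", "scan_0002.h5"],
    group_names=["Stream_0", "Stream_1"],
    alias_dict={"Stream_0": "X", "Stream_1": "Y"},
    time_stamps=True,
)
df.head()  # triggers a partial compute of the delayed dask dataframe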
+ + +
[docs]def hdf5_to_timed_dataframe( + files: Sequence[str], + group_names: Sequence[str] = None, + alias_dict: Dict[str, str] = None, + time_stamps: bool = False, + time_stamp_alias: str = "timeStamps", + ms_markers_group: str = "msMarkers", + first_event_time_stamp_key: str = "FirstEventTimeStamp", + **kwds, +) -> ddf.DataFrame: + """Function to read a selection of hdf5-files, and generate a delayed dask + dataframe from provided groups in the files. Optionally, aliases can be defined. + Returns a dataframe for evenly spaced time intervals. + + Args: + files (List[str]): A list of the file paths to load. + group_names (List[str], optional): hdf5 group names to load. Defaults to load + all groups containing "Stream" + alias_dict (Dict[str, str], optional): Dictionary of aliases for the dataframe + columns. Keys are the hdf5 groupnames, and values the aliases. If an alias + is not found, its group name is used. Defaults to read the attribute + "Name" from each group. + time_stamps (bool, optional): Option to calculate time stamps. Defaults to + False. + time_stamp_alias (str): Alias name for the timestamp column. + Defaults to "timeStamps". + ms_markers_group (str): h5 column containing timestamp information. + Defaults to "msMarkers". + first_event_time_stamp_key (str): h5 attribute containing the start + timestamp of a file. Defaults to "FirstEventTimeStamp". + + Returns: + ddf.DataFrame: The delayed Dask DataFrame + """ + if group_names is None: + group_names = [] + if alias_dict is None: + alias_dict = {} + + # Read a file to parse the file structure + test_fid = kwds.pop("test_fid", 0) + test_proc = h5py.File(files[test_fid]) + if group_names == []: + group_names, alias_dict = get_groups_and_aliases( + h5file=test_proc, + search_pattern="Stream", + ) + + column_names = [alias_dict.get(group, group) for group in group_names] + + if time_stamps: + column_names.append(time_stamp_alias) + + test_array = hdf5_to_timed_array( + h5file=test_proc, + group_names=group_names, + time_stamps=time_stamps, + ms_markers_group=ms_markers_group, + first_event_time_stamp_key=first_event_time_stamp_key, + ) + + # Delay-read all files + arrays = [ + da.from_delayed( + dask.delayed(hdf5_to_timed_array)( + h5file=h5py.File(f), + group_names=group_names, + time_stamps=time_stamps, + ms_markers_group=ms_markers_group, + first_event_time_stamp_key=first_event_time_stamp_key, + ), + dtype=test_array.dtype, + shape=(test_array.shape[0], np.nan), + ) + for f in files + ] + array_stack = da.concatenate(arrays, axis=1).T + + return ddf.from_dask_array(array_stack, columns=column_names)
+ + +
[docs]def get_groups_and_aliases( + h5file: h5py.File, + search_pattern: str = None, + alias_key: str = "Name", +) -> Tuple[List[str], Dict[str, str]]: + """Read groups and aliases from a provided hdf5 file handle + + Args: + h5file (h5py.File): + The hdf5 file handle + search_pattern (str, optional): + Search pattern to select groups. Defaults to include all groups. + alias_key (str, optional): + Attribute key where aliases are stored. Defaults to "Name". + + Returns: + Tuple[List[str], Dict[str, str]]: + The list of groupnames and the alias dictionary parsed from the file + """ + # get group names: + group_names = list(h5file) + + # Filter the group names + if search_pattern is None: + filtered_group_names = group_names + else: + filtered_group_names = [name for name in group_names if search_pattern in name] + + alias_dict = {} + for name in filtered_group_names: + alias_dict[name] = get_attribute(h5file[name], alias_key) + + return filtered_group_names, alias_dict
+ + +
[docs]def hdf5_to_array( + h5file: h5py.File, + group_names: Sequence[str], + data_type: str = "float32", + time_stamps=False, + ms_markers_group: str = "msMarkers", + first_event_time_stamp_key: str = "FirstEventTimeStamp", +) -> np.ndarray: + """Reads the content of the given groups in an hdf5 file, and returns a + 2-dimensional array with the corresponding values. + + Args: + h5file (h5py.File): + hdf5 file handle to read from + group_names (str): + group names to read + data_type (str, optional): + Data type of the output data. Defaults to "float32". + time_stamps (bool, optional): + Option to calculate time stamps. Defaults to False. + ms_markers_group (str): h5 column containing timestamp information. + Defaults to "msMarkers". + first_event_time_stamp_key (str): h5 attribute containing the start + timestamp of a file. Defaults to "FirstEventTimeStamp". + + Returns: + np.ndarray: The 2-dimensional data array containing the values of the groups. + """ + + # Delayed array for loading an HDF5 file of reasonable size (e.g. < 1GB) + + # Read out groups: + data_list = [] + for group in group_names: + g_dataset = np.asarray(h5file[group]) + if bool(data_type): + g_dataset = g_dataset.astype(data_type) + data_list.append(g_dataset) + + # calculate time stamps + if time_stamps: + # create target array for time stamps + time_stamp_data = np.zeros(len(data_list[0])) + # the ms marker contains a list of events that occurred at full ms intervals. + # It's monotonically increasing, and can contain duplicates + ms_marker = np.asarray(h5file[ms_markers_group]) + + # try to get start timestamp from "FirstEventTimeStamp" attribute + try: + start_time_str = get_attribute(h5file, first_event_time_stamp_key) + start_time = datetime.datetime.strptime( + start_time_str, + "%Y-%m-%dT%H:%M:%S.%f%z", + ).timestamp() + except KeyError: + # get the start time of the file from its modification date if the key + # does not exist (old files) + start_time = os.path.getmtime(h5file.filename) # convert to ms + # the modification time points to the time when the file was finished, so we + # need to correct for the time it took to write the file + start_time -= len(ms_marker) / 1000 + + # fill in range before 1st marker + time_stamp_data[0 : ms_marker[0]] = start_time + for i in range(len(ms_marker) - 1): + # linear interpolation between ms: Disabled, because it takes a lot of + # time, and external signals are anyway not better synchronized than 1 ms + # time_stamp_data[ms_marker[n] : ms_marker[n + 1]] = np.linspace( + # start_time + n, + # start_time + n + 1, + # ms_marker[n + 1] - ms_marker[n], + # ) + time_stamp_data[ms_marker[i] : ms_marker[i + 1]] = start_time + (i + 1) / 1000 + # fill any remaining points + time_stamp_data[ms_marker[len(ms_marker) - 1] : len(time_stamp_data)] = ( + start_time + len(ms_marker) / 1000 + ) + + data_list.append(time_stamp_data) + + return np.asarray(data_list)
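The timestamp fill above assigns every event within one millisecond interval the same timestamp rather than interpolating; a toy version of the bookkeeping, with made-up numbers, looks like this:

```python
import numpy as np

ms_marker = np.array([3, 7, 10])  # index of the first event at each full millisecond
start_time = 1_700_000_000.0      # hypothetical start timestamp in epoch seconds
time_stamp_data = np.zeros(12)

time_stamp_data[0:ms_marker[0]] = start_time  # events before the first marker
for i in range(len(ms_marker) - 1):
    # all events within one millisecond interval share the same timestamp
    time_stamp_data[ms_marker[i]:ms_marker[i + 1]] = start_time + (i + 1) / 1000
time_stamp_data[ms_marker[-1]:] = start_time + len(ms_marker) / 1000
```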
+ + +
[docs]def hdf5_to_timed_array( + h5file: h5py.File, + group_names: Sequence[str], + data_type: str = "float32", + time_stamps=False, + ms_markers_group: str = "msMarkers", + first_event_time_stamp_key: str = "FirstEventTimeStamp", +) -> np.ndarray: + """Reads the content of the given groups in an hdf5 file, and returns a + timed version of a 2-dimensional array with the corresponding values. + + Args: + h5file (h5py.File): + hdf5 file handle to read from + group_names (str): + group names to read + data_type (str, optional): + Data type of the output data. Defaults to "float32". + time_stamps (bool, optional): + Option to calculate time stamps. Defaults to False. + ms_markers_group (str): h5 column containing timestamp information. + Defaults to "msMarkers". + first_event_time_stamp_key (str): h5 attribute containing the start + timestamp of a file. Defaults to "FirstEventTimeStamp". + + Returns: + np.ndarray: the array of the values at evenly spaced timing obtained from + the ms_markers. + """ + + # Delayed array for loading an HDF5 file of reasonable size (e.g. < 1GB) + + # Read out groups: + data_list = [] + ms_marker = np.asarray(h5file[ms_markers_group]) + for group in group_names: + g_dataset = np.asarray(h5file[group]) + if bool(data_type): + g_dataset = g_dataset.astype(data_type) + + timed_dataset = np.zeros_like(ms_marker) + for i, point in enumerate(ms_marker): + timed_dataset[i] = g_dataset[int(point) - 1] + + data_list.append(timed_dataset) + + # calculate time stamps + if time_stamps: + # try to get start timestamp from "FirstEventTimeStamp" attribute + try: + start_time_str = get_attribute(h5file, first_event_time_stamp_key) + start_time = datetime.datetime.strptime( + start_time_str, + "%Y-%m-%dT%H:%M:%S.%f%z", + ).timestamp() + except KeyError: + # get the start time of the file from its modification date if the key + # does not exist (old files) + start_time = os.path.getmtime(h5file.filename) # convert to ms + # the modification time points to the time when the file was finished, so we + # need to correct for the time it took to write the file + start_time -= len(ms_marker) / 1000 + + time_stamp_data = start_time + np.arange(len(ms_marker)) / 1000 + + data_list.append(time_stamp_data) + + return np.asarray(data_list)
+ + +
[docs]def get_attribute(h5group: h5py.Group, attribute: str) -> str: + """Reads, decodes and returns an attribute from an hdf5 group + + Args: + h5group (h5py.Group): + The hdf5 group to read from + attribute (str): + The name of the attribute + + Returns: + str: The parsed attribute data + """ + try: + content = h5group.attrs[attribute].decode("utf-8") + except AttributeError: # No need to decode + content = h5group.attrs[attribute] + except KeyError as exc: # No such attribute + raise KeyError(f"Attribute '{attribute}' not found!") from exc + + return content
+ + +
[docs]def get_count_rate( + h5file: h5py.File, + ms_markers_group: str = "msMarkers", +) -> Tuple[np.ndarray, np.ndarray]: + """Create count rate in the file from the msMarker column. + + Args: + h5file (h5py.File): The h5file from which to get the count rate. + ms_markers_group (str, optional): The hdf5 group where the millisecond markers + are stored. Defaults to "msMarkers". + + Returns: + Tuple[np.ndarray, np.ndarray]: The count rate in Hz and the seconds into the + scan. + """ + ms_markers = np.asarray(h5file[ms_markers_group]) + secs = np.arange(0, len(ms_markers)) / 1000 + msmarker_spline = sint.InterpolatedUnivariateSpline(secs, ms_markers, k=1) + rate_spline = msmarker_spline.derivative() + count_rate = rate_spline(secs) + + return (count_rate, secs)
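A short sketch of inspecting the count rate of a single file (the file name is hypothetical):

```python
import h5py
import matplotlib.pyplot as plt
from sed.loader.mpes.loader import get_count_rate

with h5py.File("Scan0001_1.h5", "r") as h5file:
    count_rate, secs = get_count_rate(h5file)

plt.plot(secs, count_rate)
plt.xlabel("time (s)")
plt.ylabel("count rate (Hz)")
plt.show()
```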
+ + +
[docs]def get_elapsed_time( + h5file: h5py.File, + ms_markers_group: str = "msMarkers", +) -> float: + """Return the elapsed time in the file from the msMarkers wave + + Args: + h5file (h5py.File): The h5file from which to get the count rate. + ms_markers_group (str, optional): The hdf5 group where the millisecond markers + are stored. Defaults to "msMarkers". + + Return: + float: The acquisition time of the file in seconds. + """ + secs = h5file[ms_markers_group].len() / 1000 + + return secs
+ + +
+def get_archiver_data(
+    archiver_url: str,
+    archiver_channel: str,
+    ts_from: float,
+    ts_to: float,
+) -> Tuple[np.ndarray, np.ndarray]:
+    """Extract time stamps and corresponding data from an EPICS archiver instance
+
+    Args:
+        archiver_url (str): URL of the archiver data extraction interface
+        archiver_channel (str): EPICS channel to extract data for
+        ts_from (float): starting time stamp of the range of interest
+        ts_to (float): ending time stamp of the range of interest
+
+    Returns:
+        Tuple[np.ndarray, np.ndarray]: The extracted time stamps and corresponding data
+    """
+    iso_from = datetime.datetime.utcfromtimestamp(ts_from).isoformat()
+    iso_to = datetime.datetime.utcfromtimestamp(ts_to).isoformat()
+    req_str = archiver_url + archiver_channel + "&from=" + iso_from + "Z&to=" + iso_to + "Z"
+    with urlopen(req_str) as req:
+        data = json.load(req)
+        secs = [x["secs"] + x["nanos"] * 1e-9 for x in data[0]["data"]]
+        vals = [x["val"] for x in data[0]["data"]]
+
+    return (np.asarray(secs), np.asarray(vals))
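The function assumes `archiver_url` already carries the query string up to the channel name; a hypothetical call might look like:

```python
from sed.loader.mpes.loader import get_archiver_data

secs, vals = get_archiver_data(
    archiver_url="http://aa.example.org/retrieval/data/getData.json?pv=",  # assumption
    archiver_channel="KTOF:Lens:Extr:V",  # hypothetical EPICS channel
    ts_from=1_600_000_000.0,
    ts_to=1_600_000_060.0,
)
mean_value = vals.mean()
```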
+ + +
[docs]class MpesLoader(BaseLoader): + """Mpes implementation of the Loader. Reads from h5 files or folders of the + SPECS Metis 1000 (FHI Berlin) + + Args: + config (dict, optional): Config dictionary. Defaults to None. + meta_handler (MetaHandler, optional): MetaHandler object. Defaults to None. + """ + + __name__ = "mpes" + + supported_file_types = ["h5"] + + def __init__( + self, + config: dict = None, + ): + super().__init__(config=config) + + self.read_timestamps = self._config.get("dataframe", {}).get( + "read_timestamps", + False, + ) + +
[docs] def read_dataframe( + self, + files: Union[str, Sequence[str]] = None, + folders: Union[str, Sequence[str]] = None, + runs: Union[str, Sequence[str]] = None, + ftype: str = "h5", + metadata: dict = None, + collect_metadata: bool = False, + time_stamps: bool = False, + **kwds, + ) -> Tuple[ddf.DataFrame, ddf.DataFrame, dict]: + """Read stored hdf5 files from a list or from folder and returns a dask + dataframe and corresponding metadata. + + Args: + files (Union[str, Sequence[str]], optional): File path(s) to process. + Defaults to None. + folders (Union[str, Sequence[str]], optional): Path to folder(s) where files + are stored. Path has priority such that if it's specified, the specified + files will be ignored. Defaults to None. + runs (Union[str, Sequence[str]], optional): Run identifier(s). Corresponding + files will be located in the location provided by ``folders``. Takes + precedence over ``files`` and ``folders``. Defaults to None. + ftype (str, optional): File extension to use. If a folder path is given, + all files with the specified extension are read into the dataframe + in the reading order. Defaults to "h5". + metadata (dict, optional): Manual meta data dictionary. Auto-generated + meta data are added to it. Defaults to None. + collect_metadata (bool): Option to collect metadata from files. Requires + a valid config dict. Defaults to False. + time_stamps (bool, optional): Option to create a time_stamp column in + the dataframe from ms-Markers in the files. Defaults to False. + **kwds: Keyword parameters. + + - **hdf5_groupnames** : List of groupnames to look for in the file. + - **hdf5_aliases**: Dictionary of aliases for the groupnames. + - **time_stamp_alias**: Alias for the timestamp column + - **ms_markers_group**: Group name of the millisecond marker column. + - **first_event_time_stamp_key**: Attribute name containing the start + timestamp of the file. + + Additional keywords are passed to ``hdf5_to_dataframe``. + + Raises: + ValueError: raised if neither files or folder provided. + FileNotFoundError: Raised if a file or folder is not found. + + Returns: + Tuple[ddf.DataFrame, ddf.DataFrame, dict]: Dask dataframe, timed Dask + dataframe and metadata read from specified files. + """ + # if runs is provided, try to locate the respective files relative to the provided folder. 
+ if runs is not None: # pylint: disable=duplicate-code + files = [] + if isinstance(runs, (str, int)): + runs = [runs] + for run in runs: + files.extend( + self.get_files_from_run_id(run_id=run, folders=folders, extension=ftype), + ) + self.runs = list(runs) + super().read_dataframe( + files=files, + ftype=ftype, + metadata=metadata, + ) + else: + # pylint: disable=duplicate-code + super().read_dataframe( + files=files, + folders=folders, + runs=runs, + ftype=ftype, + metadata=metadata, + ) + + hdf5_groupnames = kwds.pop( + "hdf5_groupnames", + self._config.get("dataframe", {}).get("hdf5_groupnames", []), + ) + hdf5_aliases = kwds.pop( + "hdf5_aliases", + self._config.get("dataframe", {}).get("hdf5_aliases", {}), + ) + time_stamp_alias = kwds.pop( + "time_stamp_alias", + self._config.get("dataframe", {}).get( + "time_stamp_alias", + "timeStamps", + ), + ) + ms_markers_group = kwds.pop( + "ms_markers_group", + self._config.get("dataframe", {}).get( + "ms_markers_group", + "msMarkers", + ), + ) + first_event_time_stamp_key = kwds.pop( + "first_event_time_stamp_key", + self._config.get("dataframe", {}).get( + "first_event_time_stamp_key", + "FirstEventTimeStamp", + ), + ) + df = hdf5_to_dataframe( + files=self.files, + group_names=hdf5_groupnames, + alias_dict=hdf5_aliases, + time_stamps=time_stamps, + time_stamp_alias=time_stamp_alias, + ms_markers_group=ms_markers_group, + first_event_time_stamp_key=first_event_time_stamp_key, + **kwds, + ) + timed_df = hdf5_to_timed_dataframe( + files=self.files, + group_names=hdf5_groupnames, + alias_dict=hdf5_aliases, + time_stamps=time_stamps, + time_stamp_alias=time_stamp_alias, + ms_markers_group=ms_markers_group, + first_event_time_stamp_key=first_event_time_stamp_key, + **kwds, + ) + + if collect_metadata: + metadata = self.gather_metadata( + files=self.files, + metadata=self.metadata, + ) + else: + metadata = self.metadata + + return df, timed_df, metadata
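Putting the loader together, a minimal session could look like the following (the config file and raw-data path are hypothetical; `parse_config` refers to sed's config helper):

```python
from sed.core.config import parse_config
from sed.loader.mpes.loader import MpesLoader

config = parse_config("config.yaml")  # hypothetical config file
loader = MpesLoader(config=config)
df, timed_df, metadata = loader.read_dataframe(
    runs=["0001"],
    folders="/path/to/raw",  # hypothetical raw-data location
    time_stamps=True,
)
```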
+ +
[docs] def get_files_from_run_id( + self, + run_id: str, + folders: Union[str, Sequence[str]] = None, + extension: str = "h5", + **kwds, # noqa: ARG002 + ) -> List[str]: + """Locate the files for a given run identifier. + + Args: + run_id (str): The run identifier to locate. + folders (Union[str, Sequence[str]], optional): The directory(ies) where the raw + data is located. Defaults to config["core"]["base_folder"] + extension (str, optional): The file extension. Defaults to "h5". + kwds: Keyword arguments + + Return: + List[str]: List of file path strings to the location of run data. + """ + if folders is None: + folders = self._config["core"]["paths"]["data_raw_dir"] + + if isinstance(folders, str): + folders = [folders] + + files: List[str] = [] + for folder in folders: + run_files = natsorted( + glob.glob( + folder + "/**/Scan" + str(run_id).zfill(4) + "_*." + extension, + recursive=True, + ), + ) + files.extend(run_files) + + # Check if any files are found + if not files: + raise FileNotFoundError( + f"No files found for run {run_id} in directory {str(folders)}", + ) + + # Return the list of found files + return files
+ +
[docs] def get_start_and_end_time(self) -> Tuple[float, float]: + """Extract the start and end time stamps from the loaded files + + Returns: + Tuple[float, float]: A tuple containing the start and end time stamps + """ + h5file = h5py.File(self.files[0]) + timestamps = hdf5_to_array( + h5file, + group_names=self._config["dataframe"]["hdf5_groupnames"], + time_stamps=True, + ) + ts_from = timestamps[-1][1] + h5file = h5py.File(self.files[-1]) + timestamps = hdf5_to_array( + h5file, + group_names=self._config["dataframe"]["hdf5_groupnames"], + time_stamps=True, + ) + ts_to = timestamps[-1][-1] + return (ts_from, ts_to)
+ +
[docs] def gather_metadata( + self, + files: Sequence[str], + metadata: dict = None, + ) -> dict: + """Collect meta data from files + + Args: + files (Sequence[str]): List of files loaded + metadata (dict, optional): Manual meta data dictionary. Auto-generated + meta data are added to it. Defaults to None. + + Returns: + dict: The completed metadata dictionary. + """ + + if metadata is None: + metadata = {} + print("Gathering metadata from different locations") + # Read events in with ms time stamps + print("Collecting time stamps...") + (ts_from, ts_to) = self.get_start_and_end_time() + + metadata["timing"] = { + "acquisition_start": datetime.datetime.utcfromtimestamp(ts_from) + .replace(tzinfo=datetime.timezone.utc) + .isoformat(), + "acquisition_stop": datetime.datetime.utcfromtimestamp(ts_to) + .replace(tzinfo=datetime.timezone.utc) + .isoformat(), + "acquisition_duration": int(ts_to - ts_from), + "collection_time": float(ts_to - ts_from), + } + + # import meta data from data file + if "file" not in metadata: # If already present, the value is assumed to be a dictionary + metadata["file"] = {} + + print("Collecting file metadata...") + with h5py.File(files[0], "r") as h5file: + for key, value in h5file.attrs.items(): + key = key.replace("VSet", "V") + metadata["file"][key] = value + + metadata["entry_identifier"] = os.path.dirname( + os.path.realpath(files[0]), + ) + + print("Collecting data from the EPICS archive...") + # Get metadata from Epics archive if not present already + epics_channels = self._config["metadata"]["epics_pvs"] + + start = datetime.datetime.utcfromtimestamp(ts_from).isoformat() + + channels_missing = set(epics_channels) - set( + metadata["file"].keys(), + ) + for channel in channels_missing: + try: + _, vals = get_archiver_data( + archiver_url=self._config["metadata"].get("archiver_url"), + archiver_channel=channel, + ts_from=ts_from, + ts_to=ts_to, + ) + metadata["file"][f"{channel}"] = np.mean(vals) + + except IndexError: + metadata["file"][f"{channel}"] = np.nan + print( + f"Data for channel {channel} doesn't exist for time {start}", + ) + except HTTPError as exc: + print( + f"Incorrect URL for the archive channel {channel}. " + "Make sure that the channel name and file start and end times are " + "correct.", + ) + print("Error code: ", exc) + except URLError as exc: + print( + f"Cannot access the archive URL for channel {channel}. " + f"Make sure that you are within the FHI network." 
+ f"Skipping over channels {channels_missing}.", + ) + print("Error code: ", exc) + break + + # Determine the correct aperture_config + stamps = sorted( + list(self._config["metadata"]["aperture_config"]) + [start], + ) + current_index = stamps.index(start) + timestamp = stamps[current_index - 1] # pick last configuration before file date + + # Aperture metadata + if "instrument" not in metadata.keys(): + metadata["instrument"] = {"analyzer": {}} + metadata["instrument"]["analyzer"]["fa_shape"] = "circle" + metadata["instrument"]["analyzer"]["ca_shape"] = "circle" + metadata["instrument"]["analyzer"]["fa_size"] = np.nan + metadata["instrument"]["analyzer"]["ca_size"] = np.nan + # get field aperture shape and size + if { + self._config["metadata"]["fa_in_channel"], + self._config["metadata"]["fa_hor_channel"], + }.issubset(set(metadata["file"].keys())): + fa_in = metadata["file"][self._config["metadata"]["fa_in_channel"]] + fa_hor = metadata["file"][self._config["metadata"]["fa_hor_channel"]] + for key, value in self._config["metadata"]["aperture_config"][timestamp][ + "fa_size" + ].items(): + if value[0][0] < fa_in < value[0][1] and value[1][0] < fa_hor < value[1][1]: + try: + k_float = float(key) + metadata["instrument"]["analyzer"]["fa_size"] = k_float + except ValueError: # store string if numeric interpretation fails + metadata["instrument"]["analyzer"]["fa_shape"] = key + break + else: + print("Field aperture size not found.") + + # get contrast aperture shape and size + if self._config["metadata"]["ca_in_channel"] in metadata["file"]: + ca_in = metadata["file"][self._config["metadata"]["ca_in_channel"]] + for key, value in self._config["metadata"]["aperture_config"][timestamp][ + "ca_size" + ].items(): + if value[0] < ca_in < value[1]: + try: + k_float = float(key) + metadata["instrument"]["analyzer"]["ca_size"] = k_float + except ValueError: # store string if numeric interpretation fails + metadata["instrument"]["analyzer"]["ca_shape"] = key + break + else: + print("Contrast aperture size not found.") + + # Storing the lens modes corresponding to lens voltages. + # Use lens voltages present in first lens_mode entry. + lens_list = self._config["metadata"]["lens_mode_config"][ + next(iter(self._config["metadata"]["lens_mode_config"])) + ].keys() + + lens_volts = np.array( + [metadata["file"].get(f"KTOF:Lens:{lens}:V", np.NaN) for lens in lens_list], + ) + for mode, value in self._config["metadata"]["lens_mode_config"].items(): + lens_volts_config = np.array([value[k] for k in lens_list]) + if np.allclose( + lens_volts, + lens_volts_config, + rtol=0.005, + ): # Equal upto 0.5% tolerance + metadata["instrument"]["analyzer"]["lens_mode"] = mode + break + else: + print( + "Lens mode for given lens voltages not found. " + "Storing lens mode from the user, if provided.", + ) + + # Determining projection from the lens mode + try: + lens_mode = metadata["instrument"]["analyzer"]["lens_mode"] + if "spatial" in lens_mode.split("_")[1]: + metadata["instrument"]["analyzer"]["projection"] = "real" + metadata["instrument"]["analyzer"]["scheme"] = "momentum dispersive" + else: + metadata["instrument"]["analyzer"]["projection"] = "reciprocal" + metadata["instrument"]["analyzer"]["scheme"] = "spatial dispersive" + except IndexError: + print( + "Lens mode must have the form, '6kV_kmodem4.0_20VTOF_v3.sav'. " + "Can't determine projection. " + "Storing projection from the user, if provided.", + ) + except KeyError: + print( + "Lens mode not found. Can't determine projection. 
" + "Storing projection from the user, if provided.", + ) + + return metadata
+ +
[docs] def get_count_rate( + self, + fids: Sequence[int] = None, + **kwds, + ) -> Tuple[np.ndarray, np.ndarray]: + """Create count rate from the msMarker column for the files specified in + ``fids``. + + Args: + fids (Sequence[int], optional): fids (Sequence[int]): the file ids to + include. Defaults to list of all file ids. + kwds: Keyword arguments: + + - **ms_markers_group**: Name of the hdf5 group containing the ms-markers + + Returns: + Tuple[np.ndarray, np.ndarray]: Arrays containing countrate and seconds + into the scan. + """ + if fids is None: + fids = range(0, len(self.files)) + + ms_markers_group = kwds.pop( + "ms_markers_group", + self._config.get("dataframe", {}).get( + "ms_markers_group", + "msMarkers", + ), + ) + + secs_list = [] + count_rate_list = [] + accumulated_time = 0 + for fid in fids: + count_rate_, secs_ = get_count_rate( + h5py.File(self.files[fid]), + ms_markers_group=ms_markers_group, + ) + secs_list.append((accumulated_time + secs_).T) + count_rate_list.append(count_rate_.T) + accumulated_time += secs_[-1] + + count_rate = np.concatenate(count_rate_list) + secs = np.concatenate(secs_list) + + return count_rate, secs
+ +
[docs] def get_elapsed_time(self, fids: Sequence[int] = None, **kwds) -> float: + """Return the elapsed time in the files specified in ``fids`` from + the msMarkers column. + + Args: + fids (Sequence[int], optional): fids (Sequence[int]): the file ids to + include. Defaults to list of all file ids. + kwds: Keyword arguments: + + - **ms_markers_group**: Name of the hdf5 group containing the ms-markers + + Return: + float: The elapsed time in the files in seconds. + """ + if fids is None: + fids = range(0, len(self.files)) + + ms_markers_group = kwds.pop( + "ms_markers_group", + self._config.get("dataframe", {}).get( + "ms_markers_group", + "msMarkers", + ), + ) + + secs = 0.0 + for fid in fids: + secs += get_elapsed_time( + h5py.File(self.files[fid]), + ms_markers_group=ms_markers_group, + ) + + return secs
+
+
+LOADER = MpesLoader
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/loader/sxp/loader.html b/sed/2.1.0/_modules/sed/loader/sxp/loader.html
new file mode 100644
index 0000000..8cd0d70
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/loader/sxp/loader.html
@@ -0,0 +1,1473 @@
+ sed.loader.sxp.loader — SED 0.2.1 documentation

Source code for sed.loader.sxp.loader

+# pylint: disable=duplicate-code
+"""
+This module implements the SXP data loader.
+This loader currently supports the SXP momentum microscope instrument.
+The raw hdf5 data is combined and saved into buffer files and loaded as a dask dataframe.
+The dataframe is an amalgamation of all h5 files for a combination of runs, where the NaNs are
+automatically forward filled across different files.
+This can then be saved as a parquet for out-of-sed processing and reread back to access other
+sed functionality.
+Most of the structure is identical to the FLASH loader.
+"""
+import time
+from functools import reduce
+from pathlib import Path
+from typing import List
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+import dask.dataframe as dd
+import h5py
+import numpy as np
+import pyarrow.parquet as pq
+from joblib import delayed
+from joblib import Parallel
+from natsort import natsorted
+from pandas import DataFrame
+from pandas import MultiIndex
+from pandas import Series
+
+from sed.core import dfops
+from sed.loader.base.loader import BaseLoader
+from sed.loader.utils import parse_h5_keys
+from sed.loader.utils import split_dld_time_from_sector_id
+
+
+
[docs]class SXPLoader(BaseLoader): + """ + The class generates multiindexed multidimensional pandas dataframes from the new SXP + dataformat resolved by both macro and microbunches alongside electrons. + Only the read_dataframe (inherited and implemented) method is accessed by other modules. + """ + + __name__ = "sxp" + + supported_file_types = ["h5"] + + def __init__(self, config: dict) -> None: + super().__init__(config=config) + self.multi_index = ["trainId", "pulseId", "electronId"] + self.index_per_electron: MultiIndex = None + self.index_per_pulse: MultiIndex = None + self.failed_files_error: List[str] = [] + self.array_indices: List[List[slice]] = None + +
[docs] def initialize_paths(self) -> Tuple[List[Path], Path]: + """ + Initializes the paths based on the configuration. + + Returns: + Tuple[List[Path], Path]: A tuple containing a list of raw data directories + paths and the parquet data directory path. + + Raises: + ValueError: If required values are missing from the configuration. + FileNotFoundError: If the raw data directories are not found. + """ + # Parses to locate the raw beamtime directory from config file + if ( + "paths" in self._config["core"] + and self._config["core"]["paths"].get("data_raw_dir", "") + and self._config["core"]["paths"].get("data_parquet_dir", "") + ): + data_raw_dir = [ + Path(self._config["core"]["paths"].get("data_raw_dir", "")), + ] + data_parquet_dir = Path( + self._config["core"]["paths"].get("data_parquet_dir", ""), + ) + + else: + try: + beamtime_id = self._config["core"]["beamtime_id"] + year = self._config["core"]["year"] + except KeyError as exc: + raise ValueError( + "The beamtime_id and year are required.", + ) from exc + + beamtime_dir = Path( + self._config["dataframe"]["beamtime_dir"][self._config["core"]["beamline"]], + ) + beamtime_dir = beamtime_dir.joinpath(f"{year}/{beamtime_id}/") + + if not beamtime_dir.joinpath("raw").is_dir(): + raise FileNotFoundError("Raw data directory not found.") + + data_raw_dir = [beamtime_dir.joinpath("raw")] + + parquet_path = "processed/parquet" + data_parquet_dir = beamtime_dir.joinpath(parquet_path) + + data_parquet_dir.mkdir(parents=True, exist_ok=True) + + return data_raw_dir, data_parquet_dir
+ +
[docs] def get_files_from_run_id( + self, + run_id: str, + folders: Union[str, Sequence[str]] = None, + extension: str = "h5", + **kwds, + ) -> List[str]: + """Returns a list of filenames for a given run located in the specified directory + for the specified data acquisition (daq). + + Args: + run_id (str): The run identifier to locate. + folders (Union[str, Sequence[str]], optional): The directory(ies) where the raw + data is located. Defaults to config["core"]["base_folder"]. + extension (str, optional): The file extension. Defaults to "h5". + kwds: Keyword arguments: + - daq (str): The data acquisition identifier. + + Returns: + List[str]: A list of path strings representing the collected file names. + + Raises: + FileNotFoundError: If no files are found for the given run in the directory. + """ + # Define the stream name prefixes based on the data acquisition identifier + stream_name_prefixes = self._config["dataframe"]["stream_name_prefixes"] + stream_name_postfixes = self._config["dataframe"].get("stream_name_postfixes", {}) + + if isinstance(run_id, (int, np.integer)): + run_id = str(run_id).zfill(4) + + if folders is None: + folders = self._config["core"]["base_folder"] + + if isinstance(folders, str): + folders = [folders] + + daq = kwds.pop("daq", self._config.get("dataframe", {}).get("daq")) + + stream_name_postfix = stream_name_postfixes.get(daq, "") + # Generate the file patterns to search for in the directory + file_pattern = f"**/{stream_name_prefixes[daq]}{run_id}{stream_name_postfix}*." + extension + + files: List[Path] = [] + # Use pathlib to search for matching files in each directory + for folder in folders: + files.extend( + natsorted( + Path(folder).glob(file_pattern), + key=lambda filename: str(filename).rsplit("_", maxsplit=1)[-1], + ), + ) + + # Check if any files are found + if not files: + raise FileNotFoundError( + f"No files found for run {run_id} in directory {str(folders)}", + ) + + # Return the list of found files + return [str(file.resolve()) for file in files]
+ + @property + def available_channels(self) -> List: + """Returns the channel names that are available for use, + excluding pulseId, defined by the json file""" + available_channels = list(self._config["dataframe"]["channels"].keys()) + available_channels.remove("pulseId") + available_channels.remove("trainId") + return available_channels + +
[docs] def get_channels(self, formats: Union[str, List[str]] = "", index: bool = False) -> List[str]: + """ + Returns a list of channels associated with the specified format(s). + + Args: + formats (Union[str, List[str]]): The desired format(s) + ('per_pulse', 'per_electron', 'per_train', 'all'). + index (bool): If True, includes channels from the multi_index. + + Returns: + List[str]: A list of channels with the specified format(s). + """ + # If 'formats' is a single string, convert it to a list for uniform processing. + if isinstance(formats, str): + formats = [formats] + + # If 'formats' is a string "all", gather all possible formats. + if formats == ["all"]: + channels = self.get_channels(["per_pulse", "per_train", "per_electron"], index) + return channels + + channels = [] + for format_ in formats: + # Gather channels based on the specified format(s). + channels.extend( + key + for key in self.available_channels + if self._config["dataframe"]["channels"][key]["format"] == format_ + and key != "dldAux" + ) + # Include 'dldAuxChannels' if the format is 'per_pulse'. + if format_ == "per_pulse" and "dldAux" in self._config["dataframe"]["channels"]: + channels.extend( + self._config["dataframe"]["channels"]["dldAux"]["dldAuxChannels"].keys(), + ) + + # Include channels from multi_index if 'index' is True. + if index: + channels.extend(self.multi_index) + + return channels
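For illustration, with a populated channel config the selection behaves as follows (a sketch, not the output of a real config):

```python
loader = SXPLoader(config=config)  # config dict as described above
electron_channels = loader.get_channels("per_electron")
# include the index levels trainId, pulseId, electronId as well
all_channels = loader.get_channels("all", index=True)
```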
+ +
[docs] def reset_multi_index(self) -> None: + """Resets the index per pulse and electron""" + self.index_per_electron = None + self.index_per_pulse = None + self.array_indices = None
+ +
[docs] def create_multi_index_per_electron(self, h5_file: h5py.File) -> None: + """ + Creates an index per electron using pulseId for usage with the electron + resolved pandas DataFrame. + + Args: + h5_file (h5py.File): The HDF5 file object. + + Notes: + - This method relies on the 'pulseId' channel to determine + the macrobunch IDs. + - It creates a MultiIndex with trainId, pulseId, and electronId + as the index levels. + """ + + # relative macrobunch IDs obtained from the trainId channel + train_id, mab_array = self.create_numpy_array_per_channel( + h5_file, + "trainId", + ) + # Internal microbunch IDs obtained from the pulseId channel + train_id, mib_array = self.create_numpy_array_per_channel( + h5_file, + "pulseId", + ) + + # Chopping data into trains + macrobunch_index = [] + microbunch_ids = [] + macrobunch_indices = [] + for i in train_id.index: + # removing broken trailing hit copies + num_trains = self._config["dataframe"].get("num_trains", 0) + if num_trains: + try: + num_valid_hits = np.where(np.diff(mib_array[i].astype(np.int32)) < 0)[0][ + num_trains - 1 + ] + mab_array[i, num_valid_hits:] = 0 + mib_array[i, num_valid_hits:] = 0 + except IndexError: + pass + train_ends = np.where(np.diff(mib_array[i].astype(np.int32)) < -1)[0] + indices = [] + index = 0 + for train, train_end in enumerate(train_ends): + macrobunch_index.append(train_id[i] + np.uint(train)) + microbunch_ids.append(mib_array[i, index:train_end]) + indices.append(slice(index, train_end)) + index = train_end + 1 + macrobunch_indices.append(indices) + self.array_indices = macrobunch_indices + # Create a series with the macrobunches as index and + # microbunches as values + macrobunches = ( + Series( + (microbunch_ids[i] for i in range(len(macrobunch_index))), + name="pulseId", + index=macrobunch_index, + ) + - self._config["dataframe"]["ubid_offset"] + ) + + # Explode dataframe to get all microbunch vales per macrobunch, + # remove NaN values and convert to type int + microbunches = macrobunches.explode().dropna().astype(int) + + # Create temporary index values + index_temp = MultiIndex.from_arrays( + (microbunches.index, microbunches.values), + names=["trainId", "pulseId"], + ) + + # Calculate the electron counts per pulseId unique preserves the order of appearance + electron_counts = index_temp.value_counts()[index_temp.unique()].values + + # Series object for indexing with electrons + electrons = ( + Series( + [np.arange(electron_counts[i]) for i in range(electron_counts.size)], + ) + .explode() + .astype(int) + ) + + # Create a pandas MultiIndex using the exploded datasets + self.index_per_electron = MultiIndex.from_arrays( + (microbunches.index, microbunches.values, electrons), + names=self.multi_index, + )
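To see what the explode steps produce, here is a toy version with two trains and made-up IDs (the ubid_offset subtraction is omitted for brevity):

```python
import numpy as np
from pandas import MultiIndex, Series

# two trains (100, 101) with their per-train arrays of microbunch IDs
macrobunches = Series(
    [np.array([0, 0, 1]), np.array([0, 1])],
    index=[100, 101],
    name="pulseId",
)
microbunches = macrobunches.explode().dropna().astype(int)
index_temp = MultiIndex.from_arrays(
    (microbunches.index, microbunches.values),
    names=["trainId", "pulseId"],
)
electron_counts = index_temp.value_counts()[index_temp.unique()].values
electrons = Series(
    [np.arange(count) for count in electron_counts],
).explode().astype(int)
index = MultiIndex.from_arrays(
    (microbunches.index, microbunches.values, electrons),
    names=["trainId", "pulseId", "electronId"],
)
# -> (100, 0, 0), (100, 0, 1), (100, 1, 0), (101, 0, 0), (101, 1, 0)
```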
+ +
[docs] def create_multi_index_per_pulse( + self, + train_id: Series, + np_array: np.ndarray, + ) -> None: + """ + Creates an index per pulse using a pulse resolved channel's macrobunch ID, for usage with + the pulse resolved pandas DataFrame. + + Args: + train_id (Series): The train ID Series. + np_array (np.ndarray): The numpy array containing the pulse resolved data. + + Notes: + - This method creates a MultiIndex with trainId and pulseId as the index levels. + """ + + # Create a pandas MultiIndex, useful for comparing electron and + # pulse resolved dataframes + self.index_per_pulse = MultiIndex.from_product( + (train_id, np.arange(0, np_array.shape[1])), + names=["trainId", "pulseId"], + )
+ +
[docs] def create_numpy_array_per_channel( + self, + h5_file: h5py.File, + channel: str, + ) -> Tuple[Series, np.ndarray]: + """ + Returns a numpy array for a given channel name for a given file. + + Args: + h5_file (h5py.File): The h5py file object. + channel (str): The name of the channel. + + Returns: + Tuple[Series, np.ndarray]: A tuple containing the train ID Series and the numpy array + for the channel's data. + + """ + # Get the data from the necessary h5 file and channel + dataset = h5_file[self._config["dataframe"]["channels"][channel]["dataset_key"]] + index = h5_file[self._config["dataframe"]["channels"][channel]["index_key"]] + + channel_dict = self._config["dataframe"]["channels"][channel] # channel parameters + + train_id = Series(index, name="trainId") # macrobunch + + # unpacks the data into np.ndarray + np_array = dataset[()] + if len(np_array.shape) == 2 and self._config["dataframe"]["channels"][channel].get( + "max_hits", + 0, + ): + np_array = np_array[:, : self._config["dataframe"]["channels"][channel]["max_hits"]] + + # Use predefined axis and slice from the json file + # to choose correct dimension for necessary channel + if "slice" in channel_dict: + np_array = np.take( + np_array, + channel_dict["slice"], + axis=1, + ) + + if "scale" in channel_dict: + np_array = np_array / float(channel_dict["scale"]) + + return train_id, np_array
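The channel parameters read here come from the `dataframe.channels` section of the config; a hypothetical entry (all keys and HDF5 paths below are assumptions for illustration only) could look like:

```python
channels = {
    "dldPosX": {
        "format": "per_electron",
        "dataset_key": "/INSTRUMENT/SXP/DET/xy/value",   # hypothetical HDF5 path
        "index_key": "/INSTRUMENT/SXP/DET/xy/trainId",   # hypothetical HDF5 path
        "slice": 0,       # which column along axis 1 to keep
        "scale": 2.0,     # raw values are divided by this factor
        "max_hits": 500,  # truncate broken trailing hits
    },
}
```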
+ +
[docs] def create_dataframe_per_electron( + self, + np_array: np.ndarray, + channel: str, + ) -> DataFrame: + """ + Returns a pandas DataFrame for a given channel name of type [per electron]. + + Args: + np_array (np.ndarray): The numpy array containing the channel data. + channel (str): The name of the channel. + + Returns: + DataFrame: The pandas DataFrame for the channel's data. + + Notes: + The microbunch resolved data is exploded and converted to a DataFrame. The MultiIndex + is set, and the NaN values are dropped, alongside the pulseId = 0 (meaningless). + + """ + if self.array_indices is None or len(self.array_indices) != np_array.shape[0]: + raise RuntimeError( + "macrobunch_indices not set correctly, internal inconsistency detected.", + ) + train_data = [] + for i, _ in enumerate(self.array_indices): + for indices in self.array_indices[i]: + train_data.append(np_array[i, indices]) + return ( + Series((train for train in train_data), name=channel) + .explode() + .dropna() + .to_frame() + .set_index(self.index_per_electron) + .drop( + index=np.arange(-self._config["dataframe"]["ubid_offset"], 0), + level=1, + errors="ignore", + ) + )
+ +
[docs] def create_dataframe_per_pulse( + self, + np_array: np.ndarray, + train_id: Series, + channel: str, + channel_dict: dict, + ) -> DataFrame: + """ + Returns a pandas DataFrame for a given channel name of type [per pulse]. + + Args: + np_array (np.ndarray): The numpy array containing the channel data. + train_id (Series): The train ID Series. + channel (str): The name of the channel. + channel_dict (dict): The dictionary containing channel parameters. + + Returns: + DataFrame: The pandas DataFrame for the channel's data. + + Notes: + - For auxiliary channels, the macrobunch resolved data is repeated 499 times to be + compared to electron resolved data for each auxiliary channel. The data is then + converted to a multicolumn DataFrame. + - For all other pulse resolved channels, the macrobunch resolved data is exploded + to a DataFrame and the MultiIndex is set. + + """ + + # Special case for auxiliary channels + if channel == "dldAux": + # Checks the channel dictionary for correct slices and creates a multicolumn DataFrame + data_frames = ( + Series( + (np_array[i, value] for i in train_id.index), + name=key, + index=train_id, + ).to_frame() + for key, value in channel_dict["dldAuxChannels"].items() + ) + + # Multiindex set and combined dataframe returned + data = reduce(DataFrame.combine_first, data_frames) + + # For all other pulse resolved channels + else: + # Macrobunch resolved data is exploded to a DataFrame and the MultiIndex is set + + # Creates the index_per_pulse for the given channel + self.create_multi_index_per_pulse(train_id, np_array) + data = ( + Series((np_array[i] for i in train_id.index), name=channel) + .explode() + .to_frame() + .set_index(self.index_per_pulse) + ) + + return data
+ +
[docs] def create_dataframe_per_train( + self, + np_array: np.ndarray, + train_id: Series, + channel: str, + ) -> DataFrame: + """ + Returns a pandas DataFrame for a given channel name of type [per train]. + + Args: + np_array (np.ndarray): The numpy array containing the channel data. + train_id (Series): The train ID Series. + channel (str): The name of the channel. + + Returns: + DataFrame: The pandas DataFrame for the channel's data. + """ + return ( + Series((np_array[i] for i in train_id.index), name=channel) + .to_frame() + .set_index(train_id) + )
+ +
[docs] def create_dataframe_per_channel( + self, + h5_file: h5py.File, + channel: str, + ) -> Union[Series, DataFrame]: + """ + Returns a pandas DataFrame for a given channel name from a given file. + + This method takes an h5py.File object `h5_file` and a channel name `channel`, and returns + a pandas DataFrame containing the data for that channel from the file. The format of the + DataFrame depends on the channel's format specified in the configuration. + + Args: + h5_file (h5py.File): The h5py.File object representing the HDF5 file. + channel (str): The name of the channel. + + Returns: + Union[Series, DataFrame]: A pandas Series or DataFrame representing the channel's data. + + Raises: + ValueError: If the channel has an undefined format. + + """ + [train_id, np_array] = self.create_numpy_array_per_channel( + h5_file, + channel, + ) # numpy Array created + channel_dict = self._config["dataframe"]["channels"][channel] # channel parameters + + # If np_array is size zero, fill with NaNs + if np_array.size == 0: + # Fill the np_array with NaN values of the same shape as train_id + np_array = np.full_like(train_id, np.nan, dtype=np.double) + # Create a Series using np_array, with train_id as the index + data = Series( + (np_array[i] for i in train_id.index), + name=channel, + index=train_id, + ) + + # Electron resolved data is treated here + if channel_dict["format"] == "per_electron": + # If index_per_electron is None, create it for the given file + if self.index_per_electron is None: + self.create_multi_index_per_electron(h5_file) + + # Create a DataFrame for electron-resolved data + data = self.create_dataframe_per_electron( + np_array, + channel, + ) + + # Pulse resolved data is treated here + elif channel_dict["format"] == "per_pulse": + # Create a DataFrame for pulse-resolved data + data = self.create_dataframe_per_pulse( + np_array, + train_id, + channel, + channel_dict, + ) + + # Train resolved data is treated here + elif channel_dict["format"] == "per_train": + # Create a DataFrame for train-resolved data + data = self.create_dataframe_per_train(np_array, train_id, channel) + + else: + raise ValueError( + channel + + "has an undefined format. Available formats are \ + per_pulse, per_electron and per_train", + ) + + return data
+ +
[docs] def concatenate_channels( + self, + h5_file: h5py.File, + ) -> DataFrame: + """ + Concatenates the channels from the provided h5py.File into a pandas DataFrame. + + This method takes an h5py.File object `h5_file` and concatenates the channels present in + the file into a single pandas DataFrame. The concatenation is performed based on the + available channels specified in the configuration. + + Args: + h5_file (h5py.File): The h5py.File object representing the HDF5 file. + + Returns: + DataFrame: A concatenated pandas DataFrame containing the channels. + + Raises: + ValueError: If the group_name for any channel does not exist in the file. + + """ + all_keys = parse_h5_keys(h5_file) # Parses all channels present + + # Check for if the provided dataset_keys and index_keys actually exists in the file + for channel in self._config["dataframe"]["channels"]: + dataset_key = self._config["dataframe"]["channels"][channel]["dataset_key"] + if dataset_key not in all_keys: + raise ValueError( + f"The dataset_key for channel {channel} does not exist.", + ) + index_key = self._config["dataframe"]["channels"][channel]["index_key"] + if index_key not in all_keys: + raise ValueError( + f"The index_key for channel {channel} does not exist.", + ) + + # Create a generator expression to generate data frames for each channel + data_frames = ( + self.create_dataframe_per_channel(h5_file, each) for each in self.available_channels + ) + + # Use the reduce function to join the data frames into a single DataFrame + return reduce( + lambda left, right: left.join(right, how="outer"), + data_frames, + )
+ +
[docs] def create_dataframe_per_file( + self, + file_path: Path, + ) -> DataFrame: + """ + Create pandas DataFrames for the given file. + + This method loads an HDF5 file specified by `file_path` and constructs a pandas DataFrame + from the datasets within the file. The order of datasets in the DataFrames is the opposite + of the order specified by channel names. + + Args: + file_path (Path): Path to the input HDF5 file. + + Returns: + DataFrame: pandas DataFrame + + """ + # Loads h5 file and creates a dataframe + with h5py.File(file_path, "r") as h5_file: + self.reset_multi_index() # Reset MultiIndexes for next file + df = self.concatenate_channels(h5_file) + df = df.dropna(subset=self._config["dataframe"].get("tof_column", "dldTimeSteps")) + # correct the 3 bit shift which encodes the detector ID in the 8s time + if self._config["dataframe"].get("split_sector_id_from_dld_time", False): + df = split_dld_time_from_sector_id(df, config=self._config) + return df
+ +
[docs] def create_buffer_file(self, h5_path: Path, parquet_path: Path) -> Union[bool, Exception]: + """ + Converts an HDF5 file to Parquet format to create a buffer file. + + This method uses `create_dataframe_per_file` method to create dataframes from individual + files within an HDF5 file. The resulting dataframe is then saved to a Parquet file. + + Args: + h5_path (Path): Path to the input HDF5 file. + parquet_path (Path): Path to the output Parquet file. + + Raises: + ValueError: If an error occurs during the conversion process. + + """ + try: + ( + self.create_dataframe_per_file(h5_path) + .reset_index(level=self.multi_index) + .to_parquet(parquet_path, index=False) + ) + except Exception as exc: # pylint: disable=broad-except + self.failed_files_error.append(f"{parquet_path}: {type(exc)} {exc}") + return exc + return None
+ +
[docs] def buffer_file_handler( + self, + data_parquet_dir: Path, + detector: str, + force_recreate: bool, + ) -> Tuple[List[Path], List, List]: + """ + Handles the conversion of buffer files (h5 to parquet) and returns the filenames. + + Args: + data_parquet_dir (Path): Directory where the parquet files will be stored. + detector (str): Detector name. + force_recreate (bool): Forces recreation of buffer files + + Returns: + Tuple[List[Path], List, List]: Three lists, one for + parquet file paths, one for metadata and one for schema. + + Raises: + FileNotFoundError: If the conversion fails for any files or no data is available. + """ + + # Create the directory for buffer parquet files + buffer_file_dir = data_parquet_dir.joinpath("buffer") + buffer_file_dir.mkdir(parents=True, exist_ok=True) + + # Create two separate lists for h5 and parquet file paths + h5_filenames = [Path(file) for file in self.files] + parquet_filenames = [ + buffer_file_dir.joinpath(Path(file).stem + detector) for file in self.files + ] + existing_parquet_filenames = [file for file in parquet_filenames if file.exists()] + + # Raise a value error if no data is available after the conversion + if len(h5_filenames) == 0: + raise ValueError("No data available. Probably failed reading all h5 files") + + if not force_recreate: + # Check if the available channels match the schema of the existing parquet files + parquet_schemas = [pq.read_schema(file) for file in existing_parquet_filenames] + config_schema = set(self.get_channels(formats="all", index=True)) + if self._config["dataframe"].get("split_sector_id_from_dld_time", False): + config_schema.add(self._config["dataframe"].get("sector_id_column", False)) + + for i, schema in enumerate(parquet_schemas): + schema_set = set(schema.names) + if schema_set != config_schema: + missing_in_parquet = config_schema - schema_set + missing_in_config = schema_set - config_schema + + missing_in_parquet_str = ( + f"Missing in parquet: {missing_in_parquet}" if missing_in_parquet else "" + ) + missing_in_config_str = ( + f"Missing in config: {missing_in_config}" if missing_in_config else "" + ) + + raise ValueError( + "The available channels do not match the schema of file", + f"{existing_parquet_filenames[i]}", + f"{missing_in_parquet_str}", + f"{missing_in_config_str}", + "Please check the configuration file or set force_recreate to True.", + ) + + # Choose files to read + files_to_read = [ + (h5_path, parquet_path) + for h5_path, parquet_path in zip(h5_filenames, parquet_filenames) + if force_recreate or not parquet_path.exists() + ] + + print(f"Reading files: {len(files_to_read)} new files of {len(h5_filenames)} total.") + + # Initialize the indices for create_buffer_file conversion + self.reset_multi_index() + + # Convert the remaining h5 files to parquet in parallel if there are any + if len(files_to_read) > 0: + error = Parallel(n_jobs=len(files_to_read), verbose=10)( + delayed(self.create_buffer_file)(h5_path, parquet_path) + for h5_path, parquet_path in files_to_read + ) + if any(error): + raise RuntimeError(f"Conversion failed for some files. 
{error}") + # for h5_path, parquet_path in files_to_read: + # self.create_buffer_file(h5_path, parquet_path) + + # Raise an error if the conversion failed for any files + # TODO: merge this and the previous error trackings + if self.failed_files_error: + raise FileNotFoundError( + "Conversion failed for the following files:\n" + "\n".join(self.failed_files_error), + ) + + print("All files converted successfully!") + + # read all parquet metadata and schema + metadata = [pq.read_metadata(file) for file in parquet_filenames] + schema = [pq.read_schema(file) for file in parquet_filenames] + + return parquet_filenames, metadata, schema
+ +
[docs] def parquet_handler( + self, + data_parquet_dir: Path, + detector: str = "", + parquet_path: Path = None, + converted: bool = False, + load_parquet: bool = False, + save_parquet: bool = False, + force_recreate: bool = False, + ) -> Tuple[dd.DataFrame, dd.DataFrame]: + """ + Handles loading and saving of parquet files based on the provided parameters. + + Args: + data_parquet_dir (Path): Directory where the parquet files are located. + detector (str, optional): Adds a identifier for parquets to distinguish multidetector + systems. + parquet_path (str, optional): Path to the combined parquet file. + converted (bool, optional): True if data is augmented by adding additional columns + externally and saved into converted folder. + load_parquet (bool, optional): Loads the entire parquet into the dd dataframe. + save_parquet (bool, optional): Saves the entire dataframe into a parquet. + force_recreate (bool, optional): Forces recreation of buffer file. + Returns: + tuple: A tuple containing two dataframes: + - dataframe_electron: Dataframe containing the loaded/augmented electron data. + - dataframe_pulse: Dataframe containing the loaded/augmented timed data. + + Raises: + FileNotFoundError: If the requested parquet file is not found. + + """ + + # Construct the parquet path if not provided + if parquet_path is None: + parquet_name = "_".join(str(run) for run in self.runs) + parquet_dir = data_parquet_dir.joinpath("converted") if converted else data_parquet_dir + + parquet_path = parquet_dir.joinpath( + "run_" + parquet_name + detector, + ).with_suffix(".parquet") + + # Check if load_parquet is flagged and then load the file if it exists + if load_parquet: + try: + dataframe = dd.read_parquet(parquet_path) + except Exception as exc: + raise FileNotFoundError( + "The final parquet for this run(s) does not exist yet. " + "If it is in another location, please provide the path as parquet_path.", + ) from exc + + else: + # Obtain the parquet filenames, metadata and schema from the method + # which handles buffer file creation/reading + filenames, metadata, _ = self.buffer_file_handler( + data_parquet_dir, + detector, + force_recreate, + ) + + # Read all parquet files into one dataframe using dask + dataframe = dd.read_parquet(filenames, calculate_divisions=True) + + # Channels to fill NaN values + channels: List[str] = self.get_channels(["per_pulse", "per_train"]) + + overlap = min(file.num_rows for file in metadata) + + print("Filling nan values...") + dataframe = dfops.forward_fill_lazy( + df=dataframe, + columns=channels, + before=overlap, + iterations=self._config["dataframe"].get("forward_fill_iterations", 2), + ) + # Remove the NaNs from per_electron channels + dataframe_electron = dataframe.dropna( + subset=self.get_channels(["per_electron"]), + ) + dataframe_pulse = dataframe[ + self.multi_index + self.get_channels(["per_pulse", "per_train"]) + ] + dataframe_pulse = dataframe_pulse[ + (dataframe_pulse["electronId"] == 0) | (np.isnan(dataframe_pulse["electronId"])) + ] + + # Save the dataframe as parquet if requested + if save_parquet: + dataframe_electron.compute().reset_index(drop=True).to_parquet(parquet_path) + print("Combined parquet file saved.") + + return dataframe_electron, dataframe_pulse
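The NaN filling mentioned in the module docstring happens here through `dfops.forward_fill_lazy`; a toy example of its effect (the column name is hypothetical):

```python
import dask.dataframe as dd
import numpy as np
import pandas as pd
from sed.core import dfops

pdf = pd.DataFrame({"delayStage": [1.0, np.nan, np.nan, 2.0, np.nan, np.nan]})
df = dd.from_pandas(pdf, npartitions=2)
# forward-fill across partition boundaries, looking `before` rows back
df = dfops.forward_fill_lazy(df=df, columns=["delayStage"], before=2, iterations=2)
print(df.compute()["delayStage"].tolist())  # [1.0, 1.0, 1.0, 2.0, 2.0, 2.0]
```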
+ +
[docs] def gather_metadata(self, metadata: dict = None) -> dict: + """Dummy function returning empty metadata dictionary for now. + + Args: + metadata (dict, optional): Manual meta data dictionary. Auto-generated + meta data are added to it. Defaults to None. + + Returns: + dict: Metadata dictionary + """ + if metadata is None: + metadata = {} + + return metadata
+ +
[docs] def get_count_rate( + self, + fids: Sequence[int] = None, # noqa: ARG002 + **kwds, # noqa: ARG002 + ): + return None, None
+ +
[docs] def get_elapsed_time(self, fids=None, **kwds): # noqa: ARG002 + return None
+ +
[docs] def read_dataframe( + self, + files: Union[str, Sequence[str]] = None, + folders: Union[str, Sequence[str]] = None, + runs: Union[str, Sequence[str]] = None, + ftype: str = "h5", + metadata: dict = None, + collect_metadata: bool = False, + **kwds, + ) -> Tuple[dd.DataFrame, dd.DataFrame, dict]: + """ + Read express data from the DAQ, generating a parquet in between. + + Args: + files (Union[str, Sequence[str]], optional): File path(s) to process. Defaults to None. + folders (Union[str, Sequence[str]], optional): Path to folder(s) where files are stored + Path has priority such that if it's specified, the specified files will be ignored. + Defaults to None. + runs (Union[str, Sequence[str]], optional): Run identifier(s). Corresponding files will + be located in the location provided by ``folders``. Takes precedence over + ``files`` and ``folders``. Defaults to None. + ftype (str, optional): The file extension type. Defaults to "h5". + metadata (dict, optional): Additional metadata. Defaults to None. + collect_metadata (bool, optional): Whether to collect metadata. Defaults to False. + + Returns: + Tuple[dd.DataFrame, dict]: A tuple containing the concatenated DataFrame and metadata. + + Raises: + ValueError: If neither 'runs' nor 'files'/'data_raw_dir' is provided. + FileNotFoundError: If the conversion fails for some files or no data is available. + """ + t0 = time.time() + + data_raw_dir, data_parquet_dir = self.initialize_paths() + + # Prepare a list of names for the runs to read and parquets to write + if runs is not None: + files = [] + if isinstance(runs, (str, int)): + runs = [runs] + for run in runs: + run_files = self.get_files_from_run_id( + run_id=run, + folders=[str(folder.resolve()) for folder in data_raw_dir], + extension=ftype, + daq=self._config["dataframe"]["daq"], + ) + files.extend(run_files) + self.runs = list(runs) + super().read_dataframe(files=files, ftype=ftype) + + else: + # This call takes care of files and folders. As we have converted runs into files + # already, they are just stored in the class by this call. + super().read_dataframe( + files=files, + folders=folders, + ftype=ftype, + metadata=metadata, + ) + + df, df_timed = self.parquet_handler(data_parquet_dir, **kwds) + + if collect_metadata: + metadata = self.gather_metadata( + metadata=self.metadata, + ) + else: + metadata = self.metadata + print(f"loading complete in {time.time() - t0: .2f} s") + + return df, df_timed, metadata
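End to end, reading a run with this loader could look like the following sketch (config file and run number hypothetical; `parse_config` refers to sed's config helper):

```python
from sed.core.config import parse_config
from sed.loader.sxp.loader import SXPLoader

config = parse_config("sxp_config.yaml")  # hypothetical config file
loader = SXPLoader(config=config)
df_electron, df_pulse, metadata = loader.read_dataframe(runs=["0058"])
```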
+ + +LOADER = SXPLoader +
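The ``LOADER`` variable above registers this class with the loader interface. A minimal sketch of the resulting end-to-end call, assuming a config whose loader entry selects ``sxp``; the run number and flags are illustrative:

```python
# Minimal sketch, assuming config["core"]["loader"] == "sxp"; the run ID is hypothetical.
from sed.loader.loader_interface import get_loader

loader = get_loader(loader_name="sxp", config=config)
df, df_timed, metadata = loader.read_dataframe(
    runs=["0058"],
    collect_metadata=False,
)
```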
\ No newline at end of file
diff --git a/sed/2.1.0/_modules/sed/loader/utils.html b/sed/2.1.0/_modules/sed/loader/utils.html
new file mode 100644
index 0000000..2f459d3
--- /dev/null
+++ b/sed/2.1.0/_modules/sed/loader/utils.html
@@ -0,0 +1,686 @@
+ sed.loader.utils — SED 0.2.1 documentation
Source code for sed.loader.utils
+"""Utilities for loaders
+"""
+from glob import glob
+from typing import cast
+from typing import List
+from typing import Sequence
+from typing import Union
+
+import dask.dataframe
+import numpy as np
+import pandas as pd
+from h5py import File
+from h5py import Group
+from natsort import natsorted
+
+
+
[docs]def gather_files( + folder: str, + extension: str, + f_start: int = None, + f_end: int = None, + f_step: int = 1, + file_sorting: bool = True, +) -> List[str]: + """Collects and sorts files with specified extension from a given folder. + + Args: + folder (str): The folder to search + extension (str): File extension used for glob.glob(). + f_start (int, optional): Start file id used to construct a file selector. + Defaults to None. + f_end (int, optional): End file id used to construct a file selector. + Defaults to None. + f_step (int, optional): Step of file id incrementation, used to construct + a file selector. Defaults to 1. + file_sorting (bool, optional): Option to sort the files by their names. + Defaults to True. + + Returns: + List[str]: List of collected file names. + """ + try: + files = glob(folder + "/*." + extension) + + if file_sorting: + files = cast(List[str], natsorted(files)) + + if f_start is not None and f_end is not None: + files = files[slice(f_start, f_end, f_step)] + + except FileNotFoundError: + print("No legitimate folder address is specified for file retrieval!") + raise + + return files
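For illustration (the folder path below is a placeholder, not taken from this module), collecting the first ten naturally sorted ``.h5`` files from a folder would look like:

```python
# Illustrative call; the folder path is a placeholder assumption.
files = gather_files(
    folder="/path/to/raw_data",
    extension="h5",
    f_start=0,
    f_end=10,
)
print(len(files), files[:3])
```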
+ + +
[docs]def parse_h5_keys(h5_file: File, prefix: str = "") -> List[str]:
+    """Helper function which parses the channels present in the h5 file.
+
+    Args:
+        h5_file (h5py.File): The H5 file object.
+        prefix (str, optional): The prefix for the channel names.
+            Defaults to an empty string.
+
+    Returns:
+        List[str]: A list of channel names in the H5 file.
+
+    Raises:
+        KeyError: If an error occurs while accessing a key in the H5 file.
+    """
+
+    # Initialize an empty list to store the channels
+    file_channel_list = []
+
+    # Iterate over the keys in the H5 file
+    for key in h5_file.keys():
+        try:
+            # Check if the object corresponding to the key is a group
+            if isinstance(h5_file[key], Group):
+                # If it's a group, recursively call the function on the group object
+                # and append the returned channels to the file_channel_list
+                file_channel_list.extend(
+                    parse_h5_keys(h5_file[key], prefix=prefix + "/" + key),
+                )
+            else:
+                # If it's not a group (i.e., it's a dataset), append the key
+                # to the file_channel_list
+                file_channel_list.append(prefix + "/" + key)
+        except KeyError as exception:
+            # If an exception occurs, raise a new exception with an error message
+            raise KeyError(
+                f"Error parsing key: {prefix}/{key}",
+            ) from exception
+
+    # Return the list of channels
+    return file_channel_list
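An illustrative round trip (the file name is a placeholder): open an HDF5 file read-only and list every dataset path it contains.

```python
# Illustrative only; "example_run.h5" is a placeholder file name.
import h5py

with h5py.File("example_run.h5", "r") as h5_file:
    channels = parse_h5_keys(h5_file)
print(channels)  # e.g. ["/group/dataset_a", "/group/sub/dataset_b"]
```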
+ + +
[docs]def split_channel_bitwise(
+    df: dask.dataframe.DataFrame,
+    input_column: str,
+    output_columns: Sequence[str],
+    bit_mask: int,
+    overwrite: bool = False,
+    types: Sequence[type] = None,
+) -> dask.dataframe.DataFrame:
+    """Splits a channel into two channels bitwise.
+
+    This function splits a channel into two channels by separating the first n bits from
+    the remaining bits. The first n bits are stored in the first output column, the
+    remaining bits are stored in the second output column.
+
+    Args:
+        df (dask.dataframe.DataFrame): Dataframe to use.
+        input_column (str): Name of the column to split.
+        output_columns (Sequence[str]): Names of the columns to create.
+        bit_mask (int): Bit mask to use for splitting.
+        overwrite (bool, optional): Whether to overwrite existing columns.
+            Defaults to False.
+        types (Sequence[type], optional): Types of the new columns. Defaults to a
+            size-appropriate integer type for the first column and np.int32 for the second.
+
+    Returns:
+        dask.dataframe.DataFrame: Dataframe with the new columns.
+    """
+    if len(output_columns) != 2:
+        raise ValueError("Exactly two output columns must be given.")
+    if input_column not in df.columns:
+        raise KeyError(f"Column {input_column} not in dataframe.")
+    if output_columns[0] in df.columns and not overwrite:
+        raise KeyError(f"Column {output_columns[0]} already in dataframe.")
+    if output_columns[1] in df.columns and not overwrite:
+        raise KeyError(f"Column {output_columns[1]} already in dataframe.")
+    if bit_mask < 0 or not isinstance(bit_mask, int):
+        raise ValueError("bit_mask must be a non-negative integer.")
+    if types is None:
+        types = [np.int8 if bit_mask < 8 else np.int16, np.int32]
+    elif len(types) != 2:
+        raise ValueError("Exactly two types must be given.")
+    elif not all(isinstance(t, type) for t in types):
+        raise ValueError("types must be a sequence of types.")
+    df[output_columns[0]] = (df[input_column] % 2**bit_mask).astype(types[0])
+    df[output_columns[1]] = (df[input_column] // 2**bit_mask).astype(types[1])
+    return df
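A small self-contained check of the arithmetic (the column names are invented for the demonstration): with ``bit_mask=3``, the first output column receives ``value % 8`` and the second ``value // 8``.

```python
# Demonstration with invented column names, verifying the modulo/floor-division split.
import dask.dataframe as dd
import pandas as pd

ddf = dd.from_pandas(pd.DataFrame({"raw": [5, 13, 42]}), npartitions=1)
ddf = split_channel_bitwise(ddf, "raw", ["low_bits", "high_bits"], bit_mask=3)
print(ddf.compute())
#    raw  low_bits  high_bits
# 0    5         5          0
# 1   13         5          1
# 2   42         2          5
```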
+ + +
[docs]def split_dld_time_from_sector_id(
+    df: Union[pd.DataFrame, dask.dataframe.DataFrame],
+    tof_column: str = None,
+    sector_id_column: str = None,
+    sector_id_reserved_bits: int = None,
+    config: dict = None,
+) -> Union[pd.DataFrame, dask.dataframe.DataFrame]:
+    """Converts the 8s time in steps to time in steps and sectorID.
+
+    The 8s detector encodes the dldSectorID in the 3 least significant bits of the
+    dldTimeSteps channel.
+
+    Args:
+        df (Union[pd.DataFrame, dask.dataframe.DataFrame]): Dataframe to use.
+        tof_column (str, optional): Name of the column containing the
+            time-of-flight steps. Defaults to config["dataframe"]["tof_column"].
+        sector_id_column (str, optional): Name of the column containing the
+            sectorID. Defaults to config["dataframe"]["sector_id_column"].
+        sector_id_reserved_bits (int, optional): Number of bits reserved for the
+            sectorID. Defaults to config["dataframe"]["sector_id_reserved_bits"].
+        config (dict, optional): Configuration dictionary. Defaults to None.
+
+    Returns:
+        Union[pd.DataFrame, dask.dataframe.DataFrame]: Dataframe with the new columns.
+    """
+    if tof_column is None:
+        if config is None:
+            raise ValueError("Either tof_column or config must be given.")
+        tof_column = config["dataframe"]["tof_column"]
+    if sector_id_column is None:
+        if config is None:
+            raise ValueError("Either sector_id_column or config must be given.")
+        sector_id_column = config["dataframe"]["sector_id_column"]
+    if sector_id_reserved_bits is None:
+        if config is None:
+            raise ValueError("Either sector_id_reserved_bits or config must be given.")
+        sector_id_reserved_bits = config["dataframe"].get("sector_id_reserved_bits", None)
+        if sector_id_reserved_bits is None:
+            raise ValueError('No value for "sector_id_reserved_bits" found in config.')
+
+    if sector_id_column in df.columns:
+        raise ValueError(
+            f"Column {sector_id_column} already in dataframe. This function is not idempotent.",
+        )
+    df = split_channel_bitwise(
+        df=df,
+        input_column=tof_column,
+        output_columns=[sector_id_column, tof_column],
+        bit_mask=sector_id_reserved_bits,
+        overwrite=True,
+        types=[np.int8, np.int32],
+    )
+    return df
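An illustrative decoding with synthetic values (the column names mirror the defaults above but are passed explicitly, so no config is needed): with 3 reserved bits, an encoded value ``8 * t + s`` splits back into time step ``t`` and sector ``s``.

```python
# Synthetic example: encoded = 8 * time_step + sector_id, with 3 reserved bits.
import pandas as pd

df = pd.DataFrame({"dldTimeSteps": [8 * 100 + 3, 8 * 101 + 7]})
df = split_dld_time_from_sector_id(
    df,
    tof_column="dldTimeSteps",
    sector_id_column="dldSectorID",
    sector_id_reserved_bits=3,
)
print(df)  # dldTimeSteps -> [100, 101]; dldSectorID -> [3, 7]
```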
+
\ No newline at end of file
diff --git a/sed/2.1.0/_sources/index.md.txt b/sed/2.1.0/_sources/index.md.txt
new file mode 100644
index 0000000..afeefb0
--- /dev/null
+++ b/sed/2.1.0/_sources/index.md.txt
@@ -0,0 +1,54 @@
+---
+myst:
+  html_meta:
+    "description lang=en": |
+      Top-level documentation for sed, with links to the rest
+      of the site.
+html_theme.sidebar_secondary.remove: true
+---
+
+# SED documentation
+
+SED (Single Event Data Frame) is a collection of routines and utilities to handle photoelectron-resolved datastreams.
+It features lazy evaluation of dataframe processing using dask, numba-accelerated multi-dimensional binning, and calibration and correction routines for trARPES (time- and angle-resolved photoemission spectroscopy) datasets.
+The package ensures provenance and FAIR data through metadata tracking and usage of the community-defined NeXus format.
+
+## User guide
+
+We introduce the different functionalities of the package through several step-by-step user guides:
+
+```{toctree}
+:maxdepth: 2
+
+user_guide/index
+
+```
+
+## Examples
+
+Several example notebooks demonstrate the functionality of SED for end-to-end data analysis workflows.
+
+```{toctree}
+:maxdepth: 2
+
+workflows/index
+```
+
+## API
+
+```{toctree}
+:maxdepth: 2
+
+sed/api
+```
+
+
+## Community and contribution guide
+
+Information about the community behind this package and how you can contribute.
+
+```{toctree}
+:maxdepth: 2
+
+misc/contribution
+```
diff --git a/sed/2.1.0/_sources/misc/contributing.rst.txt b/sed/2.1.0/_sources/misc/contributing.rst.txt
new file mode 100644
index 0000000..7aab314
--- /dev/null
+++ b/sed/2.1.0/_sources/misc/contributing.rst.txt
@@ -0,0 +1,132 @@
+==============================
+Contributing to sed
+==============================
+
+Welcome to the sed project, a collaboration of the Open Community of Multidimensional Photoemission Spectroscopy.
+
+Whether you are a beamline scientist hoping to create a loader for your data, or would like to add a new feature to the project, we welcome your contributions.
+
+This guide will walk you through the process of setting up your development environment, and the workflow for contributing to the project.
+
+
+Getting Started
+===============
+
+1. **Clone the Repository:**
+
+   - If you are a member of the repository, clone the repository to your local machine:
+
+   .. code-block:: bash
+
+      git clone https://github.com/OpenCOMPES/sed.git
+
+   - If you are not a member of the repository, clone your fork of the repository to your local machine:
+
+   .. code-block:: bash
+
+      git clone https://github.com/yourusername/sed.git
+
+
+2. **Install Python and Poetry:**
+
+   - Ensure you have Python 3.8, 3.9, 3.10 or 3.11 and poetry installed.
+
+   .. code-block:: bash
+
+      pip install pipx
+      pipx install poetry
+
+3. **Install Dependencies:**
+
+   - Navigate to the project directory and install the project dependencies (development ones are included by default) using Poetry:
+
+   .. code-block:: bash
+
+      poetry install
+
+
+Development Workflow
+=====================
+
+.. note::
+   This guide assumes that you have Python (version 3.8, 3.9, 3.10, 3.11) and poetry with dev dependencies installed on your machine.
+
+1. **Install pre-commit hooks:** To ensure your code is formatted correctly, install pre-commit and its hooks:
+
+   .. code-block:: bash
+
+      pip install pre-commit
+      pre-commit install
+
+2. **Create a Branch:** Create a new branch for your feature or bug fix and make changes:
+
+   .. code-block:: bash
+
+      git checkout -b feature-branch
+
+
+3. **Write Tests:** If your contribution introduces new features or fixes a bug, add tests to cover your changes.
+
+4. **Run Tests:** To ensure no functionality is broken, run the tests:
+
+   .. code-block:: bash
+
+      pytest tests
+
+
+5. **Commit Changes:** Commit your changes with a clear and concise commit message:
+
+   .. code-block:: bash
+
+      git commit -a -m "Your commit message"
+
+
+6. **Push Changes:** Push your changes to your fork:
+
+   .. code-block:: bash
+
+      git push origin feature-branch
+
+
+7. **Open a Pull Request:** Open a pull request against the `main` branch of sed.
+
+Pull Request Guidelines
+=======================
+
+Please give a brief description of the changes you have made in your pull request.
+If your pull request fixes an issue, please reference the issue number in the pull request description.
+
+Before your pull request can be merged, it must pass the following checks:
+
+- **Linting Check**
+
+- **Tests Check**
+
+- **Code Review:** A maintainer will review your code and provide feedback if necessary.
+
+- **Rebase with Main:** Ensure your branch is up-to-date with the latest changes from the `main` branch.
+
+Once all checks are successful and your code is approved, it will be merged into the main branch.
+
+Developing a Loader
+===================
+If you are developing a loader for your beamline, please follow the guidelines below.
+
+1. **Create a Loader:**
+
+   - Create a new loader in the `sed/loaders` directory.
+   - The loader should be a subclass of `sed.loader.base.loader.BaseLoader` and implement a few methods. See :ref:`base_loader` for more information.
+   - Give your class a `__name__` attribute, which is used to select the loader in user config files (see the generic loader for an example).
+   - At the end of your module, provide a `LOADER = YourNameLoader` variable, which is used to register that loader in the registry. See :ref:`loader_interface`.
+
+2. **Write Tests:**
+
+   - Write tests for your loader in the `tests/loaders` directory.
+   - You can also include small test data in the `tests/data` directory.
+
+3. **Add Loader to Documentation:** Add your loader to the documentation in `docs/sed/loaders.rst`.
diff --git a/sed/2.1.0/_sources/misc/contribution.md.txt b/sed/2.1.0/_sources/misc/contribution.md.txt
new file mode 100644
index 0000000..88d7b56
--- /dev/null
+++ b/sed/2.1.0/_sources/misc/contribution.md.txt
@@ -0,0 +1,8 @@
+# Development
+
+```{toctree}
+:maxdepth: 2
+
+contributing
+maintain
+```
diff --git a/sed/2.1.0/_sources/misc/maintain.rst.txt b/sed/2.1.0/_sources/misc/maintain.rst.txt
new file mode 100644
index 0000000..cf8d0a9
--- /dev/null
+++ b/sed/2.1.0/_sources/misc/maintain.rst.txt
@@ -0,0 +1,156 @@
+How to Maintain
+===============
+
+Documentation
+-------------
+**Build Locally:**
+
+Users can generate documentation locally using the following steps:
+
+1. **Install Poetry:**
+
+.. code-block:: bash
+
+   pip install pipx
+   pipx install poetry
+
+2. **Clone Repository:**
+
+.. code-block:: bash
+
+   git clone https://github.com/OpenCOMPES/sed.git
+
+3. **Navigate to Repository:**
+
+.. code-block:: bash
+
+   cd sed
+
+4. **Copy Tutorial Files:**
+
+Note that this step will slow down the build process significantly. It also requires downloading two datasets, so about 20 GB of free space is needed.
+
+.. code-block:: bash
+
+   cp -r tutorial docs/
+   cp -r sed/config docs/sed
+
+5. **Create a virtual environment:**
+
+.. code-block:: bash
+
+   poetry shell
+
+6. **Install Dependencies:**
+
+.. code-block:: bash
+
+   poetry install --with docs
+
+7. **Build Documentation:**
+
+.. code-block:: bash
+
+   poetry run sphinx-build -b html docs _build
+
+8. **View Documentation:**
+
+Open the generated HTML documentation in the `_build` directory.
+
+**GitHub Workflow:**
+
+The documentation workflow is designed to automatically build and deploy documentation. Additionally, maintainers of the sed repository can manually trigger the documentation workflow from the Actions tab.
+Here's how the workflow works:
+
+1. **Workflow Configuration:**
+   - The documentation workflow is triggered on push events to the main branch for specific paths and files related to documentation.
+   - Manual execution is possible using the workflow_dispatch event from the Actions tab.
+
+   .. code-block:: yaml
+
+      on:
+        push:
+          branches: [ main ]
+          paths:
+            - sed/**/*
+            - pyproject.toml
+            - tutorial/**
+            - .github/workflows/documentation.yml
+        workflow_dispatch:
+
+2. **Permissions:**
+   - The workflow sets permissions for the GITHUB_TOKEN to allow deployment to GitHub Pages.
+   - Permissions include read access to contents and write access to pages.
+
+   .. code-block:: yaml
+
+      permissions:
+        contents: read
+        pages: write
+        id-token: write
+
+3. **Concurrent Deployment:**
+   - Only one concurrent deployment is allowed to prevent conflicts.
+   - A future idea would be to have different deployments for different versions.
+   - Runs queued between an in-progress run and the latest queued run are skipped.
+
+   .. code-block:: yaml
+
+      concurrency:
+        group: "pages"
+        cancel-in-progress: false
+
+4. **Workflow Steps:**
+   - The workflow is divided into two jobs: build and deploy.
+
+   a. **Build Job:**
+      - Sets up the build environment, checks out the repository, and installs necessary dependencies using Poetry.
+      - Installs notebook dependencies and Pandoc.
+      - Copies tutorial files to the docs directory and removes unnecessary notebooks.
+      - Downloads RAW data for tutorials.
+      - Builds Sphinx documentation.
+
+   b. **Deploy Job:**
+      - Deploys the built documentation to GitHub Pages.
+
+5. **Manual Execution:**
+   - To manually trigger the workflow, go to the Actions tab on GitHub.
+   - Click on "Run workflow" for the "documentation" workflow.
+
+
+Release
+-------
+
+**Creating a Release**
+
+To create a release, follow these steps:
+
+   a. **Create a Git Release on Github:**
+
+      - On the "tags" page, select "releases", and press "Draft a new release".
+      - At "choose a tag", type in the name of the new release tag. Make sure to have a **v** prefix in the tag name, e.g. **v0.1.10**.
+      - Confirm creation of the tag, and press "Generate release notes". Edit the notes as appropriate (e.g. remove auto-generated update PRs).
+      - Press "Publish release". This will create the new tag and release entry, and issue the build and upload to PyPI.
+
+   b. **Check PyPI for the Published Package:**
+
+      - Visit the PyPI page (https://pypi.org/project/sed-processor/).
+      - Confirm that the new version (e.g., 0.1.10) has been published.
+
+   c. **If you don't see the update on PyPI:**
+
+      - Visit the GitHub Actions page and monitor the Release workflow (https://github.com/OpenCOMPES/sed/actions/workflows/release.yml).
+      - Check if errors occurred during the release process.
+
+
+**Understanding the Release Workflow**
+
+- *Release Job:*
+   - This workflow is responsible for versioning and releasing the package.
+   - A release job runs on every git tag push (e.g., `git tag v0.1.5`) and publishes the package to PyPI.
+   - If the publish is successful, the version in the `pyproject.toml` file is updated and pushed to the main branch.
+
+- *Prerelease Job:*
+   - This workflow is triggered automatically on every pull request (PR) to the main branch.
+   - It increments the version number for prerelease (e.g., from 0.1.5 to 0.1.6a0 to 0.1.6a1) and publishes the package to PyPI.
+   - If the publish is successful, the version in the `pyproject.toml` file is updated and pushed to the main branch.
diff --git a/sed/2.1.0/_sources/sed/api.rst.txt b/sed/2.1.0/_sources/sed/api.rst.txt
new file mode 100644
index 0000000..fffcc53
--- /dev/null
+++ b/sed/2.1.0/_sources/sed/api.rst.txt
@@ -0,0 +1,17 @@
+======
+API
+======
+
+.. toctree::
+   :maxdepth: 1
+
+   core
+   dfops
+   loader
+   binning
+   calibrator
+   dataset
+   diagnostic
+   io
+   metadata
+   config
diff --git a/sed/2.1.0/_sources/sed/binning.rst.txt b/sed/2.1.0/_sources/sed/binning.rst.txt
new file mode 100644
index 0000000..cda0a68
--- /dev/null
+++ b/sed/2.1.0/_sources/sed/binning.rst.txt
@@ -0,0 +1,21 @@
+Binning
+===================================================
+Main functions
+###################################################
+
+.. automodule:: sed.binning
+   :members:
+   :undoc-members:
+
+
+
+Used helper functions
+###################################################
+
+.. automodule:: sed.binning.numba_bin
+   :members:
+   :undoc-members:
+
+.. automodule:: sed.binning.utils
+   :members:
+   :undoc-members:
diff --git a/sed/2.1.0/_sources/sed/calibrator.rst.txt b/sed/2.1.0/_sources/sed/calibrator.rst.txt
new file mode 100644
index 0000000..5d74ab2
--- /dev/null
+++ b/sed/2.1.0/_sources/sed/calibrator.rst.txt
@@ -0,0 +1,20 @@
+Calibrator
+===================================================
+
+Momentum calibration and correction
+###################################################
+.. automodule:: sed.calibrator.momentum
+   :members:
+   :undoc-members:
+
+Energy calibration and correction
+###################################################
+.. automodule:: sed.calibrator.energy
+   :members:
+   :undoc-members:
+
+Delay calibration and correction
+###################################################
+.. automodule:: sed.calibrator.delay
+   :members:
+   :undoc-members:
diff --git a/sed/2.1.0/_sources/sed/config.rst.txt b/sed/2.1.0/_sources/sed/config.rst.txt
new file mode 100644
index 0000000..485bd7e
--- /dev/null
+++ b/sed/2.1.0/_sources/sed/config.rst.txt
@@ -0,0 +1,8 @@
+Config
+***************************************************
+.. automodule:: sed.core.config
+   :members:
+   :undoc-members:
+
+
+.. _example_config:
diff --git a/sed/2.1.0/_sources/sed/core.rst.txt b/sed/2.1.0/_sources/sed/core.rst.txt
new file mode 100644
index 0000000..f590c55
--- /dev/null
+++ b/sed/2.1.0/_sources/sed/core.rst.txt
@@ -0,0 +1,5 @@
+Core
+===================================================
+.. automodule:: sed.core
+   :members:
+   :undoc-members:
\ No newline at end of file
diff --git a/sed/2.1.0/_sources/sed/dataset.rst.txt b/sed/2.1.0/_sources/sed/dataset.rst.txt
new file mode 100644
index 0000000..b014746
--- /dev/null
+++ b/sed/2.1.0/_sources/sed/dataset.rst.txt
@@ -0,0 +1,328 @@
+Dataset
+===================================================
+
+SED comes with the ability to download and extract any URL-based
+dataset. By default, users can use the "WSe2", "TaS2" and "Gd_W110" datasets,
+but it is easy to extend this list.
+
+Getting datasets
+------------------------
+
+.. code:: python
+
+    import os
+    from sed.dataset import dataset
+
+get()
+^^^^^
+
+The "get" method just needs the dataset name, but another root_dir can be provided.
+
+Try to interrupt the download process and restart it to see that it continues the download from where it stopped.
+
+.. code:: python
+
+    dataset.get("WSe2", remove_zip = False)
+
+.. parsed-literal::
+
+    Using default data path for "WSe2": "/datasets/WSe2"
+
+      3%|▎         | 152M/5.73G [00:02<01:24, 71.3MB/s]
+
+    Using default data path for "WSe2": "/datasets/WSe2"
+
+    100%|██████████| 5.73G/5.73G [01:09<00:00, 54.3MB/s]
+
+    Download complete.
+
+Not providing "remove_zip" at all will by default delete the zip file after extraction.
+
+.. code:: python
+
+    dataset.get("WSe2")
+
+Setting the "use_existing" keyword to False allows downloading the data to another location. The default is to use existing data.
+
+.. code:: python
+
+    dataset.get("WSe2", root_dir = "new_datasets", use_existing=False)
+
+.. parsed-literal::
+
+    Using specified data path for "WSe2": "/new_datasets/datasets/WSe2"
+    Created new directory at /new_datasets/datasets/WSe2
+
+      3%|▎         | 152M/5.73G [00:02<01:24, 71.3MB/s]
+
+Interrupting the extraction behaves like interrupting the download: it continues from where it stopped.
+
+If the user deletes the extracted files, they are re-extracted from the zip file.
+
+.. code:: python
+
+    dataset.get("WSe2", remove_zip = False)
+
+    ## Try to remove some files and rerun this command.
+
+.. parsed-literal::
+
+    Using default data path for "WSe2": "/datasets/WSe2"
+    WSe2 data is already fully downloaded.
+
+    5.73GB [00:00, 12.6MB/s]
+
+    Download complete.
+    Extracting WSe2 data...
+
+    100%|██████████| 113/113 [02:41<00:00,  1.43s/file]
+
+    WSe2 data extracted successfully.
+
+remove()
+^^^^^^^^
+
+"remove" allows removal of some or all instances of existing data.
+
+This would remove only one of the two existing paths:
+
+.. code:: python
+
+    dataset.remove("WSe2", instance = dataset.existing_data_paths[0])
+
+.. parsed-literal::
+
+    Removed /datasets/WSe2
+
+This removes all instances, if any are present:
+
+.. code:: python
+
+    dataset.remove("WSe2")
+
+.. parsed-literal::
+
+    WSe2 data is not present.
+
+Attributes useful for users
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+All available datasets, after looking at the module, user and folder levels:
+
+.. code:: python
+
+    dataset.available
+
+.. parsed-literal::
+
+    ['WSe2', 'TaS2', 'Gd_W110']
+
+The dir and subdirs where the data is located:
+
+.. code:: python
+
+    dataset.dir
+
+.. parsed-literal::
+
+    '/datasets/WSe2'
+
+.. code:: python
+
+    dataset.subdirs
+
+.. parsed-literal::
+
+    ['/datasets/WSe2/Scan049_1',
+     '/datasets/WSe2/energycal_2019_01_08']
+
+Existing locations where data is present:
+
+.. code:: python
+
+    dataset.existing_data_paths
+
+.. parsed-literal::
+
+    ['/new_dataset/datasets/WSe2',
+     '/datasets/WSe2']
+
+Example of adding custom datasets
+---------------------------------
+
+DatasetsManager
+^^^^^^^^^^^^^^^
+
+Allows adding or removing datasets in the json file at any level (module, user, folder).
+
+Looks at all levels to give the available datasets.
+
+.. code:: python
+
+    import os
+    from sed.dataset import DatasetsManager
+
+We add a new dataset to both the folder and user levels.
+
+This dataset also has "rearrange_files" set to True, which takes all files in subfolders and puts them in the main dataset-specific directory.
+
+.. code:: python
+
+    example_dset_name = "Example"
+    example_dset_info = {}
+
+    example_dset_info["url"] = "https://example-dataset.com/download" # not a real path
+    example_dset_info["subdirs"] = ["Example_subdir"]
+    example_dset_info["rearrange_files"] = True
+
+    DatasetsManager.add(data_name=example_dset_name, info=example_dset_info, levels=["folder", "user"])
+
+.. parsed-literal::
+
+    Added Example dataset to folder datasets.json
+    Added Example dataset to user datasets.json
+
+datasets.json should be available in the execution folder after this.
+
+.. code:: python
+
+    assert os.path.exists("./datasets.json")
+    dataset.available
+
+.. parsed-literal::
+
+    ['Example', 'WSe2', 'TaS2', 'Gd_W110']
+
+This will remove the Example dataset from the user json file:
+
+.. code:: python
+
+    DatasetsManager.remove(data_name=example_dset_name, levels=["user"])
+
+.. parsed-literal::
+
+    Removed Example dataset from user datasets.json
+
+Adding a dataset that already exists will give an error; likewise removing one that doesn't exist.
+
+.. code:: python
+
+    # This should give an error
+    DatasetsManager.add(data_name=example_dset_name, info=example_dset_info, levels=["folder"])
+
+.. parsed-literal::
+
+    ValueError: Dataset Example already exists in folder datasets.json.
+
+Now that a datasets.json with Example exists in the current dir, let's try to fetch it.
+
+.. code:: python
+
+    dataset.get("Example")
+
+.. parsed-literal::
+
+    Using default data path for "Example": "/datasets/Example"
+    Created new directory at /datasets/Example
+    Download complete.
+    Extracting Example data...
+
+    100%|██████████| 4/4 [00:00<00:00, 28.10file/s]
+
+    Example data extracted successfully.
+    Removed Example.zip file.
+    Rearranging files in Example_subdir.
+
+    100%|██████████| 3/3 [00:00<00:00, 696.11file/s]
+
+    File movement complete.
+    Rearranging complete.
+
+.. code:: python
+
+    print(dataset.dir)
+    print(dataset.subdirs)
+
+.. parsed-literal::
+
+    /datasets/Example
+    []
+
+Let's download to another location:
+
+.. code:: python
+
+    dataset.get("Example", root_dir = "new_datasets", use_existing = False)
+
+.. parsed-literal::
+
+    Using specified data path for "Example": "/new_datasets/datasets/Example"
+    Created new directory at /new_datasets/datasets/Example
+    Download complete.
+    Extracting Example data...
+
+    100%|██████████| 4/4 [00:00<00:00, 28.28file/s]
+
+    Example data extracted successfully.
+    Removed Example.zip file.
+    Rearranging files in Example_subdir.
+
+    100%|██████████| 3/3 [00:00<00:00, 546.16file/s]
+
+    File movement complete.
+    Rearranging complete.
+
+We can remove one instance:
+
+.. code:: python
+
+    print(dataset.existing_data_paths)
+    path_to_remove = dataset.existing_data_paths[0]
+
+.. parsed-literal::
+
+    ['/new_datasets/datasets/Example', '/datasets/Example']
+
+.. code:: python
+
+    dataset.remove(data_name="Example", instance=path_to_remove)
+
+.. parsed-literal::
+
+    Removed /new_datasets/datasets/Example
+
+.. code:: python
+
+    assert not os.path.exists(path_to_remove)
+
+.. code:: python
+
+    print(dataset.existing_data_paths)
+
+.. parsed-literal::
+
+    ['/datasets/Example']
+
+Default datasets.json
+---------------------
+
+.. literalinclude:: ../../sed/dataset/datasets.json
+   :language: json
+
+API
+------------------------
+.. 
automodule:: sed.dataset.dataset + :members: + :undoc-members: diff --git a/sed/2.1.0/_sources/sed/dfops.rst.txt b/sed/2.1.0/_sources/sed/dfops.rst.txt new file mode 100644 index 0000000..63ff574 --- /dev/null +++ b/sed/2.1.0/_sources/sed/dfops.rst.txt @@ -0,0 +1,5 @@ +Dataframe Operations +=================================================== +.. automodule:: sed.core.dfops + :members: + :undoc-members: diff --git a/sed/2.1.0/_sources/sed/diagnostic.rst.txt b/sed/2.1.0/_sources/sed/diagnostic.rst.txt new file mode 100644 index 0000000..b5abd74 --- /dev/null +++ b/sed/2.1.0/_sources/sed/diagnostic.rst.txt @@ -0,0 +1,5 @@ +Diagnostics +=================================================== +.. automodule:: sed.diagnostics + :members: + :undoc-members: diff --git a/sed/2.1.0/_sources/sed/io.rst.txt b/sed/2.1.0/_sources/sed/io.rst.txt new file mode 100644 index 0000000..344df80 --- /dev/null +++ b/sed/2.1.0/_sources/sed/io.rst.txt @@ -0,0 +1,5 @@ +IO +=================================================== +.. automodule:: sed.io + :members: + :undoc-members: \ No newline at end of file diff --git a/sed/2.1.0/_sources/sed/loader.rst.txt b/sed/2.1.0/_sources/sed/loader.rst.txt new file mode 100644 index 0000000..468f34e --- /dev/null +++ b/sed/2.1.0/_sources/sed/loader.rst.txt @@ -0,0 +1,56 @@ +Data loader +=================================================== + +.. _loader_interface: + +Loader Interface +################################################### +.. automodule:: sed.loader.loader_interface + :members: + :undoc-members: + +.. _base_loader: + +Abstract BaseLoader +################################################### +.. automodule:: sed.loader.base.loader + :members: + :undoc-members: + +GenericLoader +################################################### +.. automodule:: sed.loader.generic.loader + :members: + :undoc-members: + +MpesLoader +################################################### +.. automodule:: sed.loader.mpes.loader + :members: + :undoc-members: + +FlashLoader +################################################### +.. automodule:: sed.loader.flash.loader + :members: + :undoc-members: + +.. automodule:: sed.loader.flash.metadata + :members: + :undoc-members: + +SXPLoader +################################################### +.. automodule:: sed.loader.sxp.loader + :members: + :undoc-members: + +Utilities +################################################### +.. automodule:: sed.loader.utils + :members: + :undoc-members: + +.. automodule:: sed.loader.mirrorutil + :members: + :undoc-members: diff --git a/sed/2.1.0/_sources/sed/metadata.rst.txt b/sed/2.1.0/_sources/sed/metadata.rst.txt new file mode 100644 index 0000000..dbb4bcb --- /dev/null +++ b/sed/2.1.0/_sources/sed/metadata.rst.txt @@ -0,0 +1,5 @@ +Metadata +=================================================== +.. 
automodule:: sed.core.metadata
+   :members:
+   :undoc-members:
diff --git a/sed/2.1.0/_sources/user_guide/advanced_topics.md.txt b/sed/2.1.0/_sources/user_guide/advanced_topics.md.txt
new file mode 100644
index 0000000..ebd773c
--- /dev/null
+++ b/sed/2.1.0/_sources/user_guide/advanced_topics.md.txt
@@ -0,0 +1,6 @@
+```{toctree}
+:maxdepth: 1
+../tutorial/6_binning_with_time-stamped_data
+../tutorial/7_correcting_orthorhombic_symmetry
+../tutorial/8_jittering_tutorial
+```
diff --git a/sed/2.1.0/_sources/user_guide/config.md.txt b/sed/2.1.0/_sources/user_guide/config.md.txt
new file mode 100644
index 0000000..7fb62f7
--- /dev/null
+++ b/sed/2.1.0/_sources/user_guide/config.md.txt
@@ -0,0 +1,29 @@
+# Configuration
+
+The config module contains a mechanism to collect configuration parameters from various sources and configuration files, and to combine them in a hierarchical manner into a single, consistent configuration dictionary.
+It will load an (optional) provided config file, or alternatively use a passed python dictionary as initial config dictionary, and subsequently look for the following additional config files to load:
+
+* ``folder_config``: A config file of name :file:`sed_config.yaml` in the current working directory. This is mostly intended to pass calibration parameters of the workflow between different notebook instances.
+* ``user_config``: A config file provided by the user, stored as :file:`.sed/config.yaml` in the current user's home directory. This is intended to give a user the option for individual configuration modifications of system settings.
+* ``system_config``: A config file provided by the system administrator, stored as :file:`/etc/sed/config.yaml` on Linux-based systems, and :file:`%ALLUSERSPROFILE%/sed/config.yaml` on Windows. This should provide all necessary default parameters for using the sed processor with a given setup. For an example of an mpes setup, see :ref:`example_config`.
+* ``default_config``: The default configuration shipped with the package. Typically, all parameters here should be overwritten by any of the other configuration files.
+
+The config mechanism returns the combined dictionary, and reports the loaded configuration files. In order to disable or overwrite any of the configuration files, they can also be given as optional parameters (path to a file, or python dictionary).
+
+## Default configuration settings
+
+```{literalinclude} ../../sed/config/default.yaml
+:language: yaml
+```
+
+## Example configuration file for mpes (METIS momentum microscope at FHI-Berlin)
+
+```{literalinclude} ../../sed/config/mpes_example_config.yaml
+:language: yaml
+```
+
+## Example configuration file for flash (HEXTOF momentum microscope at FLASH, DESY)
+
+```{literalinclude} ../../sed/config/flash_example_config.yaml
+:language: yaml
+```
diff --git a/sed/2.1.0/_sources/user_guide/index.md.txt b/sed/2.1.0/_sources/user_guide/index.md.txt
new file mode 100644
index 0000000..16d36aa
--- /dev/null
+++ b/sed/2.1.0/_sources/user_guide/index.md.txt
@@ -0,0 +1,29 @@
+---
+myst:
+  html_meta:
+    "description lang=en": |
+      Documentation for users.
+---
+# User Guide
+
+## Installing SED
+```{toctree}
+:maxdepth: 1
+installation
+```
+
+## Basic concepts
+```{toctree}
+:maxdepth: 1
+../tutorial/1_binning_fake_data
+../tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data
+../tutorial/3_metadata_collection_and_export_to_NeXus
+config
+```
+
+## Advanced Topics
+
+```{toctree}
+:maxdepth: 1
+advanced_topics
+```
diff --git a/sed/2.1.0/_sources/user_guide/installation.md.txt b/sed/2.1.0/_sources/user_guide/installation.md.txt
new file mode 100644
index 0000000..8c3f44d
--- /dev/null
+++ b/sed/2.1.0/_sources/user_guide/installation.md.txt
@@ -0,0 +1,74 @@
+# Installation
+
+```{attention}
+Requires Python 3.9+ and pip installed.
+```
+
+- Create a new virtual environment using either venv, pyenv, conda, etc. See below for an example.
+
+```bash
+python -m venv .sed-venv
+```
+
+- Activate your environment:
+
+```bash
+# On macOS/Linux
+source .sed-venv/bin/activate
+
+# On Windows
+.sed-venv\Scripts\activate
+```
+
+- Install `sed`, distributed as `sed-processor` on PyPI:
+
+```bash
+pip install sed-processor[all]
+```
+
+- If you do not use Jupyter Notebook or Jupyter Lab, you can skip installing those dependencies:
+
+```bash
+pip install sed-processor
+```
+
+```{note}
+If you intend to work with Jupyter notebooks, it is helpful to install a Jupyter kernel for your environment. This can be done, once your environment is activated, by typing:
+```
+
+```bash
+python -m ipykernel install --user --name=sed_kernel
+```
+
+# Development version
+
+```{attention}
+Requires Git, Python 3.9+ and pip installed.
+```
+
+1. Clone the repository:
+
+```bash
+git clone https://github.com/OpenCOMPES/sed.git
+cd sed
+```
+
+2. Create and activate a virtual environment:
+
+```bash
+# Create a virtual environment
+python -m venv .sed-dev
+
+# Activate the virtual environment
+# On macOS/Linux
+source .sed-dev/bin/activate
+
+# On Windows
+.sed-dev\Scripts\activate
+```
+
+3. Install the repository in editable mode with all dependencies:
+
+```bash
+pip install -e .[all]
+```
+
+Now you have the development version of `sed` installed in your local environment. Feel free to make changes and submit pull requests.
diff --git a/sed/2.1.0/_sources/workflows/index.md.txt b/sed/2.1.0/_sources/workflows/index.md.txt
new file mode 100644
index 0000000..f056d13
--- /dev/null
+++ b/sed/2.1.0/_sources/workflows/index.md.txt
@@ -0,0 +1,11 @@
+---
+myst:
+  html_meta:
+    "description lang=en": |
+      Workflows showcasing different SED loaders and methods
+---
+# Workflows
+
+```{toctree}
+../tutorial/4_hextof_workflow
+```
diff --git a/sed/2.1.0/_static/basic.css b/sed/2.1.0/_static/basic.css
new file mode 100644
index 0000000..b97662d
--- /dev/null
+++ b/sed/2.1.0/_static/basic.css
@@ -0,0 +1,921 @@
+/*
+ * basic.css
+ * ~~~~~~~~~
+ *
+ * Sphinx stylesheet -- basic theme.
+ *
+ * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 270px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + 
+div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + 
border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; 
+ word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: 
-1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/sed/2.1.0/_static/doctools.js b/sed/2.1.0/_static/doctools.js new file mode 100644 index 0000000..d06a71d --- /dev/null +++ b/sed/2.1.0/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? 
singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/sed/2.1.0/_static/documentation_options.js b/sed/2.1.0/_static/documentation_options.js new file mode 100644 index 0000000..dd1759d --- /dev/null +++ b/sed/2.1.0/_static/documentation_options.js @@ -0,0 +1,14 @@ +var DOCUMENTATION_OPTIONS = { + URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), + VERSION: '0.2.1', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/sed/2.1.0/_static/file.png b/sed/2.1.0/_static/file.png new file mode 100644 index 0000000000000000000000000000000000000000..a858a410e4faa62ce324d814e4b816fff83a6fb3 GIT binary patch literal 286 zcmV+(0pb3MP)s`hMrGg#P~ix$^RISR_I47Y|r1 
z_CyJOe}D1){SET-^Amu_i71Lt6eYfZjRyw@I6OQAIXXHDfiX^GbOlHe=Ae4>0m)d(f|Me07*qoM6N<$f}vM^LjV8( literal 0 HcmV?d00001 diff --git a/sed/2.1.0/_static/language_data.js b/sed/2.1.0/_static/language_data.js new file mode 100644 index 0000000..250f566 --- /dev/null +++ b/sed/2.1.0/_static/language_data.js @@ -0,0 +1,199 @@ +/* + * language_data.js + * ~~~~~~~~~~~~~~~~ + * + * This script contains the language-specific data used by searchtools.js, + * namely the list of stopwords, stemmer, scorer and splitter. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"]; + + +/* Non-minified version is copied as a separate JS file, is available */ + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/sed/2.1.0/_static/minus.png b/sed/2.1.0/_static/minus.png new file mode 100644 index 0000000000000000000000000000000000000000..d96755fdaf8bb2214971e0db9c1fd3077d7c419d GIT binary patch literal 90 zcmeAS@N?(olHy`uVBq!ia0vp^+#t*WBp7;*Yy1LIik>cxAr*|t7R?Mi>2?kWtu=nj kDsEF_5m^0CR;1wuP-*O&G^0G}KYk!hp00i_>zopr08q^qX#fBK literal 0 HcmV?d00001 diff --git a/sed/2.1.0/_static/nbsphinx-broken-thumbnail.svg b/sed/2.1.0/_static/nbsphinx-broken-thumbnail.svg new file mode 100644 index 0000000..4919ca8 --- /dev/null +++ b/sed/2.1.0/_static/nbsphinx-broken-thumbnail.svg @@ -0,0 +1,9 @@ + + + + diff --git a/sed/2.1.0/_static/nbsphinx-code-cells.css b/sed/2.1.0/_static/nbsphinx-code-cells.css new file mode 100644 index 0000000..a3fb27c --- /dev/null +++ b/sed/2.1.0/_static/nbsphinx-code-cells.css @@ -0,0 +1,259 @@ +/* remove conflicting styling from Sphinx themes */ 
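The `Stemmer` defined in `language_data.js` above is a JavaScript port of the classic Porter stemming algorithm, consumed by `searchtools.js` so that the search index matches inflected forms of a query: `stemWord` strips plural and derivational suffixes in five ordered steps. A minimal sketch of the expected behaviour, assuming the `Stemmer` and `stopwords` globals from that file are in scope:

```javascript
// Illustrative only — not part of the generated patch. Assumes
// language_data.js above has been loaded, as searchtools.js does.
var stemmer = new Stemmer();

// Words shorter than three characters are returned unchanged (early exit):
stemmer.stemWord("is");          // -> "is" (also filtered out via `stopwords`)

// Step 1b removes "-ing"/"-ed" and collapses the doubled consonant:
stemmer.stemWord("running");     // -> "run"

// Step 2 rewrites "-ational" to "-ate"; step 5 then drops the final "e":
stemmer.stemWord("relational");  // -> "relat"
```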
+div.nbinput.container div.prompt *, +div.nboutput.container div.prompt *, +div.nbinput.container div.input_area pre, +div.nboutput.container div.output_area pre, +div.nbinput.container div.input_area .highlight, +div.nboutput.container div.output_area .highlight { + border: none; + padding: 0; + margin: 0; + box-shadow: none; +} + +div.nbinput.container > div[class*=highlight], +div.nboutput.container > div[class*=highlight] { + margin: 0; +} + +div.nbinput.container div.prompt *, +div.nboutput.container div.prompt * { + background: none; +} + +div.nboutput.container div.output_area .highlight, +div.nboutput.container div.output_area pre { + background: unset; +} + +div.nboutput.container div.output_area div.highlight { + color: unset; /* override Pygments text color */ +} + +/* avoid gaps between output lines */ +div.nboutput.container div[class*=highlight] pre { + line-height: normal; +} + +/* input/output containers */ +div.nbinput.container, +div.nboutput.container { + display: -webkit-flex; + display: flex; + align-items: flex-start; + margin: 0; + width: 100%; +} +@media (max-width: 540px) { + div.nbinput.container, + div.nboutput.container { + flex-direction: column; + } +} + +/* input container */ +div.nbinput.container { + padding-top: 5px; +} + +/* last container */ +div.nblast.container { + padding-bottom: 5px; +} + +/* input prompt */ +div.nbinput.container div.prompt pre, +/* for sphinx_immaterial theme: */ +div.nbinput.container div.prompt pre > code { + color: #307FC1; +} + +/* output prompt */ +div.nboutput.container div.prompt pre, +/* for sphinx_immaterial theme: */ +div.nboutput.container div.prompt pre > code { + color: #BF5B3D; +} + +/* all prompts */ +div.nbinput.container div.prompt, +div.nboutput.container div.prompt { + width: 4.5ex; + padding-top: 5px; + position: relative; + user-select: none; +} + +div.nbinput.container div.prompt > div, +div.nboutput.container div.prompt > div { + position: absolute; + right: 0; + margin-right: 0.3ex; +} + +@media (max-width: 540px) { + div.nbinput.container div.prompt, + div.nboutput.container div.prompt { + width: unset; + text-align: left; + padding: 0.4em; + } + div.nboutput.container div.prompt.empty { + padding: 0; + } + + div.nbinput.container div.prompt > div, + div.nboutput.container div.prompt > div { + position: unset; + } +} + +/* disable scrollbars and line breaks on prompts */ +div.nbinput.container div.prompt pre, +div.nboutput.container div.prompt pre { + overflow: hidden; + white-space: pre; +} + +/* input/output area */ +div.nbinput.container div.input_area, +div.nboutput.container div.output_area { + -webkit-flex: 1; + flex: 1; + overflow: auto; +} +@media (max-width: 540px) { + div.nbinput.container div.input_area, + div.nboutput.container div.output_area { + width: 100%; + } +} + +/* input area */ +div.nbinput.container div.input_area { + border: 1px solid #e0e0e0; + border-radius: 2px; + /*background: #f5f5f5;*/ +} + +/* override MathJax center alignment in output cells */ +div.nboutput.container div[class*=MathJax] { + text-align: left !important; +} + +/* override sphinx.ext.imgmath center alignment in output cells */ +div.nboutput.container div.math p { + text-align: left; +} + +/* standard error */ +div.nboutput.container div.output_area.stderr { + background: #fdd; +} + +/* ANSI colors */ +.ansi-black-fg { color: #3E424D; } +.ansi-black-bg { background-color: #3E424D; } +.ansi-black-intense-fg { color: #282C36; } +.ansi-black-intense-bg { background-color: #282C36; } +.ansi-red-fg { color: #E75C58; } 
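The `.ansi-*` rules beginning here style captured terminal output in notebook cells: at build time, ANSI SGR escape sequences in cell output are converted into spans carrying these classes (by nbconvert's ANSI-to-HTML filter, as far as the class names suggest). A rough sketch of the mapping for the eight basic colours; the real conversion happens in Python during the docs build, so this is illustrative only:

```javascript
// Illustrative only — not part of the patch. Maps a basic ANSI SGR code
// to the class names styled in this stylesheet; the *-intense-* classes
// cover the bright variants, which this sketch omits.
const ANSI_NAMES = ["black", "red", "green", "yellow", "blue", "magenta", "cyan", "white"];

function classForSgr(code) {
  if (code >= 30 && code <= 37) return `ansi-${ANSI_NAMES[code - 30]}-fg`;
  if (code >= 40 && code <= 47) return `ansi-${ANSI_NAMES[code - 40]}-bg`;
  if (code === 1) return "ansi-bold";
  if (code === 4) return "ansi-underline";
  return null; // other SGR codes are not covered by these rules
}

console.log(classForSgr(31)); // "ansi-red-fg"   -> color #E75C58 per the rule above
console.log(classForSgr(42)); // "ansi-green-bg" -> background #00A250
```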
+.ansi-red-bg { background-color: #E75C58; } +.ansi-red-intense-fg { color: #B22B31; } +.ansi-red-intense-bg { background-color: #B22B31; } +.ansi-green-fg { color: #00A250; } +.ansi-green-bg { background-color: #00A250; } +.ansi-green-intense-fg { color: #007427; } +.ansi-green-intense-bg { background-color: #007427; } +.ansi-yellow-fg { color: #DDB62B; } +.ansi-yellow-bg { background-color: #DDB62B; } +.ansi-yellow-intense-fg { color: #B27D12; } +.ansi-yellow-intense-bg { background-color: #B27D12; } +.ansi-blue-fg { color: #208FFB; } +.ansi-blue-bg { background-color: #208FFB; } +.ansi-blue-intense-fg { color: #0065CA; } +.ansi-blue-intense-bg { background-color: #0065CA; } +.ansi-magenta-fg { color: #D160C4; } +.ansi-magenta-bg { background-color: #D160C4; } +.ansi-magenta-intense-fg { color: #A03196; } +.ansi-magenta-intense-bg { background-color: #A03196; } +.ansi-cyan-fg { color: #60C6C8; } +.ansi-cyan-bg { background-color: #60C6C8; } +.ansi-cyan-intense-fg { color: #258F8F; } +.ansi-cyan-intense-bg { background-color: #258F8F; } +.ansi-white-fg { color: #C5C1B4; } +.ansi-white-bg { background-color: #C5C1B4; } +.ansi-white-intense-fg { color: #A1A6B2; } +.ansi-white-intense-bg { background-color: #A1A6B2; } + +.ansi-default-inverse-fg { color: #FFFFFF; } +.ansi-default-inverse-bg { background-color: #000000; } + +.ansi-bold { font-weight: bold; } +.ansi-underline { text-decoration: underline; } + + +div.nbinput.container div.input_area div[class*=highlight] > pre, +div.nboutput.container div.output_area div[class*=highlight] > pre, +div.nboutput.container div.output_area div[class*=highlight].math, +div.nboutput.container div.output_area.rendered_html, +div.nboutput.container div.output_area > div.output_javascript, +div.nboutput.container div.output_area:not(.rendered_html) > img{ + padding: 5px; + margin: 0; +} + +/* fix copybtn overflow problem in chromium (needed for 'sphinx_copybutton') */ +div.nbinput.container div.input_area > div[class^='highlight'], +div.nboutput.container div.output_area > div[class^='highlight']{ + overflow-y: hidden; +} + +/* hide copy button on prompts for 'sphinx_copybutton' extension ... */ +.prompt .copybtn, +/* ... 
and 'sphinx_immaterial' theme */ +.prompt .md-clipboard.md-icon { + display: none; +} + +/* Some additional styling taken form the Jupyter notebook CSS */ +.jp-RenderedHTMLCommon table, +div.rendered_html table { + border: none; + border-collapse: collapse; + border-spacing: 0; + color: black; + font-size: 12px; + table-layout: fixed; +} +.jp-RenderedHTMLCommon thead, +div.rendered_html thead { + border-bottom: 1px solid black; + vertical-align: bottom; +} +.jp-RenderedHTMLCommon tr, +.jp-RenderedHTMLCommon th, +.jp-RenderedHTMLCommon td, +div.rendered_html tr, +div.rendered_html th, +div.rendered_html td { + text-align: right; + vertical-align: middle; + padding: 0.5em 0.5em; + line-height: normal; + white-space: normal; + max-width: none; + border: none; +} +.jp-RenderedHTMLCommon th, +div.rendered_html th { + font-weight: bold; +} +.jp-RenderedHTMLCommon tbody tr:nth-child(odd), +div.rendered_html tbody tr:nth-child(odd) { + background: #f5f5f5; +} +.jp-RenderedHTMLCommon tbody tr:hover, +div.rendered_html tbody tr:hover { + background: rgba(66, 165, 245, 0.2); +} + diff --git a/sed/2.1.0/_static/nbsphinx-gallery.css b/sed/2.1.0/_static/nbsphinx-gallery.css new file mode 100644 index 0000000..365c27a --- /dev/null +++ b/sed/2.1.0/_static/nbsphinx-gallery.css @@ -0,0 +1,31 @@ +.nbsphinx-gallery { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(160px, 1fr)); + gap: 5px; + margin-top: 1em; + margin-bottom: 1em; +} + +.nbsphinx-gallery > a { + padding: 5px; + border: 1px dotted currentColor; + border-radius: 2px; + text-align: center; +} + +.nbsphinx-gallery > a:hover { + border-style: solid; +} + +.nbsphinx-gallery img { + max-width: 100%; + max-height: 100%; +} + +.nbsphinx-gallery > a > div:first-child { + display: flex; + align-items: start; + justify-content: center; + height: 120px; + margin-bottom: 5px; +} diff --git a/sed/2.1.0/_static/nbsphinx-no-thumbnail.svg b/sed/2.1.0/_static/nbsphinx-no-thumbnail.svg new file mode 100644 index 0000000..9dca758 --- /dev/null +++ b/sed/2.1.0/_static/nbsphinx-no-thumbnail.svg @@ -0,0 +1,9 @@ + + + + diff --git a/sed/2.1.0/_static/plus.png b/sed/2.1.0/_static/plus.png new file mode 100644 index 0000000000000000000000000000000000000000..7107cec93a979b9a5f64843235a16651d563ce2d GIT binary patch literal 90 zcmeAS@N?(olHy`uVBq!ia0vp^+#t*WBp7;*Yy1LIik>cxAr*|t7R?Mi>2?kWtu>-2 m3q%Vub%g%s<8sJhVPMczOq}xhg9DJoz~JfX=d#Wzp$Pyb1r*Kz literal 0 HcmV?d00001 diff --git a/sed/2.1.0/_static/pygments.css b/sed/2.1.0/_static/pygments.css new file mode 100644 index 0000000..012e6a0 --- /dev/null +++ b/sed/2.1.0/_static/pygments.css @@ -0,0 +1,152 @@ +html[data-theme="light"] .highlight pre { line-height: 125%; } +html[data-theme="light"] .highlight td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight .hll { background-color: #fae4c2 } +html[data-theme="light"] .highlight { background: #fefefe; color: #080808 } +html[data-theme="light"] .highlight .c { color: #515151 } /* Comment */ +html[data-theme="light"] .highlight .err { color: #a12236 
} /* Error */ +html[data-theme="light"] .highlight .k { color: #6730c5 } /* Keyword */ +html[data-theme="light"] .highlight .l { color: #7f4707 } /* Literal */ +html[data-theme="light"] .highlight .n { color: #080808 } /* Name */ +html[data-theme="light"] .highlight .o { color: #00622f } /* Operator */ +html[data-theme="light"] .highlight .p { color: #080808 } /* Punctuation */ +html[data-theme="light"] .highlight .ch { color: #515151 } /* Comment.Hashbang */ +html[data-theme="light"] .highlight .cm { color: #515151 } /* Comment.Multiline */ +html[data-theme="light"] .highlight .cp { color: #515151 } /* Comment.Preproc */ +html[data-theme="light"] .highlight .cpf { color: #515151 } /* Comment.PreprocFile */ +html[data-theme="light"] .highlight .c1 { color: #515151 } /* Comment.Single */ +html[data-theme="light"] .highlight .cs { color: #515151 } /* Comment.Special */ +html[data-theme="light"] .highlight .gd { color: #005b82 } /* Generic.Deleted */ +html[data-theme="light"] .highlight .ge { font-style: italic } /* Generic.Emph */ +html[data-theme="light"] .highlight .gh { color: #005b82 } /* Generic.Heading */ +html[data-theme="light"] .highlight .gs { font-weight: bold } /* Generic.Strong */ +html[data-theme="light"] .highlight .gu { color: #005b82 } /* Generic.Subheading */ +html[data-theme="light"] .highlight .kc { color: #6730c5 } /* Keyword.Constant */ +html[data-theme="light"] .highlight .kd { color: #6730c5 } /* Keyword.Declaration */ +html[data-theme="light"] .highlight .kn { color: #6730c5 } /* Keyword.Namespace */ +html[data-theme="light"] .highlight .kp { color: #6730c5 } /* Keyword.Pseudo */ +html[data-theme="light"] .highlight .kr { color: #6730c5 } /* Keyword.Reserved */ +html[data-theme="light"] .highlight .kt { color: #7f4707 } /* Keyword.Type */ +html[data-theme="light"] .highlight .ld { color: #7f4707 } /* Literal.Date */ +html[data-theme="light"] .highlight .m { color: #7f4707 } /* Literal.Number */ +html[data-theme="light"] .highlight .s { color: #00622f } /* Literal.String */ +html[data-theme="light"] .highlight .na { color: #912583 } /* Name.Attribute */ +html[data-theme="light"] .highlight .nb { color: #7f4707 } /* Name.Builtin */ +html[data-theme="light"] .highlight .nc { color: #005b82 } /* Name.Class */ +html[data-theme="light"] .highlight .no { color: #005b82 } /* Name.Constant */ +html[data-theme="light"] .highlight .nd { color: #7f4707 } /* Name.Decorator */ +html[data-theme="light"] .highlight .ni { color: #00622f } /* Name.Entity */ +html[data-theme="light"] .highlight .ne { color: #6730c5 } /* Name.Exception */ +html[data-theme="light"] .highlight .nf { color: #005b82 } /* Name.Function */ +html[data-theme="light"] .highlight .nl { color: #7f4707 } /* Name.Label */ +html[data-theme="light"] .highlight .nn { color: #080808 } /* Name.Namespace */ +html[data-theme="light"] .highlight .nx { color: #080808 } /* Name.Other */ +html[data-theme="light"] .highlight .py { color: #005b82 } /* Name.Property */ +html[data-theme="light"] .highlight .nt { color: #005b82 } /* Name.Tag */ +html[data-theme="light"] .highlight .nv { color: #a12236 } /* Name.Variable */ +html[data-theme="light"] .highlight .ow { color: #6730c5 } /* Operator.Word */ +html[data-theme="light"] .highlight .pm { color: #080808 } /* Punctuation.Marker */ +html[data-theme="light"] .highlight .w { color: #080808 } /* Text.Whitespace */ +html[data-theme="light"] .highlight .mb { color: #7f4707 } /* Literal.Number.Bin */ +html[data-theme="light"] .highlight .mf { color: #7f4707 } /* Literal.Number.Float 
*/ +html[data-theme="light"] .highlight .mh { color: #7f4707 } /* Literal.Number.Hex */ +html[data-theme="light"] .highlight .mi { color: #7f4707 } /* Literal.Number.Integer */ +html[data-theme="light"] .highlight .mo { color: #7f4707 } /* Literal.Number.Oct */ +html[data-theme="light"] .highlight .sa { color: #00622f } /* Literal.String.Affix */ +html[data-theme="light"] .highlight .sb { color: #00622f } /* Literal.String.Backtick */ +html[data-theme="light"] .highlight .sc { color: #00622f } /* Literal.String.Char */ +html[data-theme="light"] .highlight .dl { color: #00622f } /* Literal.String.Delimiter */ +html[data-theme="light"] .highlight .sd { color: #00622f } /* Literal.String.Doc */ +html[data-theme="light"] .highlight .s2 { color: #00622f } /* Literal.String.Double */ +html[data-theme="light"] .highlight .se { color: #00622f } /* Literal.String.Escape */ +html[data-theme="light"] .highlight .sh { color: #00622f } /* Literal.String.Heredoc */ +html[data-theme="light"] .highlight .si { color: #00622f } /* Literal.String.Interpol */ +html[data-theme="light"] .highlight .sx { color: #00622f } /* Literal.String.Other */ +html[data-theme="light"] .highlight .sr { color: #a12236 } /* Literal.String.Regex */ +html[data-theme="light"] .highlight .s1 { color: #00622f } /* Literal.String.Single */ +html[data-theme="light"] .highlight .ss { color: #005b82 } /* Literal.String.Symbol */ +html[data-theme="light"] .highlight .bp { color: #7f4707 } /* Name.Builtin.Pseudo */ +html[data-theme="light"] .highlight .fm { color: #005b82 } /* Name.Function.Magic */ +html[data-theme="light"] .highlight .vc { color: #a12236 } /* Name.Variable.Class */ +html[data-theme="light"] .highlight .vg { color: #a12236 } /* Name.Variable.Global */ +html[data-theme="light"] .highlight .vi { color: #a12236 } /* Name.Variable.Instance */ +html[data-theme="light"] .highlight .vm { color: #7f4707 } /* Name.Variable.Magic */ +html[data-theme="light"] .highlight .il { color: #7f4707 } /* Literal.Number.Integer.Long */ +html[data-theme="dark"] .highlight pre { line-height: 125%; } +html[data-theme="dark"] .highlight td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight .hll { background-color: #ffd9002e } +html[data-theme="dark"] .highlight { background: #2b2b2b; color: #f8f8f2 } +html[data-theme="dark"] .highlight .c { color: #ffd900 } /* Comment */ +html[data-theme="dark"] .highlight .err { color: #ffa07a } /* Error */ +html[data-theme="dark"] .highlight .k { color: #dcc6e0 } /* Keyword */ +html[data-theme="dark"] .highlight .l { color: #ffd900 } /* Literal */ +html[data-theme="dark"] .highlight .n { color: #f8f8f2 } /* Name */ +html[data-theme="dark"] .highlight .o { color: #abe338 } /* Operator */ +html[data-theme="dark"] .highlight .p { color: #f8f8f2 } /* Punctuation */ +html[data-theme="dark"] .highlight .ch { color: #ffd900 } /* Comment.Hashbang */ +html[data-theme="dark"] .highlight .cm { color: #ffd900 } /* Comment.Multiline */ +html[data-theme="dark"] .highlight .cp { color: #ffd900 } /* Comment.Preproc */ 
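Both pygments.css palettes are scoped to the `data-theme` attribute on the root element (the light rules above, the dark rules continuing below), so swapping the entire syntax-highlighting palette needs no stylesheet change — just one attribute flip, which is what the pydata-sphinx-theme light/dark toggle appears to rely on. A minimal sketch, illustrative only:

```javascript
// Illustrative only — not part of the patch. Every token rule in this file
// is scoped to html[data-theme="..."], so one attribute picks the palette.
function setSyntaxTheme(mode) {
  // mode is "light" or "dark"; e.g. keywords (.k) render #6730c5 in light
  // mode and #dcc6e0 in dark mode, per the rules in this file.
  document.documentElement.setAttribute("data-theme", mode);
}

setSyntaxTheme("dark"); // recolors every .highlight token via CSS alone
```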
+html[data-theme="dark"] .highlight .cpf { color: #ffd900 } /* Comment.PreprocFile */ +html[data-theme="dark"] .highlight .c1 { color: #ffd900 } /* Comment.Single */ +html[data-theme="dark"] .highlight .cs { color: #ffd900 } /* Comment.Special */ +html[data-theme="dark"] .highlight .gd { color: #00e0e0 } /* Generic.Deleted */ +html[data-theme="dark"] .highlight .ge { font-style: italic } /* Generic.Emph */ +html[data-theme="dark"] .highlight .gh { color: #00e0e0 } /* Generic.Heading */ +html[data-theme="dark"] .highlight .gs { font-weight: bold } /* Generic.Strong */ +html[data-theme="dark"] .highlight .gu { color: #00e0e0 } /* Generic.Subheading */ +html[data-theme="dark"] .highlight .kc { color: #dcc6e0 } /* Keyword.Constant */ +html[data-theme="dark"] .highlight .kd { color: #dcc6e0 } /* Keyword.Declaration */ +html[data-theme="dark"] .highlight .kn { color: #dcc6e0 } /* Keyword.Namespace */ +html[data-theme="dark"] .highlight .kp { color: #dcc6e0 } /* Keyword.Pseudo */ +html[data-theme="dark"] .highlight .kr { color: #dcc6e0 } /* Keyword.Reserved */ +html[data-theme="dark"] .highlight .kt { color: #ffd900 } /* Keyword.Type */ +html[data-theme="dark"] .highlight .ld { color: #ffd900 } /* Literal.Date */ +html[data-theme="dark"] .highlight .m { color: #ffd900 } /* Literal.Number */ +html[data-theme="dark"] .highlight .s { color: #abe338 } /* Literal.String */ +html[data-theme="dark"] .highlight .na { color: #ffd900 } /* Name.Attribute */ +html[data-theme="dark"] .highlight .nb { color: #ffd900 } /* Name.Builtin */ +html[data-theme="dark"] .highlight .nc { color: #00e0e0 } /* Name.Class */ +html[data-theme="dark"] .highlight .no { color: #00e0e0 } /* Name.Constant */ +html[data-theme="dark"] .highlight .nd { color: #ffd900 } /* Name.Decorator */ +html[data-theme="dark"] .highlight .ni { color: #abe338 } /* Name.Entity */ +html[data-theme="dark"] .highlight .ne { color: #dcc6e0 } /* Name.Exception */ +html[data-theme="dark"] .highlight .nf { color: #00e0e0 } /* Name.Function */ +html[data-theme="dark"] .highlight .nl { color: #ffd900 } /* Name.Label */ +html[data-theme="dark"] .highlight .nn { color: #f8f8f2 } /* Name.Namespace */ +html[data-theme="dark"] .highlight .nx { color: #f8f8f2 } /* Name.Other */ +html[data-theme="dark"] .highlight .py { color: #00e0e0 } /* Name.Property */ +html[data-theme="dark"] .highlight .nt { color: #00e0e0 } /* Name.Tag */ +html[data-theme="dark"] .highlight .nv { color: #ffa07a } /* Name.Variable */ +html[data-theme="dark"] .highlight .ow { color: #dcc6e0 } /* Operator.Word */ +html[data-theme="dark"] .highlight .pm { color: #f8f8f2 } /* Punctuation.Marker */ +html[data-theme="dark"] .highlight .w { color: #f8f8f2 } /* Text.Whitespace */ +html[data-theme="dark"] .highlight .mb { color: #ffd900 } /* Literal.Number.Bin */ +html[data-theme="dark"] .highlight .mf { color: #ffd900 } /* Literal.Number.Float */ +html[data-theme="dark"] .highlight .mh { color: #ffd900 } /* Literal.Number.Hex */ +html[data-theme="dark"] .highlight .mi { color: #ffd900 } /* Literal.Number.Integer */ +html[data-theme="dark"] .highlight .mo { color: #ffd900 } /* Literal.Number.Oct */ +html[data-theme="dark"] .highlight .sa { color: #abe338 } /* Literal.String.Affix */ +html[data-theme="dark"] .highlight .sb { color: #abe338 } /* Literal.String.Backtick */ +html[data-theme="dark"] .highlight .sc { color: #abe338 } /* Literal.String.Char */ +html[data-theme="dark"] .highlight .dl { color: #abe338 } /* Literal.String.Delimiter */ +html[data-theme="dark"] .highlight .sd { color: #abe338 } 
/* Literal.String.Doc */ +html[data-theme="dark"] .highlight .s2 { color: #abe338 } /* Literal.String.Double */ +html[data-theme="dark"] .highlight .se { color: #abe338 } /* Literal.String.Escape */ +html[data-theme="dark"] .highlight .sh { color: #abe338 } /* Literal.String.Heredoc */ +html[data-theme="dark"] .highlight .si { color: #abe338 } /* Literal.String.Interpol */ +html[data-theme="dark"] .highlight .sx { color: #abe338 } /* Literal.String.Other */ +html[data-theme="dark"] .highlight .sr { color: #ffa07a } /* Literal.String.Regex */ +html[data-theme="dark"] .highlight .s1 { color: #abe338 } /* Literal.String.Single */ +html[data-theme="dark"] .highlight .ss { color: #00e0e0 } /* Literal.String.Symbol */ +html[data-theme="dark"] .highlight .bp { color: #ffd900 } /* Name.Builtin.Pseudo */ +html[data-theme="dark"] .highlight .fm { color: #00e0e0 } /* Name.Function.Magic */ +html[data-theme="dark"] .highlight .vc { color: #ffa07a } /* Name.Variable.Class */ +html[data-theme="dark"] .highlight .vg { color: #ffa07a } /* Name.Variable.Global */ +html[data-theme="dark"] .highlight .vi { color: #ffa07a } /* Name.Variable.Instance */ +html[data-theme="dark"] .highlight .vm { color: #ffd900 } /* Name.Variable.Magic */ +html[data-theme="dark"] .highlight .il { color: #ffd900 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/sed/2.1.0/_static/scripts/bootstrap.js b/sed/2.1.0/_static/scripts/bootstrap.js new file mode 100644 index 0000000..c8178de --- /dev/null +++ b/sed/2.1.0/_static/scripts/bootstrap.js @@ -0,0 +1,3 @@ +/*! For license information please see bootstrap.js.LICENSE.txt */ +(()=>{"use strict";var t={d:(e,i)=>{for(var n in i)t.o(i,n)&&!t.o(e,n)&&Object.defineProperty(e,n,{enumerable:!0,get:i[n]})},o:(t,e)=>Object.prototype.hasOwnProperty.call(t,e),r:t=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})}},e={};t.r(e),t.d(e,{afterMain:()=>E,afterRead:()=>v,afterWrite:()=>C,applyStyles:()=>$,arrow:()=>J,auto:()=>a,basePlacements:()=>l,beforeMain:()=>y,beforeRead:()=>_,beforeWrite:()=>A,bottom:()=>s,clippingParents:()=>d,computeStyles:()=>it,createPopper:()=>Dt,createPopperBase:()=>St,createPopperLite:()=>$t,detectOverflow:()=>_t,end:()=>h,eventListeners:()=>st,flip:()=>bt,hide:()=>wt,left:()=>r,main:()=>w,modifierPhases:()=>O,offset:()=>Et,placements:()=>g,popper:()=>f,popperGenerator:()=>Lt,popperOffsets:()=>At,preventOverflow:()=>Tt,read:()=>b,reference:()=>p,right:()=>o,start:()=>c,top:()=>n,variationPlacements:()=>m,viewport:()=>u,write:()=>T});var i={};t.r(i),t.d(i,{Alert:()=>Oe,Button:()=>ke,Carousel:()=>li,Collapse:()=>Ei,Dropdown:()=>Ki,Modal:()=>Ln,Offcanvas:()=>Kn,Popover:()=>bs,ScrollSpy:()=>Ls,Tab:()=>Js,Toast:()=>po,Tooltip:()=>fs});var n="top",s="bottom",o="right",r="left",a="auto",l=[n,s,o,r],c="start",h="end",d="clippingParents",u="viewport",f="popper",p="reference",m=l.reduce((function(t,e){return t.concat([e+"-"+c,e+"-"+h])}),[]),g=[].concat(l,[a]).reduce((function(t,e){return t.concat([e,e+"-"+c,e+"-"+h])}),[]),_="beforeRead",b="read",v="afterRead",y="beforeMain",w="main",E="afterMain",A="beforeWrite",T="write",C="afterWrite",O=[_,b,v,y,w,E,A,T,C];function x(t){return t?(t.nodeName||"").toLowerCase():null}function k(t){if(null==t)return window;if("[object Window]"!==t.toString()){var e=t.ownerDocument;return e&&e.defaultView||window}return t}function L(t){return t instanceof k(t).Element||t instanceof Element}function S(t){return t 
instanceof k(t).HTMLElement||t instanceof HTMLElement}function D(t){return"undefined"!=typeof ShadowRoot&&(t instanceof k(t).ShadowRoot||t instanceof ShadowRoot)}const $={name:"applyStyles",enabled:!0,phase:"write",fn:function(t){var e=t.state;Object.keys(e.elements).forEach((function(t){var i=e.styles[t]||{},n=e.attributes[t]||{},s=e.elements[t];S(s)&&x(s)&&(Object.assign(s.style,i),Object.keys(n).forEach((function(t){var e=n[t];!1===e?s.removeAttribute(t):s.setAttribute(t,!0===e?"":e)})))}))},effect:function(t){var e=t.state,i={popper:{position:e.options.strategy,left:"0",top:"0",margin:"0"},arrow:{position:"absolute"},reference:{}};return Object.assign(e.elements.popper.style,i.popper),e.styles=i,e.elements.arrow&&Object.assign(e.elements.arrow.style,i.arrow),function(){Object.keys(e.elements).forEach((function(t){var n=e.elements[t],s=e.attributes[t]||{},o=Object.keys(e.styles.hasOwnProperty(t)?e.styles[t]:i[t]).reduce((function(t,e){return t[e]="",t}),{});S(n)&&x(n)&&(Object.assign(n.style,o),Object.keys(s).forEach((function(t){n.removeAttribute(t)})))}))}},requires:["computeStyles"]};function I(t){return t.split("-")[0]}var N=Math.max,P=Math.min,M=Math.round;function j(){var t=navigator.userAgentData;return null!=t&&t.brands&&Array.isArray(t.brands)?t.brands.map((function(t){return t.brand+"/"+t.version})).join(" "):navigator.userAgent}function F(){return!/^((?!chrome|android).)*safari/i.test(j())}function H(t,e,i){void 0===e&&(e=!1),void 0===i&&(i=!1);var n=t.getBoundingClientRect(),s=1,o=1;e&&S(t)&&(s=t.offsetWidth>0&&M(n.width)/t.offsetWidth||1,o=t.offsetHeight>0&&M(n.height)/t.offsetHeight||1);var r=(L(t)?k(t):window).visualViewport,a=!F()&&i,l=(n.left+(a&&r?r.offsetLeft:0))/s,c=(n.top+(a&&r?r.offsetTop:0))/o,h=n.width/s,d=n.height/o;return{width:h,height:d,top:c,right:l+h,bottom:c+d,left:l,x:l,y:c}}function B(t){var e=H(t),i=t.offsetWidth,n=t.offsetHeight;return Math.abs(e.width-i)<=1&&(i=e.width),Math.abs(e.height-n)<=1&&(n=e.height),{x:t.offsetLeft,y:t.offsetTop,width:i,height:n}}function W(t,e){var i=e.getRootNode&&e.getRootNode();if(t.contains(e))return!0;if(i&&D(i)){var n=e;do{if(n&&t.isSameNode(n))return!0;n=n.parentNode||n.host}while(n)}return!1}function z(t){return k(t).getComputedStyle(t)}function R(t){return["table","td","th"].indexOf(x(t))>=0}function q(t){return((L(t)?t.ownerDocument:t.document)||window.document).documentElement}function V(t){return"html"===x(t)?t:t.assignedSlot||t.parentNode||(D(t)?t.host:null)||q(t)}function Y(t){return S(t)&&"fixed"!==z(t).position?t.offsetParent:null}function K(t){for(var e=k(t),i=Y(t);i&&R(i)&&"static"===z(i).position;)i=Y(i);return i&&("html"===x(i)||"body"===x(i)&&"static"===z(i).position)?e:i||function(t){var e=/firefox/i.test(j());if(/Trident/i.test(j())&&S(t)&&"fixed"===z(t).position)return null;var i=V(t);for(D(i)&&(i=i.host);S(i)&&["html","body"].indexOf(x(i))<0;){var n=z(i);if("none"!==n.transform||"none"!==n.perspective||"paint"===n.contain||-1!==["transform","perspective"].indexOf(n.willChange)||e&&"filter"===n.willChange||e&&n.filter&&"none"!==n.filter)return i;i=i.parentNode}return null}(t)||e}function Q(t){return["top","bottom"].indexOf(t)>=0?"x":"y"}function X(t,e,i){return N(t,P(e,i))}function U(t){return Object.assign({},{top:0,right:0,bottom:0,left:0},t)}function G(t,e){return e.reduce((function(e,i){return e[i]=t,e}),{})}const J={name:"arrow",enabled:!0,phase:"main",fn:function(t){var 
e,i=t.state,a=t.name,c=t.options,h=i.elements.arrow,d=i.modifiersData.popperOffsets,u=I(i.placement),f=Q(u),p=[r,o].indexOf(u)>=0?"height":"width";if(h&&d){var m=function(t,e){return U("number"!=typeof(t="function"==typeof t?t(Object.assign({},e.rects,{placement:e.placement})):t)?t:G(t,l))}(c.padding,i),g=B(h),_="y"===f?n:r,b="y"===f?s:o,v=i.rects.reference[p]+i.rects.reference[f]-d[f]-i.rects.popper[p],y=d[f]-i.rects.reference[f],w=K(h),E=w?"y"===f?w.clientHeight||0:w.clientWidth||0:0,A=v/2-y/2,T=m[_],C=E-g[p]-m[b],O=E/2-g[p]/2+A,x=X(T,O,C),k=f;i.modifiersData[a]=((e={})[k]=x,e.centerOffset=x-O,e)}},effect:function(t){var e=t.state,i=t.options.element,n=void 0===i?"[data-popper-arrow]":i;null!=n&&("string"!=typeof n||(n=e.elements.popper.querySelector(n)))&&W(e.elements.popper,n)&&(e.elements.arrow=n)},requires:["popperOffsets"],requiresIfExists:["preventOverflow"]};function Z(t){return t.split("-")[1]}var tt={top:"auto",right:"auto",bottom:"auto",left:"auto"};function et(t){var e,i=t.popper,a=t.popperRect,l=t.placement,c=t.variation,d=t.offsets,u=t.position,f=t.gpuAcceleration,p=t.adaptive,m=t.roundOffsets,g=t.isFixed,_=d.x,b=void 0===_?0:_,v=d.y,y=void 0===v?0:v,w="function"==typeof m?m({x:b,y}):{x:b,y};b=w.x,y=w.y;var E=d.hasOwnProperty("x"),A=d.hasOwnProperty("y"),T=r,C=n,O=window;if(p){var x=K(i),L="clientHeight",S="clientWidth";x===k(i)&&"static"!==z(x=q(i)).position&&"absolute"===u&&(L="scrollHeight",S="scrollWidth"),(l===n||(l===r||l===o)&&c===h)&&(C=s,y-=(g&&x===O&&O.visualViewport?O.visualViewport.height:x[L])-a.height,y*=f?1:-1),l!==r&&(l!==n&&l!==s||c!==h)||(T=o,b-=(g&&x===O&&O.visualViewport?O.visualViewport.width:x[S])-a.width,b*=f?1:-1)}var D,$=Object.assign({position:u},p&&tt),I=!0===m?function(t,e){var i=t.x,n=t.y,s=e.devicePixelRatio||1;return{x:M(i*s)/s||0,y:M(n*s)/s||0}}({x:b,y},k(i)):{x:b,y};return b=I.x,y=I.y,f?Object.assign({},$,((D={})[C]=A?"0":"",D[T]=E?"0":"",D.transform=(O.devicePixelRatio||1)<=1?"translate("+b+"px, "+y+"px)":"translate3d("+b+"px, "+y+"px, 0)",D)):Object.assign({},$,((e={})[C]=A?y+"px":"",e[T]=E?b+"px":"",e.transform="",e))}const it={name:"computeStyles",enabled:!0,phase:"beforeWrite",fn:function(t){var e=t.state,i=t.options,n=i.gpuAcceleration,s=void 0===n||n,o=i.adaptive,r=void 0===o||o,a=i.roundOffsets,l=void 0===a||a,c={placement:I(e.placement),variation:Z(e.placement),popper:e.elements.popper,popperRect:e.rects.popper,gpuAcceleration:s,isFixed:"fixed"===e.options.strategy};null!=e.modifiersData.popperOffsets&&(e.styles.popper=Object.assign({},e.styles.popper,et(Object.assign({},c,{offsets:e.modifiersData.popperOffsets,position:e.options.strategy,adaptive:r,roundOffsets:l})))),null!=e.modifiersData.arrow&&(e.styles.arrow=Object.assign({},e.styles.arrow,et(Object.assign({},c,{offsets:e.modifiersData.arrow,position:"absolute",adaptive:!1,roundOffsets:l})))),e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-placement":e.placement})},data:{}};var nt={passive:!0};const st={name:"eventListeners",enabled:!0,phase:"write",fn:function(){},effect:function(t){var e=t.state,i=t.instance,n=t.options,s=n.scroll,o=void 0===s||s,r=n.resize,a=void 0===r||r,l=k(e.elements.popper),c=[].concat(e.scrollParents.reference,e.scrollParents.popper);return o&&c.forEach((function(t){t.addEventListener("scroll",i.update,nt)})),a&&l.addEventListener("resize",i.update,nt),function(){o&&c.forEach((function(t){t.removeEventListener("scroll",i.update,nt)})),a&&l.removeEventListener("resize",i.update,nt)}},data:{}};var 
ot={left:"right",right:"left",bottom:"top",top:"bottom"};function rt(t){return t.replace(/left|right|bottom|top/g,(function(t){return ot[t]}))}var at={start:"end",end:"start"};function lt(t){return t.replace(/start|end/g,(function(t){return at[t]}))}function ct(t){var e=k(t);return{scrollLeft:e.pageXOffset,scrollTop:e.pageYOffset}}function ht(t){return H(q(t)).left+ct(t).scrollLeft}function dt(t){var e=z(t),i=e.overflow,n=e.overflowX,s=e.overflowY;return/auto|scroll|overlay|hidden/.test(i+s+n)}function ut(t){return["html","body","#document"].indexOf(x(t))>=0?t.ownerDocument.body:S(t)&&dt(t)?t:ut(V(t))}function ft(t,e){var i;void 0===e&&(e=[]);var n=ut(t),s=n===(null==(i=t.ownerDocument)?void 0:i.body),o=k(n),r=s?[o].concat(o.visualViewport||[],dt(n)?n:[]):n,a=e.concat(r);return s?a:a.concat(ft(V(r)))}function pt(t){return Object.assign({},t,{left:t.x,top:t.y,right:t.x+t.width,bottom:t.y+t.height})}function mt(t,e,i){return e===u?pt(function(t,e){var i=k(t),n=q(t),s=i.visualViewport,o=n.clientWidth,r=n.clientHeight,a=0,l=0;if(s){o=s.width,r=s.height;var c=F();(c||!c&&"fixed"===e)&&(a=s.offsetLeft,l=s.offsetTop)}return{width:o,height:r,x:a+ht(t),y:l}}(t,i)):L(e)?function(t,e){var i=H(t,!1,"fixed"===e);return i.top=i.top+t.clientTop,i.left=i.left+t.clientLeft,i.bottom=i.top+t.clientHeight,i.right=i.left+t.clientWidth,i.width=t.clientWidth,i.height=t.clientHeight,i.x=i.left,i.y=i.top,i}(e,i):pt(function(t){var e,i=q(t),n=ct(t),s=null==(e=t.ownerDocument)?void 0:e.body,o=N(i.scrollWidth,i.clientWidth,s?s.scrollWidth:0,s?s.clientWidth:0),r=N(i.scrollHeight,i.clientHeight,s?s.scrollHeight:0,s?s.clientHeight:0),a=-n.scrollLeft+ht(t),l=-n.scrollTop;return"rtl"===z(s||i).direction&&(a+=N(i.clientWidth,s?s.clientWidth:0)-o),{width:o,height:r,x:a,y:l}}(q(t)))}function gt(t){var e,i=t.reference,a=t.element,l=t.placement,d=l?I(l):null,u=l?Z(l):null,f=i.x+i.width/2-a.width/2,p=i.y+i.height/2-a.height/2;switch(d){case n:e={x:f,y:i.y-a.height};break;case s:e={x:f,y:i.y+i.height};break;case o:e={x:i.x+i.width,y:p};break;case r:e={x:i.x-a.width,y:p};break;default:e={x:i.x,y:i.y}}var m=d?Q(d):null;if(null!=m){var g="y"===m?"height":"width";switch(u){case c:e[m]=e[m]-(i[g]/2-a[g]/2);break;case h:e[m]=e[m]+(i[g]/2-a[g]/2)}}return e}function _t(t,e){void 0===e&&(e={});var i=e,r=i.placement,a=void 0===r?t.placement:r,c=i.strategy,h=void 0===c?t.strategy:c,m=i.boundary,g=void 0===m?d:m,_=i.rootBoundary,b=void 0===_?u:_,v=i.elementContext,y=void 0===v?f:v,w=i.altBoundary,E=void 0!==w&&w,A=i.padding,T=void 0===A?0:A,C=U("number"!=typeof T?T:G(T,l)),O=y===f?p:f,k=t.rects.popper,D=t.elements[E?O:y],$=function(t,e,i,n){var s="clippingParents"===e?function(t){var e=ft(V(t)),i=["absolute","fixed"].indexOf(z(t).position)>=0&&S(t)?K(t):t;return L(i)?e.filter((function(t){return L(t)&&W(t,i)&&"body"!==x(t)})):[]}(t):[].concat(e),o=[].concat(s,[i]),r=o[0],a=o.reduce((function(e,i){var s=mt(t,i,n);return e.top=N(s.top,e.top),e.right=P(s.right,e.right),e.bottom=P(s.bottom,e.bottom),e.left=N(s.left,e.left),e}),mt(t,r,n));return a.width=a.right-a.left,a.height=a.bottom-a.top,a.x=a.left,a.y=a.top,a}(L(D)?D:D.contextElement||q(t.elements.popper),g,b,h),I=H(t.elements.reference),M=gt({reference:I,element:k,strategy:"absolute",placement:a}),j=pt(Object.assign({},k,M)),F=y===f?j:I,B={top:$.top-F.top+C.top,bottom:F.bottom-$.bottom+C.bottom,left:$.left-F.left+C.left,right:F.right-$.right+C.right},R=t.modifiersData.offset;if(y===f&&R){var Y=R[a];Object.keys(B).forEach((function(t){var 
e=[o,s].indexOf(t)>=0?1:-1,i=[n,s].indexOf(t)>=0?"y":"x";B[t]+=Y[i]*e}))}return B}const bt={name:"flip",enabled:!0,phase:"main",fn:function(t){var e=t.state,i=t.options,h=t.name;if(!e.modifiersData[h]._skip){for(var d=i.mainAxis,u=void 0===d||d,f=i.altAxis,p=void 0===f||f,_=i.fallbackPlacements,b=i.padding,v=i.boundary,y=i.rootBoundary,w=i.altBoundary,E=i.flipVariations,A=void 0===E||E,T=i.allowedAutoPlacements,C=e.options.placement,O=I(C),x=_||(O!==C&&A?function(t){if(I(t)===a)return[];var e=rt(t);return[lt(t),e,lt(e)]}(C):[rt(C)]),k=[C].concat(x).reduce((function(t,i){return t.concat(I(i)===a?function(t,e){void 0===e&&(e={});var i=e,n=i.placement,s=i.boundary,o=i.rootBoundary,r=i.padding,a=i.flipVariations,c=i.allowedAutoPlacements,h=void 0===c?g:c,d=Z(n),u=d?a?m:m.filter((function(t){return Z(t)===d})):l,f=u.filter((function(t){return h.indexOf(t)>=0}));0===f.length&&(f=u);var p=f.reduce((function(e,i){return e[i]=_t(t,{placement:i,boundary:s,rootBoundary:o,padding:r})[I(i)],e}),{});return Object.keys(p).sort((function(t,e){return p[t]-p[e]}))}(e,{placement:i,boundary:v,rootBoundary:y,padding:b,flipVariations:A,allowedAutoPlacements:T}):i)}),[]),L=e.rects.reference,S=e.rects.popper,D=new Map,$=!0,N=k[0],P=0;P=0,B=H?"width":"height",W=_t(e,{placement:M,boundary:v,rootBoundary:y,altBoundary:w,padding:b}),z=H?F?o:r:F?s:n;L[B]>S[B]&&(z=rt(z));var R=rt(z),q=[];if(u&&q.push(W[j]<=0),p&&q.push(W[z]<=0,W[R]<=0),q.every((function(t){return t}))){N=M,$=!1;break}D.set(M,q)}if($)for(var V=function(t){var e=k.find((function(e){var i=D.get(e);if(i)return i.slice(0,t).every((function(t){return t}))}));if(e)return N=e,"break"},Y=A?3:1;Y>0&&"break"!==V(Y);Y--);e.placement!==N&&(e.modifiersData[h]._skip=!0,e.placement=N,e.reset=!0)}},requiresIfExists:["offset"],data:{_skip:!1}};function vt(t,e,i){return void 0===i&&(i={x:0,y:0}),{top:t.top-e.height-i.y,right:t.right-e.width+i.x,bottom:t.bottom-e.height+i.y,left:t.left-e.width-i.x}}function yt(t){return[n,o,s,r].some((function(e){return t[e]>=0}))}const wt={name:"hide",enabled:!0,phase:"main",requiresIfExists:["preventOverflow"],fn:function(t){var e=t.state,i=t.name,n=e.rects.reference,s=e.rects.popper,o=e.modifiersData.preventOverflow,r=_t(e,{elementContext:"reference"}),a=_t(e,{altBoundary:!0}),l=vt(r,n),c=vt(a,s,o),h=yt(l),d=yt(c);e.modifiersData[i]={referenceClippingOffsets:l,popperEscapeOffsets:c,isReferenceHidden:h,hasPopperEscaped:d},e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-reference-hidden":h,"data-popper-escaped":d})}},Et={name:"offset",enabled:!0,phase:"main",requires:["popperOffsets"],fn:function(t){var e=t.state,i=t.options,s=t.name,a=i.offset,l=void 0===a?[0,0]:a,c=g.reduce((function(t,i){return t[i]=function(t,e,i){var s=I(t),a=[r,n].indexOf(s)>=0?-1:1,l="function"==typeof i?i(Object.assign({},e,{placement:t})):i,c=l[0],h=l[1];return c=c||0,h=(h||0)*a,[r,o].indexOf(s)>=0?{x:h,y:c}:{x:c,y:h}}(i,e.rects,l),t}),{}),h=c[e.placement],d=h.x,u=h.y;null!=e.modifiersData.popperOffsets&&(e.modifiersData.popperOffsets.x+=d,e.modifiersData.popperOffsets.y+=u),e.modifiersData[s]=c}},At={name:"popperOffsets",enabled:!0,phase:"read",fn:function(t){var e=t.state,i=t.name;e.modifiersData[i]=gt({reference:e.rects.reference,element:e.rects.popper,strategy:"absolute",placement:e.placement})},data:{}},Tt={name:"preventOverflow",enabled:!0,phase:"main",fn:function(t){var e=t.state,i=t.options,a=t.name,l=i.mainAxis,h=void 0===l||l,d=i.altAxis,u=void 0!==d&&d,f=i.boundary,p=i.rootBoundary,m=i.altBoundary,g=i.padding,_=i.tether,b=void 
0===_||_,v=i.tetherOffset,y=void 0===v?0:v,w=_t(e,{boundary:f,rootBoundary:p,padding:g,altBoundary:m}),E=I(e.placement),A=Z(e.placement),T=!A,C=Q(E),O="x"===C?"y":"x",x=e.modifiersData.popperOffsets,k=e.rects.reference,L=e.rects.popper,S="function"==typeof y?y(Object.assign({},e.rects,{placement:e.placement})):y,D="number"==typeof S?{mainAxis:S,altAxis:S}:Object.assign({mainAxis:0,altAxis:0},S),$=e.modifiersData.offset?e.modifiersData.offset[e.placement]:null,M={x:0,y:0};if(x){if(h){var j,F="y"===C?n:r,H="y"===C?s:o,W="y"===C?"height":"width",z=x[C],R=z+w[F],q=z-w[H],V=b?-L[W]/2:0,Y=A===c?k[W]:L[W],U=A===c?-L[W]:-k[W],G=e.elements.arrow,J=b&&G?B(G):{width:0,height:0},tt=e.modifiersData["arrow#persistent"]?e.modifiersData["arrow#persistent"].padding:{top:0,right:0,bottom:0,left:0},et=tt[F],it=tt[H],nt=X(0,k[W],J[W]),st=T?k[W]/2-V-nt-et-D.mainAxis:Y-nt-et-D.mainAxis,ot=T?-k[W]/2+V+nt+it+D.mainAxis:U+nt+it+D.mainAxis,rt=e.elements.arrow&&K(e.elements.arrow),at=rt?"y"===C?rt.clientTop||0:rt.clientLeft||0:0,lt=null!=(j=null==$?void 0:$[C])?j:0,ct=z+ot-lt,ht=X(b?P(R,z+st-lt-at):R,z,b?N(q,ct):q);x[C]=ht,M[C]=ht-z}if(u){var dt,ut="x"===C?n:r,ft="x"===C?s:o,pt=x[O],mt="y"===O?"height":"width",gt=pt+w[ut],bt=pt-w[ft],vt=-1!==[n,r].indexOf(E),yt=null!=(dt=null==$?void 0:$[O])?dt:0,wt=vt?gt:pt-k[mt]-L[mt]-yt+D.altAxis,Et=vt?pt+k[mt]+L[mt]-yt-D.altAxis:bt,At=b&&vt?function(t,e,i){var n=X(t,e,i);return n>i?i:n}(wt,pt,Et):X(b?wt:gt,pt,b?Et:bt);x[O]=At,M[O]=At-pt}e.modifiersData[a]=M}},requiresIfExists:["offset"]};function Ct(t,e,i){void 0===i&&(i=!1);var n,s,o=S(e),r=S(e)&&function(t){var e=t.getBoundingClientRect(),i=M(e.width)/t.offsetWidth||1,n=M(e.height)/t.offsetHeight||1;return 1!==i||1!==n}(e),a=q(e),l=H(t,r,i),c={scrollLeft:0,scrollTop:0},h={x:0,y:0};return(o||!o&&!i)&&(("body"!==x(e)||dt(a))&&(c=(n=e)!==k(n)&&S(n)?{scrollLeft:(s=n).scrollLeft,scrollTop:s.scrollTop}:ct(n)),S(e)?((h=H(e,!0)).x+=e.clientLeft,h.y+=e.clientTop):a&&(h.x=ht(a))),{x:l.left+c.scrollLeft-h.x,y:l.top+c.scrollTop-h.y,width:l.width,height:l.height}}function Ot(t){var e=new Map,i=new Set,n=[];function s(t){i.add(t.name),[].concat(t.requires||[],t.requiresIfExists||[]).forEach((function(t){if(!i.has(t)){var n=e.get(t);n&&s(n)}})),n.push(t)}return t.forEach((function(t){e.set(t.name,t)})),t.forEach((function(t){i.has(t.name)||s(t)})),n}var xt={placement:"bottom",modifiers:[],strategy:"absolute"};function kt(){for(var t=arguments.length,e=new Array(t),i=0;iIt.has(t)&&It.get(t).get(e)||null,remove(t,e){if(!It.has(t))return;const i=It.get(t);i.delete(e),0===i.size&&It.delete(t)}},Pt="transitionend",Mt=t=>(t&&window.CSS&&window.CSS.escape&&(t=t.replace(/#([^\s"#']+)/g,((t,e)=>`#${CSS.escape(e)}`))),t),jt=t=>{t.dispatchEvent(new Event(Pt))},Ft=t=>!(!t||"object"!=typeof t)&&(void 0!==t.jquery&&(t=t[0]),void 0!==t.nodeType),Ht=t=>Ft(t)?t.jquery?t[0]:t:"string"==typeof t&&t.length>0?document.querySelector(Mt(t)):null,Bt=t=>{if(!Ft(t)||0===t.getClientRects().length)return!1;const e="visible"===getComputedStyle(t).getPropertyValue("visibility"),i=t.closest("details:not([open])");if(!i)return e;if(i!==t){const e=t.closest("summary");if(e&&e.parentNode!==i)return!1;if(null===e)return!1}return e},Wt=t=>!t||t.nodeType!==Node.ELEMENT_NODE||!!t.classList.contains("disabled")||(void 0!==t.disabled?t.disabled:t.hasAttribute("disabled")&&"false"!==t.getAttribute("disabled")),zt=t=>{if(!document.documentElement.attachShadow)return null;if("function"==typeof t.getRootNode){const e=t.getRootNode();return e instanceof ShadowRoot?e:null}return t 
instanceof ShadowRoot?t:t.parentNode?zt(t.parentNode):null},Rt=()=>{},qt=t=>{t.offsetHeight},Vt=()=>window.jQuery&&!document.body.hasAttribute("data-bs-no-jquery")?window.jQuery:null,Yt=[],Kt=()=>"rtl"===document.documentElement.dir,Qt=t=>{var e;e=()=>{const e=Vt();if(e){const i=t.NAME,n=e.fn[i];e.fn[i]=t.jQueryInterface,e.fn[i].Constructor=t,e.fn[i].noConflict=()=>(e.fn[i]=n,t.jQueryInterface)}},"loading"===document.readyState?(Yt.length||document.addEventListener("DOMContentLoaded",(()=>{for(const t of Yt)t()})),Yt.push(e)):e()},Xt=(t,e=[],i=t)=>"function"==typeof t?t(...e):i,Ut=(t,e,i=!0)=>{if(!i)return void Xt(t);const n=(t=>{if(!t)return 0;let{transitionDuration:e,transitionDelay:i}=window.getComputedStyle(t);const n=Number.parseFloat(e),s=Number.parseFloat(i);return n||s?(e=e.split(",")[0],i=i.split(",")[0],1e3*(Number.parseFloat(e)+Number.parseFloat(i))):0})(e)+5;let s=!1;const o=({target:i})=>{i===e&&(s=!0,e.removeEventListener(Pt,o),Xt(t))};e.addEventListener(Pt,o),setTimeout((()=>{s||jt(e)}),n)},Gt=(t,e,i,n)=>{const s=t.length;let o=t.indexOf(e);return-1===o?!i&&n?t[s-1]:t[0]:(o+=i?1:-1,n&&(o=(o+s)%s),t[Math.max(0,Math.min(o,s-1))])},Jt=/[^.]*(?=\..*)\.|.*/,Zt=/\..*/,te=/::\d+$/,ee={};let ie=1;const ne={mouseenter:"mouseover",mouseleave:"mouseout"},se=new Set(["click","dblclick","mouseup","mousedown","contextmenu","mousewheel","DOMMouseScroll","mouseover","mouseout","mousemove","selectstart","selectend","keydown","keypress","keyup","orientationchange","touchstart","touchmove","touchend","touchcancel","pointerdown","pointermove","pointerup","pointerleave","pointercancel","gesturestart","gesturechange","gestureend","focus","blur","change","reset","select","submit","focusin","focusout","load","unload","beforeunload","resize","move","DOMContentLoaded","readystatechange","error","abort","scroll"]);function oe(t,e){return e&&`${e}::${ie++}`||t.uidEvent||ie++}function re(t){const e=oe(t);return t.uidEvent=e,ee[e]=ee[e]||{},ee[e]}function ae(t,e,i=null){return Object.values(t).find((t=>t.callable===e&&t.delegationSelector===i))}function le(t,e,i){const n="string"==typeof e,s=n?i:e||i;let o=ue(t);return se.has(o)||(o=t),[n,s,o]}function ce(t,e,i,n,s){if("string"!=typeof e||!t)return;let[o,r,a]=le(e,i,n);if(e in ne){const t=t=>function(e){if(!e.relatedTarget||e.relatedTarget!==e.delegateTarget&&!e.delegateTarget.contains(e.relatedTarget))return t.call(this,e)};r=t(r)}const l=re(t),c=l[a]||(l[a]={}),h=ae(c,r,o?i:null);if(h)return void(h.oneOff=h.oneOff&&s);const d=oe(r,e.replace(Jt,"")),u=o?function(t,e,i){return function n(s){const o=t.querySelectorAll(e);for(let{target:r}=s;r&&r!==this;r=r.parentNode)for(const a of o)if(a===r)return pe(s,{delegateTarget:r}),n.oneOff&&fe.off(t,s.type,e,i),i.apply(r,[s])}}(t,i,r):function(t,e){return function i(n){return pe(n,{delegateTarget:t}),i.oneOff&&fe.off(t,n.type,e),e.apply(t,[n])}}(t,r);u.delegationSelector=o?i:null,u.callable=r,u.oneOff=s,u.uidEvent=d,c[d]=u,t.addEventListener(a,u,o)}function he(t,e,i,n,s){const o=ae(e[i],n,s);o&&(t.removeEventListener(i,o,Boolean(s)),delete e[i][o.uidEvent])}function de(t,e,i,n){const s=e[i]||{};for(const[o,r]of Object.entries(s))o.includes(n)&&he(t,e,i,r.callable,r.delegationSelector)}function ue(t){return t=t.replace(Zt,""),ne[t]||t}const fe={on(t,e,i,n){ce(t,e,i,n,!1)},one(t,e,i,n){ce(t,e,i,n,!0)},off(t,e,i,n){if("string"!=typeof e||!t)return;const[s,o,r]=le(e,i,n),a=r!==e,l=re(t),c=l[r]||{},h=e.startsWith(".");if(void 0===o){if(h)for(const i of Object.keys(l))de(t,l,i,e.slice(1));for(const[i,n]of 
Object.entries(c)){const s=i.replace(te,"");a&&!e.includes(s)||he(t,l,r,n.callable,n.delegationSelector)}}else{if(!Object.keys(c).length)return;he(t,l,r,o,s?i:null)}},trigger(t,e,i){if("string"!=typeof e||!t)return null;const n=Vt();let s=null,o=!0,r=!0,a=!1;e!==ue(e)&&n&&(s=n.Event(e,i),n(t).trigger(s),o=!s.isPropagationStopped(),r=!s.isImmediatePropagationStopped(),a=s.isDefaultPrevented());const l=pe(new Event(e,{bubbles:o,cancelable:!0}),i);return a&&l.preventDefault(),r&&t.dispatchEvent(l),l.defaultPrevented&&s&&s.preventDefault(),l}};function pe(t,e={}){for(const[i,n]of Object.entries(e))try{t[i]=n}catch(e){Object.defineProperty(t,i,{configurable:!0,get:()=>n})}return t}function me(t){if("true"===t)return!0;if("false"===t)return!1;if(t===Number(t).toString())return Number(t);if(""===t||"null"===t)return null;if("string"!=typeof t)return t;try{return JSON.parse(decodeURIComponent(t))}catch(e){return t}}function ge(t){return t.replace(/[A-Z]/g,(t=>`-${t.toLowerCase()}`))}const _e={setDataAttribute(t,e,i){t.setAttribute(`data-bs-${ge(e)}`,i)},removeDataAttribute(t,e){t.removeAttribute(`data-bs-${ge(e)}`)},getDataAttributes(t){if(!t)return{};const e={},i=Object.keys(t.dataset).filter((t=>t.startsWith("bs")&&!t.startsWith("bsConfig")));for(const n of i){let i=n.replace(/^bs/,"");i=i.charAt(0).toLowerCase()+i.slice(1,i.length),e[i]=me(t.dataset[n])}return e},getDataAttribute:(t,e)=>me(t.getAttribute(`data-bs-${ge(e)}`))};class be{static get Default(){return{}}static get DefaultType(){return{}}static get NAME(){throw new Error('You have to implement the static method "NAME", for each component!')}_getConfig(t){return t=this._mergeConfigObj(t),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}_configAfterMerge(t){return t}_mergeConfigObj(t,e){const i=Ft(e)?_e.getDataAttribute(e,"config"):{};return{...this.constructor.Default,..."object"==typeof i?i:{},...Ft(e)?_e.getDataAttributes(e):{},..."object"==typeof t?t:{}}}_typeCheckConfig(t,e=this.constructor.DefaultType){for(const[n,s]of Object.entries(e)){const e=t[n],o=Ft(e)?"element":null==(i=e)?`${i}`:Object.prototype.toString.call(i).match(/\s([a-z]+)/i)[1].toLowerCase();if(!new RegExp(s).test(o))throw new TypeError(`${this.constructor.NAME.toUpperCase()}: Option "${n}" provided type "${o}" but expected type "${s}".`)}var i}}class ve extends be{constructor(t,e){super(),(t=Ht(t))&&(this._element=t,this._config=this._getConfig(e),Nt.set(this._element,this.constructor.DATA_KEY,this))}dispose(){Nt.remove(this._element,this.constructor.DATA_KEY),fe.off(this._element,this.constructor.EVENT_KEY);for(const t of Object.getOwnPropertyNames(this))this[t]=null}_queueCallback(t,e,i=!0){Ut(t,e,i)}_getConfig(t){return t=this._mergeConfigObj(t,this._element),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}static getInstance(t){return Nt.get(Ht(t),this.DATA_KEY)}static getOrCreateInstance(t,e={}){return this.getInstance(t)||new this(t,"object"==typeof e?e:null)}static get VERSION(){return"5.3.3"}static get DATA_KEY(){return`bs.${this.NAME}`}static get EVENT_KEY(){return`.${this.DATA_KEY}`}static eventName(t){return`${t}${this.EVENT_KEY}`}}const ye=t=>{let e=t.getAttribute("data-bs-target");if(!e||"#"===e){let i=t.getAttribute("href");if(!i||!i.includes("#")&&!i.startsWith("."))return null;i.includes("#")&&!i.startsWith("#")&&(i=`#${i.split("#")[1]}`),e=i&&"#"!==i?i.trim():null}return 
e?e.split(",").map((t=>Mt(t))).join(","):null},we={find:(t,e=document.documentElement)=>[].concat(...Element.prototype.querySelectorAll.call(e,t)),findOne:(t,e=document.documentElement)=>Element.prototype.querySelector.call(e,t),children:(t,e)=>[].concat(...t.children).filter((t=>t.matches(e))),parents(t,e){const i=[];let n=t.parentNode.closest(e);for(;n;)i.push(n),n=n.parentNode.closest(e);return i},prev(t,e){let i=t.previousElementSibling;for(;i;){if(i.matches(e))return[i];i=i.previousElementSibling}return[]},next(t,e){let i=t.nextElementSibling;for(;i;){if(i.matches(e))return[i];i=i.nextElementSibling}return[]},focusableChildren(t){const e=["a","button","input","textarea","select","details","[tabindex]",'[contenteditable="true"]'].map((t=>`${t}:not([tabindex^="-"])`)).join(",");return this.find(e,t).filter((t=>!Wt(t)&&Bt(t)))},getSelectorFromElement(t){const e=ye(t);return e&&we.findOne(e)?e:null},getElementFromSelector(t){const e=ye(t);return e?we.findOne(e):null},getMultipleElementsFromSelector(t){const e=ye(t);return e?we.find(e):[]}},Ee=(t,e="hide")=>{const i=`click.dismiss${t.EVENT_KEY}`,n=t.NAME;fe.on(document,i,`[data-bs-dismiss="${n}"]`,(function(i){if(["A","AREA"].includes(this.tagName)&&i.preventDefault(),Wt(this))return;const s=we.getElementFromSelector(this)||this.closest(`.${n}`);t.getOrCreateInstance(s)[e]()}))},Ae=".bs.alert",Te=`close${Ae}`,Ce=`closed${Ae}`;class Oe extends ve{static get NAME(){return"alert"}close(){if(fe.trigger(this._element,Te).defaultPrevented)return;this._element.classList.remove("show");const t=this._element.classList.contains("fade");this._queueCallback((()=>this._destroyElement()),this._element,t)}_destroyElement(){this._element.remove(),fe.trigger(this._element,Ce),this.dispose()}static jQueryInterface(t){return this.each((function(){const e=Oe.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}Ee(Oe,"close"),Qt(Oe);const xe='[data-bs-toggle="button"]';class ke extends ve{static get NAME(){return"button"}toggle(){this._element.setAttribute("aria-pressed",this._element.classList.toggle("active"))}static jQueryInterface(t){return this.each((function(){const e=ke.getOrCreateInstance(this);"toggle"===t&&e[t]()}))}}fe.on(document,"click.bs.button.data-api",xe,(t=>{t.preventDefault();const e=t.target.closest(xe);ke.getOrCreateInstance(e).toggle()})),Qt(ke);const Le=".bs.swipe",Se=`touchstart${Le}`,De=`touchmove${Le}`,$e=`touchend${Le}`,Ie=`pointerdown${Le}`,Ne=`pointerup${Le}`,Pe={endCallback:null,leftCallback:null,rightCallback:null},Me={endCallback:"(function|null)",leftCallback:"(function|null)",rightCallback:"(function|null)"};class je extends be{constructor(t,e){super(),this._element=t,t&&je.isSupported()&&(this._config=this._getConfig(e),this._deltaX=0,this._supportPointerEvents=Boolean(window.PointerEvent),this._initEvents())}static get Default(){return Pe}static get DefaultType(){return Me}static get NAME(){return"swipe"}dispose(){fe.off(this._element,Le)}_start(t){this._supportPointerEvents?this._eventIsPointerPenTouch(t)&&(this._deltaX=t.clientX):this._deltaX=t.touches[0].clientX}_end(t){this._eventIsPointerPenTouch(t)&&(this._deltaX=t.clientX-this._deltaX),this._handleSwipe(),Xt(this._config.endCallback)}_move(t){this._deltaX=t.touches&&t.touches.length>1?0:t.touches[0].clientX-this._deltaX}_handleSwipe(){const t=Math.abs(this._deltaX);if(t<=40)return;const 
e=t/this._deltaX;this._deltaX=0,e&&Xt(e>0?this._config.rightCallback:this._config.leftCallback)}_initEvents(){this._supportPointerEvents?(fe.on(this._element,Ie,(t=>this._start(t))),fe.on(this._element,Ne,(t=>this._end(t))),this._element.classList.add("pointer-event")):(fe.on(this._element,Se,(t=>this._start(t))),fe.on(this._element,De,(t=>this._move(t))),fe.on(this._element,$e,(t=>this._end(t))))}_eventIsPointerPenTouch(t){return this._supportPointerEvents&&("pen"===t.pointerType||"touch"===t.pointerType)}static isSupported(){return"ontouchstart"in document.documentElement||navigator.maxTouchPoints>0}}const Fe=".bs.carousel",He=".data-api",Be="ArrowLeft",We="ArrowRight",ze="next",Re="prev",qe="left",Ve="right",Ye=`slide${Fe}`,Ke=`slid${Fe}`,Qe=`keydown${Fe}`,Xe=`mouseenter${Fe}`,Ue=`mouseleave${Fe}`,Ge=`dragstart${Fe}`,Je=`load${Fe}${He}`,Ze=`click${Fe}${He}`,ti="carousel",ei="active",ii=".active",ni=".carousel-item",si=ii+ni,oi={[Be]:Ve,[We]:qe},ri={interval:5e3,keyboard:!0,pause:"hover",ride:!1,touch:!0,wrap:!0},ai={interval:"(number|boolean)",keyboard:"boolean",pause:"(string|boolean)",ride:"(boolean|string)",touch:"boolean",wrap:"boolean"};class li extends ve{constructor(t,e){super(t,e),this._interval=null,this._activeElement=null,this._isSliding=!1,this.touchTimeout=null,this._swipeHelper=null,this._indicatorsElement=we.findOne(".carousel-indicators",this._element),this._addEventListeners(),this._config.ride===ti&&this.cycle()}static get Default(){return ri}static get DefaultType(){return ai}static get NAME(){return"carousel"}next(){this._slide(ze)}nextWhenVisible(){!document.hidden&&Bt(this._element)&&this.next()}prev(){this._slide(Re)}pause(){this._isSliding&&jt(this._element),this._clearInterval()}cycle(){this._clearInterval(),this._updateInterval(),this._interval=setInterval((()=>this.nextWhenVisible()),this._config.interval)}_maybeEnableCycle(){this._config.ride&&(this._isSliding?fe.one(this._element,Ke,(()=>this.cycle())):this.cycle())}to(t){const e=this._getItems();if(t>e.length-1||t<0)return;if(this._isSliding)return void fe.one(this._element,Ke,(()=>this.to(t)));const i=this._getItemIndex(this._getActive());if(i===t)return;const n=t>i?ze:Re;this._slide(n,e[t])}dispose(){this._swipeHelper&&this._swipeHelper.dispose(),super.dispose()}_configAfterMerge(t){return t.defaultInterval=t.interval,t}_addEventListeners(){this._config.keyboard&&fe.on(this._element,Qe,(t=>this._keydown(t))),"hover"===this._config.pause&&(fe.on(this._element,Xe,(()=>this.pause())),fe.on(this._element,Ue,(()=>this._maybeEnableCycle()))),this._config.touch&&je.isSupported()&&this._addTouchEventListeners()}_addTouchEventListeners(){for(const t of we.find(".carousel-item img",this._element))fe.on(t,Ge,(t=>t.preventDefault()));const t={leftCallback:()=>this._slide(this._directionToOrder(qe)),rightCallback:()=>this._slide(this._directionToOrder(Ve)),endCallback:()=>{"hover"===this._config.pause&&(this.pause(),this.touchTimeout&&clearTimeout(this.touchTimeout),this.touchTimeout=setTimeout((()=>this._maybeEnableCycle()),500+this._config.interval))}};this._swipeHelper=new je(this._element,t)}_keydown(t){if(/input|textarea/i.test(t.target.tagName))return;const e=oi[t.key];e&&(t.preventDefault(),this._slide(this._directionToOrder(e)))}_getItemIndex(t){return this._getItems().indexOf(t)}_setActiveIndicatorElement(t){if(!this._indicatorsElement)return;const e=we.findOne(ii,this._indicatorsElement);e.classList.remove(ei),e.removeAttribute("aria-current");const 
i=we.findOne(`[data-bs-slide-to="${t}"]`,this._indicatorsElement);i&&(i.classList.add(ei),i.setAttribute("aria-current","true"))}_updateInterval(){const t=this._activeElement||this._getActive();if(!t)return;const e=Number.parseInt(t.getAttribute("data-bs-interval"),10);this._config.interval=e||this._config.defaultInterval}_slide(t,e=null){if(this._isSliding)return;const i=this._getActive(),n=t===ze,s=e||Gt(this._getItems(),i,n,this._config.wrap);if(s===i)return;const o=this._getItemIndex(s),r=e=>fe.trigger(this._element,e,{relatedTarget:s,direction:this._orderToDirection(t),from:this._getItemIndex(i),to:o});if(r(Ye).defaultPrevented)return;if(!i||!s)return;const a=Boolean(this._interval);this.pause(),this._isSliding=!0,this._setActiveIndicatorElement(o),this._activeElement=s;const l=n?"carousel-item-start":"carousel-item-end",c=n?"carousel-item-next":"carousel-item-prev";s.classList.add(c),qt(s),i.classList.add(l),s.classList.add(l),this._queueCallback((()=>{s.classList.remove(l,c),s.classList.add(ei),i.classList.remove(ei,c,l),this._isSliding=!1,r(Ke)}),i,this._isAnimated()),a&&this.cycle()}_isAnimated(){return this._element.classList.contains("slide")}_getActive(){return we.findOne(si,this._element)}_getItems(){return we.find(ni,this._element)}_clearInterval(){this._interval&&(clearInterval(this._interval),this._interval=null)}_directionToOrder(t){return Kt()?t===qe?Re:ze:t===qe?ze:Re}_orderToDirection(t){return Kt()?t===Re?qe:Ve:t===Re?Ve:qe}static jQueryInterface(t){return this.each((function(){const e=li.getOrCreateInstance(this,t);if("number"!=typeof t){if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}else e.to(t)}))}}fe.on(document,Ze,"[data-bs-slide], [data-bs-slide-to]",(function(t){const e=we.getElementFromSelector(this);if(!e||!e.classList.contains(ti))return;t.preventDefault();const i=li.getOrCreateInstance(e),n=this.getAttribute("data-bs-slide-to");return n?(i.to(n),void i._maybeEnableCycle()):"next"===_e.getDataAttribute(this,"slide")?(i.next(),void i._maybeEnableCycle()):(i.prev(),void i._maybeEnableCycle())})),fe.on(window,Je,(()=>{const t=we.find('[data-bs-ride="carousel"]');for(const e of t)li.getOrCreateInstance(e)})),Qt(li);const ci=".bs.collapse",hi=`show${ci}`,di=`shown${ci}`,ui=`hide${ci}`,fi=`hidden${ci}`,pi=`click${ci}.data-api`,mi="show",gi="collapse",_i="collapsing",bi=`:scope .${gi} .${gi}`,vi='[data-bs-toggle="collapse"]',yi={parent:null,toggle:!0},wi={parent:"(null|element)",toggle:"boolean"};class Ei extends ve{constructor(t,e){super(t,e),this._isTransitioning=!1,this._triggerArray=[];const i=we.find(vi);for(const t of i){const e=we.getSelectorFromElement(t),i=we.find(e).filter((t=>t===this._element));null!==e&&i.length&&this._triggerArray.push(t)}this._initializeChildren(),this._config.parent||this._addAriaAndCollapsedClass(this._triggerArray,this._isShown()),this._config.toggle&&this.toggle()}static get Default(){return yi}static get DefaultType(){return wi}static get NAME(){return"collapse"}toggle(){this._isShown()?this.hide():this.show()}show(){if(this._isTransitioning||this._isShown())return;let t=[];if(this._config.parent&&(t=this._getFirstLevelChildren(".collapse.show, .collapse.collapsing").filter((t=>t!==this._element)).map((t=>Ei.getOrCreateInstance(t,{toggle:!1})))),t.length&&t[0]._isTransitioning)return;if(fe.trigger(this._element,hi).defaultPrevented)return;for(const e of t)e.hide();const 
e=this._getDimension();this._element.classList.remove(gi),this._element.classList.add(_i),this._element.style[e]=0,this._addAriaAndCollapsedClass(this._triggerArray,!0),this._isTransitioning=!0;const i=`scroll${e[0].toUpperCase()+e.slice(1)}`;this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(_i),this._element.classList.add(gi,mi),this._element.style[e]="",fe.trigger(this._element,di)}),this._element,!0),this._element.style[e]=`${this._element[i]}px`}hide(){if(this._isTransitioning||!this._isShown())return;if(fe.trigger(this._element,ui).defaultPrevented)return;const t=this._getDimension();this._element.style[t]=`${this._element.getBoundingClientRect()[t]}px`,qt(this._element),this._element.classList.add(_i),this._element.classList.remove(gi,mi);for(const t of this._triggerArray){const e=we.getElementFromSelector(t);e&&!this._isShown(e)&&this._addAriaAndCollapsedClass([t],!1)}this._isTransitioning=!0,this._element.style[t]="",this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(_i),this._element.classList.add(gi),fe.trigger(this._element,fi)}),this._element,!0)}_isShown(t=this._element){return t.classList.contains(mi)}_configAfterMerge(t){return t.toggle=Boolean(t.toggle),t.parent=Ht(t.parent),t}_getDimension(){return this._element.classList.contains("collapse-horizontal")?"width":"height"}_initializeChildren(){if(!this._config.parent)return;const t=this._getFirstLevelChildren(vi);for(const e of t){const t=we.getElementFromSelector(e);t&&this._addAriaAndCollapsedClass([e],this._isShown(t))}}_getFirstLevelChildren(t){const e=we.find(bi,this._config.parent);return we.find(t,this._config.parent).filter((t=>!e.includes(t)))}_addAriaAndCollapsedClass(t,e){if(t.length)for(const i of t)i.classList.toggle("collapsed",!e),i.setAttribute("aria-expanded",e)}static jQueryInterface(t){const e={};return"string"==typeof t&&/show|hide/.test(t)&&(e.toggle=!1),this.each((function(){const i=Ei.getOrCreateInstance(this,e);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t]()}}))}}fe.on(document,pi,vi,(function(t){("A"===t.target.tagName||t.delegateTarget&&"A"===t.delegateTarget.tagName)&&t.preventDefault();for(const t of we.getMultipleElementsFromSelector(this))Ei.getOrCreateInstance(t,{toggle:!1}).toggle()})),Qt(Ei);const Ai="dropdown",Ti=".bs.dropdown",Ci=".data-api",Oi="ArrowUp",xi="ArrowDown",ki=`hide${Ti}`,Li=`hidden${Ti}`,Si=`show${Ti}`,Di=`shown${Ti}`,$i=`click${Ti}${Ci}`,Ii=`keydown${Ti}${Ci}`,Ni=`keyup${Ti}${Ci}`,Pi="show",Mi='[data-bs-toggle="dropdown"]:not(.disabled):not(:disabled)',ji=`${Mi}.${Pi}`,Fi=".dropdown-menu",Hi=Kt()?"top-end":"top-start",Bi=Kt()?"top-start":"top-end",Wi=Kt()?"bottom-end":"bottom-start",zi=Kt()?"bottom-start":"bottom-end",Ri=Kt()?"left-start":"right-start",qi=Kt()?"right-start":"left-start",Vi={autoClose:!0,boundary:"clippingParents",display:"dynamic",offset:[0,2],popperConfig:null,reference:"toggle"},Yi={autoClose:"(boolean|string)",boundary:"(string|element)",display:"string",offset:"(array|string|function)",popperConfig:"(null|object|function)",reference:"(string|element|object)"};class Ki extends ve{constructor(t,e){super(t,e),this._popper=null,this._parent=this._element.parentNode,this._menu=we.next(this._element,Fi)[0]||we.prev(this._element,Fi)[0]||we.findOne(Fi,this._parent),this._inNavbar=this._detectNavbar()}static get Default(){return Vi}static get DefaultType(){return Yi}static get NAME(){return Ai}toggle(){return 
this._isShown()?this.hide():this.show()}show(){if(Wt(this._element)||this._isShown())return;const t={relatedTarget:this._element};if(!fe.trigger(this._element,Si,t).defaultPrevented){if(this._createPopper(),"ontouchstart"in document.documentElement&&!this._parent.closest(".navbar-nav"))for(const t of[].concat(...document.body.children))fe.on(t,"mouseover",Rt);this._element.focus(),this._element.setAttribute("aria-expanded",!0),this._menu.classList.add(Pi),this._element.classList.add(Pi),fe.trigger(this._element,Di,t)}}hide(){if(Wt(this._element)||!this._isShown())return;const t={relatedTarget:this._element};this._completeHide(t)}dispose(){this._popper&&this._popper.destroy(),super.dispose()}update(){this._inNavbar=this._detectNavbar(),this._popper&&this._popper.update()}_completeHide(t){if(!fe.trigger(this._element,ki,t).defaultPrevented){if("ontouchstart"in document.documentElement)for(const t of[].concat(...document.body.children))fe.off(t,"mouseover",Rt);this._popper&&this._popper.destroy(),this._menu.classList.remove(Pi),this._element.classList.remove(Pi),this._element.setAttribute("aria-expanded","false"),_e.removeDataAttribute(this._menu,"popper"),fe.trigger(this._element,Li,t)}}_getConfig(t){if("object"==typeof(t=super._getConfig(t)).reference&&!Ft(t.reference)&&"function"!=typeof t.reference.getBoundingClientRect)throw new TypeError(`${Ai.toUpperCase()}: Option "reference" provided type "object" without a required "getBoundingClientRect" method.`);return t}_createPopper(){if(void 0===e)throw new TypeError("Bootstrap's dropdowns require Popper (https://popper.js.org)");let t=this._element;"parent"===this._config.reference?t=this._parent:Ft(this._config.reference)?t=Ht(this._config.reference):"object"==typeof this._config.reference&&(t=this._config.reference);const i=this._getPopperConfig();this._popper=Dt(t,this._menu,i)}_isShown(){return this._menu.classList.contains(Pi)}_getPlacement(){const t=this._parent;if(t.classList.contains("dropend"))return Ri;if(t.classList.contains("dropstart"))return qi;if(t.classList.contains("dropup-center"))return"top";if(t.classList.contains("dropdown-center"))return"bottom";const e="end"===getComputedStyle(this._menu).getPropertyValue("--bs-position").trim();return t.classList.contains("dropup")?e?Bi:Hi:e?zi:Wi}_detectNavbar(){return null!==this._element.closest(".navbar")}_getOffset(){const{offset:t}=this._config;return"string"==typeof t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_getPopperConfig(){const t={placement:this._getPlacement(),modifiers:[{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"offset",options:{offset:this._getOffset()}}]};return(this._inNavbar||"static"===this._config.display)&&(_e.setDataAttribute(this._menu,"popper","static"),t.modifiers=[{name:"applyStyles",enabled:!1}]),{...t,...Xt(this._config.popperConfig,[t])}}_selectMenuItem({key:t,target:e}){const i=we.find(".dropdown-menu .dropdown-item:not(.disabled):not(:disabled)",this._menu).filter((t=>Bt(t)));i.length&&Gt(i,e,t===xi,!i.includes(e)).focus()}static jQueryInterface(t){return this.each((function(){const e=Ki.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}static clearMenus(t){if(2===t.button||"keyup"===t.type&&"Tab"!==t.key)return;const e=we.find(ji);for(const i of e){const e=Ki.getInstance(i);if(!e||!1===e._config.autoClose)continue;const 
n=t.composedPath(),s=n.includes(e._menu);if(n.includes(e._element)||"inside"===e._config.autoClose&&!s||"outside"===e._config.autoClose&&s)continue;if(e._menu.contains(t.target)&&("keyup"===t.type&&"Tab"===t.key||/input|select|option|textarea|form/i.test(t.target.tagName)))continue;const o={relatedTarget:e._element};"click"===t.type&&(o.clickEvent=t),e._completeHide(o)}}static dataApiKeydownHandler(t){const e=/input|textarea/i.test(t.target.tagName),i="Escape"===t.key,n=[Oi,xi].includes(t.key);if(!n&&!i)return;if(e&&!i)return;t.preventDefault();const s=this.matches(Mi)?this:we.prev(this,Mi)[0]||we.next(this,Mi)[0]||we.findOne(Mi,t.delegateTarget.parentNode),o=Ki.getOrCreateInstance(s);if(n)return t.stopPropagation(),o.show(),void o._selectMenuItem(t);o._isShown()&&(t.stopPropagation(),o.hide(),s.focus())}}fe.on(document,Ii,Mi,Ki.dataApiKeydownHandler),fe.on(document,Ii,Fi,Ki.dataApiKeydownHandler),fe.on(document,$i,Ki.clearMenus),fe.on(document,Ni,Ki.clearMenus),fe.on(document,$i,Mi,(function(t){t.preventDefault(),Ki.getOrCreateInstance(this).toggle()})),Qt(Ki);const Qi="backdrop",Xi="show",Ui=`mousedown.bs.${Qi}`,Gi={className:"modal-backdrop",clickCallback:null,isAnimated:!1,isVisible:!0,rootElement:"body"},Ji={className:"string",clickCallback:"(function|null)",isAnimated:"boolean",isVisible:"boolean",rootElement:"(element|string)"};class Zi extends be{constructor(t){super(),this._config=this._getConfig(t),this._isAppended=!1,this._element=null}static get Default(){return Gi}static get DefaultType(){return Ji}static get NAME(){return Qi}show(t){if(!this._config.isVisible)return void Xt(t);this._append();const e=this._getElement();this._config.isAnimated&&qt(e),e.classList.add(Xi),this._emulateAnimation((()=>{Xt(t)}))}hide(t){this._config.isVisible?(this._getElement().classList.remove(Xi),this._emulateAnimation((()=>{this.dispose(),Xt(t)}))):Xt(t)}dispose(){this._isAppended&&(fe.off(this._element,Ui),this._element.remove(),this._isAppended=!1)}_getElement(){if(!this._element){const t=document.createElement("div");t.className=this._config.className,this._config.isAnimated&&t.classList.add("fade"),this._element=t}return this._element}_configAfterMerge(t){return t.rootElement=Ht(t.rootElement),t}_append(){if(this._isAppended)return;const t=this._getElement();this._config.rootElement.append(t),fe.on(t,Ui,(()=>{Xt(this._config.clickCallback)})),this._isAppended=!0}_emulateAnimation(t){Ut(t,this._getElement(),this._config.isAnimated)}}const tn=".bs.focustrap",en=`focusin${tn}`,nn=`keydown.tab${tn}`,sn="backward",on={autofocus:!0,trapElement:null},rn={autofocus:"boolean",trapElement:"element"};class an extends be{constructor(t){super(),this._config=this._getConfig(t),this._isActive=!1,this._lastTabNavDirection=null}static get Default(){return on}static get DefaultType(){return rn}static get NAME(){return"focustrap"}activate(){this._isActive||(this._config.autofocus&&this._config.trapElement.focus(),fe.off(document,tn),fe.on(document,en,(t=>this._handleFocusin(t))),fe.on(document,nn,(t=>this._handleKeydown(t))),this._isActive=!0)}deactivate(){this._isActive&&(this._isActive=!1,fe.off(document,tn))}_handleFocusin(t){const{trapElement:e}=this._config;if(t.target===document||t.target===e||e.contains(t.target))return;const i=we.focusableChildren(e);0===i.length?e.focus():this._lastTabNavDirection===sn?i[i.length-1].focus():i[0].focus()}_handleKeydown(t){"Tab"===t.key&&(this._lastTabNavDirection=t.shiftKey?sn:"forward")}}const ln=".fixed-top, .fixed-bottom, .is-fixed, 
.sticky-top",cn=".sticky-top",hn="padding-right",dn="margin-right";class un{constructor(){this._element=document.body}getWidth(){const t=document.documentElement.clientWidth;return Math.abs(window.innerWidth-t)}hide(){const t=this.getWidth();this._disableOverFlow(),this._setElementAttributes(this._element,hn,(e=>e+t)),this._setElementAttributes(ln,hn,(e=>e+t)),this._setElementAttributes(cn,dn,(e=>e-t))}reset(){this._resetElementAttributes(this._element,"overflow"),this._resetElementAttributes(this._element,hn),this._resetElementAttributes(ln,hn),this._resetElementAttributes(cn,dn)}isOverflowing(){return this.getWidth()>0}_disableOverFlow(){this._saveInitialAttribute(this._element,"overflow"),this._element.style.overflow="hidden"}_setElementAttributes(t,e,i){const n=this.getWidth();this._applyManipulationCallback(t,(t=>{if(t!==this._element&&window.innerWidth>t.clientWidth+n)return;this._saveInitialAttribute(t,e);const s=window.getComputedStyle(t).getPropertyValue(e);t.style.setProperty(e,`${i(Number.parseFloat(s))}px`)}))}_saveInitialAttribute(t,e){const i=t.style.getPropertyValue(e);i&&_e.setDataAttribute(t,e,i)}_resetElementAttributes(t,e){this._applyManipulationCallback(t,(t=>{const i=_e.getDataAttribute(t,e);null!==i?(_e.removeDataAttribute(t,e),t.style.setProperty(e,i)):t.style.removeProperty(e)}))}_applyManipulationCallback(t,e){if(Ft(t))e(t);else for(const i of we.find(t,this._element))e(i)}}const fn=".bs.modal",pn=`hide${fn}`,mn=`hidePrevented${fn}`,gn=`hidden${fn}`,_n=`show${fn}`,bn=`shown${fn}`,vn=`resize${fn}`,yn=`click.dismiss${fn}`,wn=`mousedown.dismiss${fn}`,En=`keydown.dismiss${fn}`,An=`click${fn}.data-api`,Tn="modal-open",Cn="show",On="modal-static",xn={backdrop:!0,focus:!0,keyboard:!0},kn={backdrop:"(boolean|string)",focus:"boolean",keyboard:"boolean"};class Ln extends ve{constructor(t,e){super(t,e),this._dialog=we.findOne(".modal-dialog",this._element),this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._isShown=!1,this._isTransitioning=!1,this._scrollBar=new un,this._addEventListeners()}static get Default(){return xn}static get DefaultType(){return kn}static get NAME(){return"modal"}toggle(t){return this._isShown?this.hide():this.show(t)}show(t){this._isShown||this._isTransitioning||fe.trigger(this._element,_n,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._isTransitioning=!0,this._scrollBar.hide(),document.body.classList.add(Tn),this._adjustDialog(),this._backdrop.show((()=>this._showElement(t))))}hide(){this._isShown&&!this._isTransitioning&&(fe.trigger(this._element,pn).defaultPrevented||(this._isShown=!1,this._isTransitioning=!0,this._focustrap.deactivate(),this._element.classList.remove(Cn),this._queueCallback((()=>this._hideModal()),this._element,this._isAnimated())))}dispose(){fe.off(window,fn),fe.off(this._dialog,fn),this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}handleUpdate(){this._adjustDialog()}_initializeBackDrop(){return new Zi({isVisible:Boolean(this._config.backdrop),isAnimated:this._isAnimated()})}_initializeFocusTrap(){return new an({trapElement:this._element})}_showElement(t){document.body.contains(this._element)||document.body.append(this._element),this._element.style.display="block",this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.scrollTop=0;const 
e=we.findOne(".modal-body",this._dialog);e&&(e.scrollTop=0),qt(this._element),this._element.classList.add(Cn),this._queueCallback((()=>{this._config.focus&&this._focustrap.activate(),this._isTransitioning=!1,fe.trigger(this._element,bn,{relatedTarget:t})}),this._dialog,this._isAnimated())}_addEventListeners(){fe.on(this._element,En,(t=>{"Escape"===t.key&&(this._config.keyboard?this.hide():this._triggerBackdropTransition())})),fe.on(window,vn,(()=>{this._isShown&&!this._isTransitioning&&this._adjustDialog()})),fe.on(this._element,wn,(t=>{fe.one(this._element,yn,(e=>{this._element===t.target&&this._element===e.target&&("static"!==this._config.backdrop?this._config.backdrop&&this.hide():this._triggerBackdropTransition())}))}))}_hideModal(){this._element.style.display="none",this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._isTransitioning=!1,this._backdrop.hide((()=>{document.body.classList.remove(Tn),this._resetAdjustments(),this._scrollBar.reset(),fe.trigger(this._element,gn)}))}_isAnimated(){return this._element.classList.contains("fade")}_triggerBackdropTransition(){if(fe.trigger(this._element,mn).defaultPrevented)return;const t=this._element.scrollHeight>document.documentElement.clientHeight,e=this._element.style.overflowY;"hidden"===e||this._element.classList.contains(On)||(t||(this._element.style.overflowY="hidden"),this._element.classList.add(On),this._queueCallback((()=>{this._element.classList.remove(On),this._queueCallback((()=>{this._element.style.overflowY=e}),this._dialog)}),this._dialog),this._element.focus())}_adjustDialog(){const t=this._element.scrollHeight>document.documentElement.clientHeight,e=this._scrollBar.getWidth(),i=e>0;if(i&&!t){const t=Kt()?"paddingLeft":"paddingRight";this._element.style[t]=`${e}px`}if(!i&&t){const t=Kt()?"paddingRight":"paddingLeft";this._element.style[t]=`${e}px`}}_resetAdjustments(){this._element.style.paddingLeft="",this._element.style.paddingRight=""}static jQueryInterface(t,e){return this.each((function(){const i=Ln.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t](e)}}))}}fe.on(document,An,'[data-bs-toggle="modal"]',(function(t){const e=we.getElementFromSelector(this);["A","AREA"].includes(this.tagName)&&t.preventDefault(),fe.one(e,_n,(t=>{t.defaultPrevented||fe.one(e,gn,(()=>{Bt(this)&&this.focus()}))}));const i=we.findOne(".modal.show");i&&Ln.getInstance(i).hide(),Ln.getOrCreateInstance(e).toggle(this)})),Ee(Ln),Qt(Ln);const Sn=".bs.offcanvas",Dn=".data-api",$n=`load${Sn}${Dn}`,In="show",Nn="showing",Pn="hiding",Mn=".offcanvas.show",jn=`show${Sn}`,Fn=`shown${Sn}`,Hn=`hide${Sn}`,Bn=`hidePrevented${Sn}`,Wn=`hidden${Sn}`,zn=`resize${Sn}`,Rn=`click${Sn}${Dn}`,qn=`keydown.dismiss${Sn}`,Vn={backdrop:!0,keyboard:!0,scroll:!1},Yn={backdrop:"(boolean|string)",keyboard:"boolean",scroll:"boolean"};class Kn extends ve{constructor(t,e){super(t,e),this._isShown=!1,this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._addEventListeners()}static get Default(){return Vn}static get DefaultType(){return Yn}static get NAME(){return"offcanvas"}toggle(t){return this._isShown?this.hide():this.show(t)}show(t){this._isShown||fe.trigger(this._element,jn,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._backdrop.show(),this._config.scroll||(new 
un).hide(),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.classList.add(Nn),this._queueCallback((()=>{this._config.scroll&&!this._config.backdrop||this._focustrap.activate(),this._element.classList.add(In),this._element.classList.remove(Nn),fe.trigger(this._element,Fn,{relatedTarget:t})}),this._element,!0))}hide(){this._isShown&&(fe.trigger(this._element,Hn).defaultPrevented||(this._focustrap.deactivate(),this._element.blur(),this._isShown=!1,this._element.classList.add(Pn),this._backdrop.hide(),this._queueCallback((()=>{this._element.classList.remove(In,Pn),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._config.scroll||(new un).reset(),fe.trigger(this._element,Wn)}),this._element,!0)))}dispose(){this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}_initializeBackDrop(){const t=Boolean(this._config.backdrop);return new Zi({className:"offcanvas-backdrop",isVisible:t,isAnimated:!0,rootElement:this._element.parentNode,clickCallback:t?()=>{"static"!==this._config.backdrop?this.hide():fe.trigger(this._element,Bn)}:null})}_initializeFocusTrap(){return new an({trapElement:this._element})}_addEventListeners(){fe.on(this._element,qn,(t=>{"Escape"===t.key&&(this._config.keyboard?this.hide():fe.trigger(this._element,Bn))}))}static jQueryInterface(t){return this.each((function(){const e=Kn.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}fe.on(document,Rn,'[data-bs-toggle="offcanvas"]',(function(t){const e=we.getElementFromSelector(this);if(["A","AREA"].includes(this.tagName)&&t.preventDefault(),Wt(this))return;fe.one(e,Wn,(()=>{Bt(this)&&this.focus()}));const i=we.findOne(Mn);i&&i!==e&&Kn.getInstance(i).hide(),Kn.getOrCreateInstance(e).toggle(this)})),fe.on(window,$n,(()=>{for(const t of we.find(Mn))Kn.getOrCreateInstance(t).show()})),fe.on(window,zn,(()=>{for(const t of we.find("[aria-modal][class*=show][class*=offcanvas-]"))"fixed"!==getComputedStyle(t).position&&Kn.getOrCreateInstance(t).hide()})),Ee(Kn),Qt(Kn);const Qn={"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],dd:[],div:[],dl:[],dt:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","srcset","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},Xn=new Set(["background","cite","href","itemtype","longdesc","poster","src","xlink:href"]),Un=/^(?!javascript:)(?:[a-z0-9+.-]+:|[^&:/?#]*(?:[/?#]|$))/i,Gn=(t,e)=>{const i=t.nodeName.toLowerCase();return e.includes(i)?!Xn.has(i)||Boolean(Un.test(t.nodeValue)):e.filter((t=>t instanceof RegExp)).some((t=>t.test(i)))},Jn={allowList:Qn,content:{},extraClass:"",html:!1,sanitize:!0,sanitizeFn:null,template:"
"},Zn={allowList:"object",content:"object",extraClass:"(string|function)",html:"boolean",sanitize:"boolean",sanitizeFn:"(null|function)",template:"string"},ts={entry:"(string|element|function|null)",selector:"(string|element)"};class es extends be{constructor(t){super(),this._config=this._getConfig(t)}static get Default(){return Jn}static get DefaultType(){return Zn}static get NAME(){return"TemplateFactory"}getContent(){return Object.values(this._config.content).map((t=>this._resolvePossibleFunction(t))).filter(Boolean)}hasContent(){return this.getContent().length>0}changeContent(t){return this._checkContent(t),this._config.content={...this._config.content,...t},this}toHtml(){const t=document.createElement("div");t.innerHTML=this._maybeSanitize(this._config.template);for(const[e,i]of Object.entries(this._config.content))this._setContent(t,i,e);const e=t.children[0],i=this._resolvePossibleFunction(this._config.extraClass);return i&&e.classList.add(...i.split(" ")),e}_typeCheckConfig(t){super._typeCheckConfig(t),this._checkContent(t.content)}_checkContent(t){for(const[e,i]of Object.entries(t))super._typeCheckConfig({selector:e,entry:i},ts)}_setContent(t,e,i){const n=we.findOne(i,t);n&&((e=this._resolvePossibleFunction(e))?Ft(e)?this._putElementInTemplate(Ht(e),n):this._config.html?n.innerHTML=this._maybeSanitize(e):n.textContent=e:n.remove())}_maybeSanitize(t){return this._config.sanitize?function(t,e,i){if(!t.length)return t;if(i&&"function"==typeof i)return i(t);const n=(new window.DOMParser).parseFromString(t,"text/html"),s=[].concat(...n.body.querySelectorAll("*"));for(const t of s){const i=t.nodeName.toLowerCase();if(!Object.keys(e).includes(i)){t.remove();continue}const n=[].concat(...t.attributes),s=[].concat(e["*"]||[],e[i]||[]);for(const e of n)Gn(e,s)||t.removeAttribute(e.nodeName)}return n.body.innerHTML}(t,this._config.allowList,this._config.sanitizeFn):t}_resolvePossibleFunction(t){return Xt(t,[this])}_putElementInTemplate(t,e){if(this._config.html)return e.innerHTML="",void e.append(t);e.textContent=t.textContent}}const is=new Set(["sanitize","allowList","sanitizeFn"]),ns="fade",ss="show",os=".tooltip-inner",rs=".modal",as="hide.bs.modal",ls="hover",cs="focus",hs={AUTO:"auto",TOP:"top",RIGHT:Kt()?"left":"right",BOTTOM:"bottom",LEFT:Kt()?"right":"left"},ds={allowList:Qn,animation:!0,boundary:"clippingParents",container:!1,customClass:"",delay:0,fallbackPlacements:["top","right","bottom","left"],html:!1,offset:[0,6],placement:"top",popperConfig:null,sanitize:!0,sanitizeFn:null,selector:!1,template:'',title:"",trigger:"hover focus"},us={allowList:"object",animation:"boolean",boundary:"(string|element)",container:"(string|element|boolean)",customClass:"(string|function)",delay:"(number|object)",fallbackPlacements:"array",html:"boolean",offset:"(array|string|function)",placement:"(string|function)",popperConfig:"(null|object|function)",sanitize:"boolean",sanitizeFn:"(null|function)",selector:"(string|boolean)",template:"string",title:"(string|element|function)",trigger:"string"};class fs extends ve{constructor(t,i){if(void 0===e)throw new TypeError("Bootstrap's tooltips require Popper (https://popper.js.org)");super(t,i),this._isEnabled=!0,this._timeout=0,this._isHovered=null,this._activeTrigger={},this._popper=null,this._templateFactory=null,this._newContent=null,this.tip=null,this._setListeners(),this._config.selector||this._fixTitle()}static get Default(){return ds}static get DefaultType(){return us}static get 
NAME(){return"tooltip"}enable(){this._isEnabled=!0}disable(){this._isEnabled=!1}toggleEnabled(){this._isEnabled=!this._isEnabled}toggle(){this._isEnabled&&(this._activeTrigger.click=!this._activeTrigger.click,this._isShown()?this._leave():this._enter())}dispose(){clearTimeout(this._timeout),fe.off(this._element.closest(rs),as,this._hideModalHandler),this._element.getAttribute("data-bs-original-title")&&this._element.setAttribute("title",this._element.getAttribute("data-bs-original-title")),this._disposePopper(),super.dispose()}show(){if("none"===this._element.style.display)throw new Error("Please use show on visible elements");if(!this._isWithContent()||!this._isEnabled)return;const t=fe.trigger(this._element,this.constructor.eventName("show")),e=(zt(this._element)||this._element.ownerDocument.documentElement).contains(this._element);if(t.defaultPrevented||!e)return;this._disposePopper();const i=this._getTipElement();this._element.setAttribute("aria-describedby",i.getAttribute("id"));const{container:n}=this._config;if(this._element.ownerDocument.documentElement.contains(this.tip)||(n.append(i),fe.trigger(this._element,this.constructor.eventName("inserted"))),this._popper=this._createPopper(i),i.classList.add(ss),"ontouchstart"in document.documentElement)for(const t of[].concat(...document.body.children))fe.on(t,"mouseover",Rt);this._queueCallback((()=>{fe.trigger(this._element,this.constructor.eventName("shown")),!1===this._isHovered&&this._leave(),this._isHovered=!1}),this.tip,this._isAnimated())}hide(){if(this._isShown()&&!fe.trigger(this._element,this.constructor.eventName("hide")).defaultPrevented){if(this._getTipElement().classList.remove(ss),"ontouchstart"in document.documentElement)for(const t of[].concat(...document.body.children))fe.off(t,"mouseover",Rt);this._activeTrigger.click=!1,this._activeTrigger[cs]=!1,this._activeTrigger[ls]=!1,this._isHovered=null,this._queueCallback((()=>{this._isWithActiveTrigger()||(this._isHovered||this._disposePopper(),this._element.removeAttribute("aria-describedby"),fe.trigger(this._element,this.constructor.eventName("hidden")))}),this.tip,this._isAnimated())}}update(){this._popper&&this._popper.update()}_isWithContent(){return Boolean(this._getTitle())}_getTipElement(){return this.tip||(this.tip=this._createTipElement(this._newContent||this._getContentForTemplate())),this.tip}_createTipElement(t){const e=this._getTemplateFactory(t).toHtml();if(!e)return null;e.classList.remove(ns,ss),e.classList.add(`bs-${this.constructor.NAME}-auto`);const i=(t=>{do{t+=Math.floor(1e6*Math.random())}while(document.getElementById(t));return t})(this.constructor.NAME).toString();return e.setAttribute("id",i),this._isAnimated()&&e.classList.add(ns),e}setContent(t){this._newContent=t,this._isShown()&&(this._disposePopper(),this.show())}_getTemplateFactory(t){return this._templateFactory?this._templateFactory.changeContent(t):this._templateFactory=new es({...this._config,content:t,extraClass:this._resolvePossibleFunction(this._config.customClass)}),this._templateFactory}_getContentForTemplate(){return{[os]:this._getTitle()}}_getTitle(){return this._resolvePossibleFunction(this._config.title)||this._element.getAttribute("data-bs-original-title")}_initializeOnDelegatedTarget(t){return this.constructor.getOrCreateInstance(t.delegateTarget,this._getDelegateConfig())}_isAnimated(){return this._config.animation||this.tip&&this.tip.classList.contains(ns)}_isShown(){return this.tip&&this.tip.classList.contains(ss)}_createPopper(t){const 
e=Xt(this._config.placement,[this,t,this._element]),i=hs[e.toUpperCase()];return Dt(this._element,t,this._getPopperConfig(i))}_getOffset(){const{offset:t}=this._config;return"string"==typeof t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_resolvePossibleFunction(t){return Xt(t,[this._element])}_getPopperConfig(t){const e={placement:t,modifiers:[{name:"flip",options:{fallbackPlacements:this._config.fallbackPlacements}},{name:"offset",options:{offset:this._getOffset()}},{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"arrow",options:{element:`.${this.constructor.NAME}-arrow`}},{name:"preSetPlacement",enabled:!0,phase:"beforeMain",fn:t=>{this._getTipElement().setAttribute("data-popper-placement",t.state.placement)}}]};return{...e,...Xt(this._config.popperConfig,[e])}}_setListeners(){const t=this._config.trigger.split(" ");for(const e of t)if("click"===e)fe.on(this._element,this.constructor.eventName("click"),this._config.selector,(t=>{this._initializeOnDelegatedTarget(t).toggle()}));else if("manual"!==e){const t=e===ls?this.constructor.eventName("mouseenter"):this.constructor.eventName("focusin"),i=e===ls?this.constructor.eventName("mouseleave"):this.constructor.eventName("focusout");fe.on(this._element,t,this._config.selector,(t=>{const e=this._initializeOnDelegatedTarget(t);e._activeTrigger["focusin"===t.type?cs:ls]=!0,e._enter()})),fe.on(this._element,i,this._config.selector,(t=>{const e=this._initializeOnDelegatedTarget(t);e._activeTrigger["focusout"===t.type?cs:ls]=e._element.contains(t.relatedTarget),e._leave()}))}this._hideModalHandler=()=>{this._element&&this.hide()},fe.on(this._element.closest(rs),as,this._hideModalHandler)}_fixTitle(){const t=this._element.getAttribute("title");t&&(this._element.getAttribute("aria-label")||this._element.textContent.trim()||this._element.setAttribute("aria-label",t),this._element.setAttribute("data-bs-original-title",t),this._element.removeAttribute("title"))}_enter(){this._isShown()||this._isHovered?this._isHovered=!0:(this._isHovered=!0,this._setTimeout((()=>{this._isHovered&&this.show()}),this._config.delay.show))}_leave(){this._isWithActiveTrigger()||(this._isHovered=!1,this._setTimeout((()=>{this._isHovered||this.hide()}),this._config.delay.hide))}_setTimeout(t,e){clearTimeout(this._timeout),this._timeout=setTimeout(t,e)}_isWithActiveTrigger(){return Object.values(this._activeTrigger).includes(!0)}_getConfig(t){const e=_e.getDataAttributes(this._element);for(const t of Object.keys(e))is.has(t)&&delete e[t];return t={...e,..."object"==typeof t&&t?t:{}},t=this._mergeConfigObj(t),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}_configAfterMerge(t){return t.container=!1===t.container?document.body:Ht(t.container),"number"==typeof t.delay&&(t.delay={show:t.delay,hide:t.delay}),"number"==typeof t.title&&(t.title=t.title.toString()),"number"==typeof t.content&&(t.content=t.content.toString()),t}_getDelegateConfig(){const t={};for(const[e,i]of Object.entries(this._config))this.constructor.Default[e]!==i&&(t[e]=i);return t.selector=!1,t.trigger="manual",t}_disposePopper(){this._popper&&(this._popper.destroy(),this._popper=null),this.tip&&(this.tip.remove(),this.tip=null)}static jQueryInterface(t){return this.each((function(){const e=fs.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}Qt(fs);const 
ps=".popover-header",ms=".popover-body",gs={...fs.Default,content:"",offset:[0,8],placement:"right",template:'',trigger:"click"},_s={...fs.DefaultType,content:"(null|string|element|function)"};class bs extends fs{static get Default(){return gs}static get DefaultType(){return _s}static get NAME(){return"popover"}_isWithContent(){return this._getTitle()||this._getContent()}_getContentForTemplate(){return{[ps]:this._getTitle(),[ms]:this._getContent()}}_getContent(){return this._resolvePossibleFunction(this._config.content)}static jQueryInterface(t){return this.each((function(){const e=bs.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}Qt(bs);const vs=".bs.scrollspy",ys=`activate${vs}`,ws=`click${vs}`,Es=`load${vs}.data-api`,As="active",Ts="[href]",Cs=".nav-link",Os=`${Cs}, .nav-item > ${Cs}, .list-group-item`,xs={offset:null,rootMargin:"0px 0px -25%",smoothScroll:!1,target:null,threshold:[.1,.5,1]},ks={offset:"(number|null)",rootMargin:"string",smoothScroll:"boolean",target:"element",threshold:"array"};class Ls extends ve{constructor(t,e){super(t,e),this._targetLinks=new Map,this._observableSections=new Map,this._rootElement="visible"===getComputedStyle(this._element).overflowY?null:this._element,this._activeTarget=null,this._observer=null,this._previousScrollData={visibleEntryTop:0,parentScrollTop:0},this.refresh()}static get Default(){return xs}static get DefaultType(){return ks}static get NAME(){return"scrollspy"}refresh(){this._initializeTargetsAndObservables(),this._maybeEnableSmoothScroll(),this._observer?this._observer.disconnect():this._observer=this._getNewObserver();for(const t of this._observableSections.values())this._observer.observe(t)}dispose(){this._observer.disconnect(),super.dispose()}_configAfterMerge(t){return t.target=Ht(t.target)||document.body,t.rootMargin=t.offset?`${t.offset}px 0px -30%`:t.rootMargin,"string"==typeof t.threshold&&(t.threshold=t.threshold.split(",").map((t=>Number.parseFloat(t)))),t}_maybeEnableSmoothScroll(){this._config.smoothScroll&&(fe.off(this._config.target,ws),fe.on(this._config.target,ws,Ts,(t=>{const e=this._observableSections.get(t.target.hash);if(e){t.preventDefault();const i=this._rootElement||window,n=e.offsetTop-this._element.offsetTop;if(i.scrollTo)return void i.scrollTo({top:n,behavior:"smooth"});i.scrollTop=n}})))}_getNewObserver(){const t={root:this._rootElement,threshold:this._config.threshold,rootMargin:this._config.rootMargin};return new IntersectionObserver((t=>this._observerCallback(t)),t)}_observerCallback(t){const e=t=>this._targetLinks.get(`#${t.target.id}`),i=t=>{this._previousScrollData.visibleEntryTop=t.target.offsetTop,this._process(e(t))},n=(this._rootElement||document.documentElement).scrollTop,s=n>=this._previousScrollData.parentScrollTop;this._previousScrollData.parentScrollTop=n;for(const o of t){if(!o.isIntersecting){this._activeTarget=null,this._clearActiveClass(e(o));continue}const t=o.target.offsetTop>=this._previousScrollData.visibleEntryTop;if(s&&t){if(i(o),!n)return}else s||t||i(o)}}_initializeTargetsAndObservables(){this._targetLinks=new Map,this._observableSections=new Map;const t=we.find(Ts,this._config.target);for(const e of t){if(!e.hash||Wt(e))continue;const 
t=we.findOne(decodeURI(e.hash),this._element);Bt(t)&&(this._targetLinks.set(decodeURI(e.hash),e),this._observableSections.set(e.hash,t))}}_process(t){this._activeTarget!==t&&(this._clearActiveClass(this._config.target),this._activeTarget=t,t.classList.add(As),this._activateParents(t),fe.trigger(this._element,ys,{relatedTarget:t}))}_activateParents(t){if(t.classList.contains("dropdown-item"))we.findOne(".dropdown-toggle",t.closest(".dropdown")).classList.add(As);else for(const e of we.parents(t,".nav, .list-group"))for(const t of we.prev(e,Os))t.classList.add(As)}_clearActiveClass(t){t.classList.remove(As);const e=we.find(`${Ts}.${As}`,t);for(const t of e)t.classList.remove(As)}static jQueryInterface(t){return this.each((function(){const e=Ls.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}))}}fe.on(window,Es,(()=>{for(const t of we.find('[data-bs-spy="scroll"]'))Ls.getOrCreateInstance(t)})),Qt(Ls);const Ss=".bs.tab",Ds=`hide${Ss}`,$s=`hidden${Ss}`,Is=`show${Ss}`,Ns=`shown${Ss}`,Ps=`click${Ss}`,Ms=`keydown${Ss}`,js=`load${Ss}`,Fs="ArrowLeft",Hs="ArrowRight",Bs="ArrowUp",Ws="ArrowDown",zs="Home",Rs="End",qs="active",Vs="fade",Ys="show",Ks=".dropdown-toggle",Qs=`:not(${Ks})`,Xs='[data-bs-toggle="tab"], [data-bs-toggle="pill"], [data-bs-toggle="list"]',Us=`.nav-link${Qs}, .list-group-item${Qs}, [role="tab"]${Qs}, ${Xs}`,Gs=`.${qs}[data-bs-toggle="tab"], .${qs}[data-bs-toggle="pill"], .${qs}[data-bs-toggle="list"]`;class Js extends ve{constructor(t){super(t),this._parent=this._element.closest('.list-group, .nav, [role="tablist"]'),this._parent&&(this._setInitialAttributes(this._parent,this._getChildren()),fe.on(this._element,Ms,(t=>this._keydown(t))))}static get NAME(){return"tab"}show(){const t=this._element;if(this._elemIsActive(t))return;const e=this._getActiveElem(),i=e?fe.trigger(e,Ds,{relatedTarget:t}):null;fe.trigger(t,Is,{relatedTarget:e}).defaultPrevented||i&&i.defaultPrevented||(this._deactivate(e,t),this._activate(t,e))}_activate(t,e){t&&(t.classList.add(qs),this._activate(we.getElementFromSelector(t)),this._queueCallback((()=>{"tab"===t.getAttribute("role")?(t.removeAttribute("tabindex"),t.setAttribute("aria-selected",!0),this._toggleDropDown(t,!0),fe.trigger(t,Ns,{relatedTarget:e})):t.classList.add(Ys)}),t,t.classList.contains(Vs)))}_deactivate(t,e){t&&(t.classList.remove(qs),t.blur(),this._deactivate(we.getElementFromSelector(t)),this._queueCallback((()=>{"tab"===t.getAttribute("role")?(t.setAttribute("aria-selected",!1),t.setAttribute("tabindex","-1"),this._toggleDropDown(t,!1),fe.trigger(t,$s,{relatedTarget:e})):t.classList.remove(Ys)}),t,t.classList.contains(Vs)))}_keydown(t){if(![Fs,Hs,Bs,Ws,zs,Rs].includes(t.key))return;t.stopPropagation(),t.preventDefault();const e=this._getChildren().filter((t=>!Wt(t)));let i;if([zs,Rs].includes(t.key))i=e[t.key===zs?0:e.length-1];else{const n=[Hs,Ws].includes(t.key);i=Gt(e,t.target,n,!0)}i&&(i.focus({preventScroll:!0}),Js.getOrCreateInstance(i).show())}_getChildren(){return we.find(Us,this._parent)}_getActiveElem(){return this._getChildren().find((t=>this._elemIsActive(t)))||null}_setInitialAttributes(t,e){this._setAttributeIfNotExists(t,"role","tablist");for(const t of e)this._setInitialAttributesOnChild(t)}_setInitialAttributesOnChild(t){t=this._getInnerElement(t);const 
e=this._elemIsActive(t),i=this._getOuterElement(t);t.setAttribute("aria-selected",e),i!==t&&this._setAttributeIfNotExists(i,"role","presentation"),e||t.setAttribute("tabindex","-1"),this._setAttributeIfNotExists(t,"role","tab"),this._setInitialAttributesOnTargetPanel(t)}_setInitialAttributesOnTargetPanel(t){const e=we.getElementFromSelector(t);e&&(this._setAttributeIfNotExists(e,"role","tabpanel"),t.id&&this._setAttributeIfNotExists(e,"aria-labelledby",`${t.id}`))}_toggleDropDown(t,e){const i=this._getOuterElement(t);if(!i.classList.contains("dropdown"))return;const n=(t,n)=>{const s=we.findOne(t,i);s&&s.classList.toggle(n,e)};n(Ks,qs),n(".dropdown-menu",Ys),i.setAttribute("aria-expanded",e)}_setAttributeIfNotExists(t,e,i){t.hasAttribute(e)||t.setAttribute(e,i)}_elemIsActive(t){return t.classList.contains(qs)}_getInnerElement(t){return t.matches(Us)?t:we.findOne(Us,t)}_getOuterElement(t){return t.closest(".nav-item, .list-group-item")||t}static jQueryInterface(t){return this.each((function(){const e=Js.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}))}}fe.on(document,Ps,Xs,(function(t){["A","AREA"].includes(this.tagName)&&t.preventDefault(),Wt(this)||Js.getOrCreateInstance(this).show()})),fe.on(window,js,(()=>{for(const t of we.find(Gs))Js.getOrCreateInstance(t)})),Qt(Js);const Zs=".bs.toast",to=`mouseover${Zs}`,eo=`mouseout${Zs}`,io=`focusin${Zs}`,no=`focusout${Zs}`,so=`hide${Zs}`,oo=`hidden${Zs}`,ro=`show${Zs}`,ao=`shown${Zs}`,lo="hide",co="show",ho="showing",uo={animation:"boolean",autohide:"boolean",delay:"number"},fo={animation:!0,autohide:!0,delay:5e3};class po extends ve{constructor(t,e){super(t,e),this._timeout=null,this._hasMouseInteraction=!1,this._hasKeyboardInteraction=!1,this._setListeners()}static get Default(){return fo}static get DefaultType(){return uo}static get NAME(){return"toast"}show(){fe.trigger(this._element,ro).defaultPrevented||(this._clearTimeout(),this._config.animation&&this._element.classList.add("fade"),this._element.classList.remove(lo),qt(this._element),this._element.classList.add(co,ho),this._queueCallback((()=>{this._element.classList.remove(ho),fe.trigger(this._element,ao),this._maybeScheduleHide()}),this._element,this._config.animation))}hide(){this.isShown()&&(fe.trigger(this._element,so).defaultPrevented||(this._element.classList.add(ho),this._queueCallback((()=>{this._element.classList.add(lo),this._element.classList.remove(ho,co),fe.trigger(this._element,oo)}),this._element,this._config.animation)))}dispose(){this._clearTimeout(),this.isShown()&&this._element.classList.remove(co),super.dispose()}isShown(){return this._element.classList.contains(co)}_maybeScheduleHide(){this._config.autohide&&(this._hasMouseInteraction||this._hasKeyboardInteraction||(this._timeout=setTimeout((()=>{this.hide()}),this._config.delay)))}_onInteraction(t,e){switch(t.type){case"mouseover":case"mouseout":this._hasMouseInteraction=e;break;case"focusin":case"focusout":this._hasKeyboardInteraction=e}if(e)return void this._clearTimeout();const i=t.relatedTarget;this._element===i||this._element.contains(i)||this._maybeScheduleHide()}_setListeners(){fe.on(this._element,to,(t=>this._onInteraction(t,!0))),fe.on(this._element,eo,(t=>this._onInteraction(t,!1))),fe.on(this._element,io,(t=>this._onInteraction(t,!0))),fe.on(this._element,no,(t=>this._onInteraction(t,!1)))}_clearTimeout(){clearTimeout(this._timeout),this._timeout=null}static jQueryInterface(t){return 
this.each((function(){const e=po.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}function mo(t){"loading"!=document.readyState?t():document.addEventListener("DOMContentLoaded",t)}Ee(po),Qt(po),mo((function(){[].slice.call(document.querySelectorAll('[data-bs-toggle="tooltip"]')).map((function(t){return new fs(t,{delay:{show:500,hide:100}})}))})),mo((function(){document.getElementById("pst-back-to-top").addEventListener("click",(function(){document.body.scrollTop=0,document.documentElement.scrollTop=0}))})),mo((function(){var t=document.getElementById("pst-back-to-top"),e=document.getElementsByClassName("bd-header")[0].getBoundingClientRect();window.addEventListener("scroll",(function(){this.oldScroll>this.scrollY&&this.scrollY>e.bottom?t.style.display="block":t.style.display="none",this.oldScroll=this.scrollY}))})),window.bootstrap=i})();
+//# sourceMappingURL=bootstrap.js.map
\ No newline at end of file
diff --git a/sed/2.1.0/_static/scripts/bootstrap.js.LICENSE.txt b/sed/2.1.0/_static/scripts/bootstrap.js.LICENSE.txt
new file mode 100644
index 0000000..28755c2
--- /dev/null
+++ b/sed/2.1.0/_static/scripts/bootstrap.js.LICENSE.txt
@@ -0,0 +1,5 @@
+/*!
+ * Bootstrap v5.3.3 (https://getbootstrap.com/)
+ * Copyright 2011-2024 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors)
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
+ */
diff --git a/sed/2.1.0/_static/scripts/bootstrap.js.map b/sed/2.1.0/_static/scripts/bootstrap.js.map
new file mode 100644
index 0000000..4a3502a
--- /dev/null
+++ b/sed/2.1.0/_static/scripts/bootstrap.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"scripts/bootstrap.js","mappings":";mBACA,IAAIA,EAAsB,CCA1BA,EAAwB,CAACC,EAASC,KACjC,IAAI,IAAIC,KAAOD,EACXF,EAAoBI,EAAEF,EAAYC,KAASH,EAAoBI,EAAEH,EAASE,IAC5EE,OAAOC,eAAeL,EAASE,EAAK,CAAEI,YAAY,EAAMC,IAAKN,EAAWC,IAE1E,ECNDH,EAAwB,CAACS,EAAKC,IAAUL,OAAOM,UAAUC,eAAeC,KAAKJ,EAAKC,GCClFV,EAAyBC,IACH,oBAAXa,QAA0BA,OAAOC,aAC1CV,OAAOC,eAAeL,EAASa,OAAOC,YAAa,CAAEC,MAAO,WAE7DX,OAAOC,eAAeL,EAAS,aAAc,CAAEe,OAAO,GAAO,01BCLvD,IAAI,EAAM,MACNC,EAAS,SACTC,EAAQ,QACRC,EAAO,OACPC,EAAO,OACPC,EAAiB,CAAC,EAAKJ,EAAQC,EAAOC,GACtCG,EAAQ,QACRC,EAAM,MACNC,EAAkB,kBAClBC,EAAW,WACXC,EAAS,SACTC,EAAY,YACZC,EAAmCP,EAAeQ,QAAO,SAAUC,EAAKC,GACjF,OAAOD,EAAIE,OAAO,CAACD,EAAY,IAAMT,EAAOS,EAAY,IAAMR,GAChE,GAAG,IACQ,EAA0B,GAAGS,OAAOX,EAAgB,CAACD,IAAOS,QAAO,SAAUC,EAAKC,GAC3F,OAAOD,EAAIE,OAAO,CAACD,EAAWA,EAAY,IAAMT,EAAOS,EAAY,IAAMR,GAC3E,GAAG,IAEQU,EAAa,aACbC,EAAO,OACPC,EAAY,YAEZC,EAAa,aACbC,EAAO,OACPC,EAAY,YAEZC,EAAc,cACdC,EAAQ,QACRC,EAAa,aACbC,EAAiB,CAACT,EAAYC,EAAMC,EAAWC,EAAYC,EAAMC,EAAWC,EAAaC,EAAOC,GC9B5F,SAASE,EAAYC,GAClC,OAAOA,GAAWA,EAAQC,UAAY,IAAIC,cAAgB,IAC5D,CCFe,SAASC,EAAUC,GAChC,GAAY,MAARA,EACF,OAAOC,OAGT,GAAwB,oBAApBD,EAAKE,WAAkC,CACzC,IAAIC,EAAgBH,EAAKG,cACzB,OAAOA,GAAgBA,EAAcC,aAAwBH,MAC/D,CAEA,OAAOD,CACT,CCTA,SAASK,EAAUL,GAEjB,OAAOA,aADUD,EAAUC,GAAMM,SACIN,aAAgBM,OACvD,CAEA,SAASC,EAAcP,GAErB,OAAOA,aADUD,EAAUC,GAAMQ,aACIR,aAAgBQ,WACvD,CAEA,SAASC,EAAaT,GAEpB,MAA0B,oBAAfU,aAKJV,aADUD,EAAUC,GAAMU,YACIV,aAAgBU,WACvD,CCwDA,SACEC,KAAM,cACNC,SAAS,EACTC,MAAO,QACPC,GA5EF,SAAqBC,GACnB,IAAIC,EAAQD,EAAKC,MACjB3D,OAAO4D,KAAKD,EAAME,UAAUC,SAAQ,SAAUR,GAC5C,IAAIS,EAAQJ,EAAMK,OAAOV,IAAS,CAAC,EAC/BW,EAAaN,EAAMM,WAAWX,IAAS,CAAC,EACxCf,EAAUoB,EAAME,SAASP,GAExBJ,EAAcX,IAAaD,EAAYC,KAO5CvC,OAAOkE,OAAO3B,EAAQwB,MAAOA,GAC7B/D,OAAO4D,KAAKK,GAAYH,SAAQ,SAAUR,GACxC,IAAI3C,EAAQsD,EAAWX,IAET,IAAV3C,EACF4B,EAAQ4B,gBAAgBb,GAExBf,EAAQ6B,aAAa
d,GAAgB,IAAV3C,EAAiB,GAAKA,EAErD,IACF,GACF,EAoDE0D,OAlDF,SAAgBC,GACd,IAAIX,EAAQW,EAAMX,MACdY,EAAgB,CAClBlD,OAAQ,CACNmD,SAAUb,EAAMc,QAAQC,SACxB5D,KAAM,IACN6D,IAAK,IACLC,OAAQ,KAEVC,MAAO,CACLL,SAAU,YAEZlD,UAAW,CAAC,GASd,OAPAtB,OAAOkE,OAAOP,EAAME,SAASxC,OAAO0C,MAAOQ,EAAclD,QACzDsC,EAAMK,OAASO,EAEXZ,EAAME,SAASgB,OACjB7E,OAAOkE,OAAOP,EAAME,SAASgB,MAAMd,MAAOQ,EAAcM,OAGnD,WACL7E,OAAO4D,KAAKD,EAAME,UAAUC,SAAQ,SAAUR,GAC5C,IAAIf,EAAUoB,EAAME,SAASP,GACzBW,EAAaN,EAAMM,WAAWX,IAAS,CAAC,EAGxCS,EAFkB/D,OAAO4D,KAAKD,EAAMK,OAAOzD,eAAe+C,GAAQK,EAAMK,OAAOV,GAAQiB,EAAcjB,IAE7E9B,QAAO,SAAUuC,EAAOe,GAElD,OADAf,EAAMe,GAAY,GACXf,CACT,GAAG,CAAC,GAECb,EAAcX,IAAaD,EAAYC,KAI5CvC,OAAOkE,OAAO3B,EAAQwB,MAAOA,GAC7B/D,OAAO4D,KAAKK,GAAYH,SAAQ,SAAUiB,GACxCxC,EAAQ4B,gBAAgBY,EAC1B,IACF,GACF,CACF,EASEC,SAAU,CAAC,kBCjFE,SAASC,EAAiBvD,GACvC,OAAOA,EAAUwD,MAAM,KAAK,EAC9B,CCHO,IAAI,EAAMC,KAAKC,IACX,EAAMD,KAAKE,IACXC,EAAQH,KAAKG,MCFT,SAASC,IACtB,IAAIC,EAASC,UAAUC,cAEvB,OAAc,MAAVF,GAAkBA,EAAOG,QAAUC,MAAMC,QAAQL,EAAOG,QACnDH,EAAOG,OAAOG,KAAI,SAAUC,GACjC,OAAOA,EAAKC,MAAQ,IAAMD,EAAKE,OACjC,IAAGC,KAAK,KAGHT,UAAUU,SACnB,CCTe,SAASC,IACtB,OAAQ,iCAAiCC,KAAKd,IAChD,CCCe,SAASe,EAAsB/D,EAASgE,EAAcC,QAC9C,IAAjBD,IACFA,GAAe,QAGO,IAApBC,IACFA,GAAkB,GAGpB,IAAIC,EAAalE,EAAQ+D,wBACrBI,EAAS,EACTC,EAAS,EAETJ,GAAgBrD,EAAcX,KAChCmE,EAASnE,EAAQqE,YAAc,GAAItB,EAAMmB,EAAWI,OAAStE,EAAQqE,aAAmB,EACxFD,EAASpE,EAAQuE,aAAe,GAAIxB,EAAMmB,EAAWM,QAAUxE,EAAQuE,cAAoB,GAG7F,IACIE,GADOhE,EAAUT,GAAWG,EAAUH,GAAWK,QAC3BoE,eAEtBC,GAAoBb,KAAsBI,EAC1CU,GAAKT,EAAW3F,MAAQmG,GAAoBD,EAAiBA,EAAeG,WAAa,IAAMT,EAC/FU,GAAKX,EAAW9B,KAAOsC,GAAoBD,EAAiBA,EAAeK,UAAY,IAAMV,EAC7FE,EAAQJ,EAAWI,MAAQH,EAC3BK,EAASN,EAAWM,OAASJ,EACjC,MAAO,CACLE,MAAOA,EACPE,OAAQA,EACRpC,IAAKyC,EACLvG,MAAOqG,EAAIL,EACXjG,OAAQwG,EAAIL,EACZjG,KAAMoG,EACNA,EAAGA,EACHE,EAAGA,EAEP,CCrCe,SAASE,EAAc/E,GACpC,IAAIkE,EAAaH,EAAsB/D,GAGnCsE,EAAQtE,EAAQqE,YAChBG,EAASxE,EAAQuE,aAUrB,OARI3B,KAAKoC,IAAId,EAAWI,MAAQA,IAAU,IACxCA,EAAQJ,EAAWI,OAGjB1B,KAAKoC,IAAId,EAAWM,OAASA,IAAW,IAC1CA,EAASN,EAAWM,QAGf,CACLG,EAAG3E,EAAQ4E,WACXC,EAAG7E,EAAQ8E,UACXR,MAAOA,EACPE,OAAQA,EAEZ,CCvBe,SAASS,EAASC,EAAQC,GACvC,IAAIC,EAAWD,EAAME,aAAeF,EAAME,cAE1C,GAAIH,EAAOD,SAASE,GAClB,OAAO,EAEJ,GAAIC,GAAYvE,EAAauE,GAAW,CACzC,IAAIE,EAAOH,EAEX,EAAG,CACD,GAAIG,GAAQJ,EAAOK,WAAWD,GAC5B,OAAO,EAITA,EAAOA,EAAKE,YAAcF,EAAKG,IACjC,OAASH,EACX,CAGF,OAAO,CACT,CCrBe,SAAS,EAAiBtF,GACvC,OAAOG,EAAUH,GAAS0F,iBAAiB1F,EAC7C,CCFe,SAAS2F,EAAe3F,GACrC,MAAO,CAAC,QAAS,KAAM,MAAM4F,QAAQ7F,EAAYC,KAAa,CAChE,CCFe,SAAS6F,EAAmB7F,GAEzC,QAASS,EAAUT,GAAWA,EAAQO,cACtCP,EAAQ8F,WAAazF,OAAOyF,UAAUC,eACxC,CCFe,SAASC,EAAchG,GACpC,MAA6B,SAAzBD,EAAYC,GACPA,EAMPA,EAAQiG,cACRjG,EAAQwF,aACR3E,EAAab,GAAWA,EAAQyF,KAAO,OAEvCI,EAAmB7F,EAGvB,CCVA,SAASkG,EAAoBlG,GAC3B,OAAKW,EAAcX,IACoB,UAAvC,EAAiBA,GAASiC,SAInBjC,EAAQmG,aAHN,IAIX,CAwCe,SAASC,EAAgBpG,GAItC,IAHA,IAAIK,EAASF,EAAUH,GACnBmG,EAAeD,EAAoBlG,GAEhCmG,GAAgBR,EAAeQ,IAA6D,WAA5C,EAAiBA,GAAclE,UACpFkE,EAAeD,EAAoBC,GAGrC,OAAIA,IAA+C,SAA9BpG,EAAYoG,IAA0D,SAA9BpG,EAAYoG,IAAwE,WAA5C,EAAiBA,GAAclE,UAC3H5B,EAGF8F,GAhDT,SAA4BnG,GAC1B,IAAIqG,EAAY,WAAWvC,KAAKd,KAGhC,GAFW,WAAWc,KAAKd,MAEfrC,EAAcX,IAII,UAFX,EAAiBA,GAEnBiC,SACb,OAAO,KAIX,IAAIqE,EAAcN,EAAchG,GAMhC,IAJIa,EAAayF,KACfA,EAAcA,EAAYb,MAGrB9E,EAAc2F,IAAgB,CAAC,OAAQ,QAAQV,QAAQ7F,EAAYuG,IAAgB,GAAG,CAC3F,IAAIC,EAAM,EAAiBD,GAI3B,GAAsB,SAAlBC,EAAIC,WAA4C,SAApBD,EAAIE,aAA0C,UAAhBF,EAAIG,UAAiF,IAA1D,CAAC,YAAa,eAAed,QAAQW,EAAII,aAAsBN,GAAgC,WAAnBE,EAAII,YAA2BN,GAAaE,EAAIK,QAAyB,SAAfL,EAAIK,OACjO,OAAON,EAEPA,EAAcA,EAAYd,UAE9B,CAEA,OAAO,IACT,CAgByBqB,CAAmB7G,IAAYK,CACxD,CCpEe,SAASyG,EAAyB3H,GAC/C,MAAO,CAAC,MAAO,UAAUyG,QA
[... minified bootstrap.js bundle (Bootstrap 5 with bundled Popper.js), shipped as a pydata-sphinx-theme static asset at sed/2.1.0/_static/scripts/bootstrap.js; machine-generated content omitted ...]
jB,IAC5B/Z,KAAKsY,UAAU1I,OAUf5P,KAAKmF,gBAToB,KACvBnF,KAAK4E,SAASvJ,UAAU1B,OAAOkgB,GAAmBE,IAClD/Z,KAAK4E,SAASzjB,gBAAgB,cAC9B6e,KAAK4E,SAASzjB,gBAAgB,QACzB6e,KAAK6E,QAAQpa,SAChB,IAAI0rB,IAAkB9jB,QAExBkO,GAAaqB,QAAQ5B,KAAK4E,SAAUyV,GAAe,GAEfra,KAAK4E,UAAU,IACvD,CACA,OAAAG,GACE/E,KAAKsY,UAAUvT,UACf/E,KAAKwY,WAAW3C,aAChBlR,MAAMI,SACR,CAGA,mBAAAwT,GACE,MASM5d,EAAYmG,QAAQd,KAAK6E,QAAQ+P,UACvC,OAAO,IAAIL,GAAS,CAClBJ,UA3HsB,qBA4HtBxZ,YACAyK,YAAY,EACZiP,YAAarU,KAAK4E,SAAS7f,WAC3BqvB,cAAezZ,EAfK,KACU,WAA1BqF,KAAK6E,QAAQ+P,SAIjB5U,KAAK4P,OAHHrP,GAAaqB,QAAQ5B,KAAK4E,SAAUwV,GAG3B,EAUgC,MAE/C,CACA,oBAAA3B,GACE,OAAO,IAAIlD,GAAU,CACnBF,YAAarV,KAAK4E,UAEtB,CACA,kBAAAiH,GACEtL,GAAac,GAAGrB,KAAK4E,SAAU4V,IAAuBpb,IA5IvC,WA6ITA,EAAMtiB,MAGNkjB,KAAK6E,QAAQmG,SACfhL,KAAK4P,OAGPrP,GAAaqB,QAAQ5B,KAAK4E,SAAUwV,IAAqB,GAE7D,CAGA,sBAAO3d,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAOswB,GAAUrV,oBAAoBtF,KAAM8D,GACjD,GAAsB,iBAAXA,EAAX,CAGA,QAAqB/K,IAAjB1O,EAAKyZ,IAAyBA,EAAOrC,WAAW,MAAmB,gBAAXqC,EAC1D,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,GAAQ9D,KAJb,CAKF,GACF,EAOFO,GAAac,GAAGhc,SAAUk1B,GA7JK,gCA6J2C,SAAUnb,GAClF,MAAM7S,EAASsZ,GAAec,uBAAuB3G,MAIrD,GAHI,CAAC,IAAK,QAAQoB,SAASpB,KAAKiH,UAC9B7H,EAAMkD,iBAEJpH,GAAW8E,MACb,OAEFO,GAAae,IAAI/U,EAAQ8tB,IAAgB,KAEnC1f,GAAUqF,OACZA,KAAKyS,OACP,IAIF,MAAMgH,EAAc5T,GAAeC,QAAQkU,IACvCP,GAAeA,IAAgBltB,GACjCouB,GAAUtV,YAAYoU,GAAa7J,OAExB+K,GAAUrV,oBAAoB/Y,GACtCob,OAAO3H,KACd,IACAO,GAAac,GAAGzhB,OAAQg6B,IAAuB,KAC7C,IAAK,MAAM7f,KAAY8L,GAAe1T,KAAK6nB,IACzCW,GAAUrV,oBAAoBvL,GAAU8V,MAC1C,IAEFtP,GAAac,GAAGzhB,OAAQ06B,IAAc,KACpC,IAAK,MAAM/6B,KAAWsmB,GAAe1T,KAAK,gDACG,UAAvClN,iBAAiB1F,GAASiC,UAC5Bm5B,GAAUrV,oBAAoB/lB,GAASqwB,MAE3C,IAEF/I,GAAqB8T,IAMrBxe,GAAmBwe,IAUnB,MACME,GAAmB,CAEvB,IAAK,CAAC,QAAS,MAAO,KAAM,OAAQ,OAHP,kBAI7BhqB,EAAG,CAAC,SAAU,OAAQ,QAAS,OAC/BiqB,KAAM,GACNhqB,EAAG,GACHiqB,GAAI,GACJC,IAAK,GACLC,KAAM,GACNC,GAAI,GACJC,IAAK,GACLC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJxqB,EAAG,GACH0b,IAAK,CAAC,MAAO,SAAU,MAAO,QAAS,QAAS,UAChD+O,GAAI,GACJC,GAAI,GACJC,EAAG,GACHC,IAAK,GACLC,EAAG,GACHC,MAAO,GACPC,KAAM,GACNC,IAAK,GACLC,IAAK,GACLC,OAAQ,GACRC,EAAG,GACHC,GAAI,IAIAC,GAAgB,IAAIpmB,IAAI,CAAC,aAAc,OAAQ,OAAQ,WAAY,WAAY,SAAU,MAAO,eAShGqmB,GAAmB,0DACnBC,GAAmB,CAAC76B,EAAW86B,KACnC,MAAMC,EAAgB/6B,EAAUvC,SAASC,cACzC,OAAIo9B,EAAqBzb,SAAS0b,IAC5BJ,GAAc/lB,IAAImmB,IACbhc,QAAQ6b,GAAiBt5B,KAAKtB,EAAUg7B,YAM5CF,EAAqB12B,QAAO62B,GAAkBA,aAA0BzY,SAAQ9R,MAAKwqB,GAASA,EAAM55B,KAAKy5B,IAAe,EA0C3HI,GAAY,CAChBC,UAAWtC,GACXuC,QAAS,CAAC,EAEVC,WAAY,GACZxwB,MAAM,EACNywB,UAAU,EACVC,WAAY,KACZC,SAAU,eAENC,GAAgB,CACpBN,UAAW,SACXC,QAAS,SACTC,WAAY,oBACZxwB,KAAM,UACNywB,SAAU,UACVC,WAAY,kBACZC,SAAU,UAENE,GAAqB,CACzBC,MAAO,iCACP5jB,SAAU,oBAOZ,MAAM6jB,WAAwBna,GAC5B,WAAAU,CAAYL,GACVa,QACA3E,KAAK6E,QAAU7E,KAAK6D,WAAWC,EACjC,CAGA,kBAAWJ,GACT,OAAOwZ,EACT,CACA,sBAAWvZ,GACT,OAAO8Z,EACT,CACA,eAAWlhB,GACT,MA3CW,iBA4Cb,CAGA,UAAAshB,GACE,OAAO7gC,OAAOmiB,OAAOa,KAAK6E,QAAQuY,SAASt6B,KAAIghB,GAAU9D,KAAK8d,yBAAyBha,KAAS3d,OAAO2a,QACzG,CACA,UAAAid,GACE,OAAO/d,KAAK6d,aAAantB,OAAS,CACpC,CACA,aAAAstB,CAAcZ,GAMZ,OALApd,KAAKie,cAAcb,GACnBpd,KAAK6E,QAAQuY,QAAU,IAClBpd,KAAK6E,QAAQuY,WACbA,GAEEpd,IACT,CACA,MAAAke,GACE,MAAMC,EAAkB94B,SAASwvB,cAAc,OAC/CsJ,EAAgBC,UAAYpe,KAAKqe,eAAere,KAAK6E,QAAQ2Y,UAC7D,IAAK,MAAOzjB,EAAUukB,KAASthC,OAAOmkB,QAAQnB,KAAK6E,QAAQuY,SACzDpd,KAAKue,YAAYJ,EAAiBG,EAAMvkB,GAE1C,MAAMyjB,EAAWW,EAAgBpY,SAAS,GACpCsX,EAAard,KAAK8d,yBAAyB9d,KAAK6E,QAAQwY,YAI9D,OAHIA,GACFG,EAASniB,UAAU5E,OAAO4mB,EAAWn7B,MAAM,MAEtCs7B,CACT,CAGA,gBAAAvZ,CAAiBH,GACfa,MAAMV,iBAAiBH,GACvB9D
,KAAKie,cAAcna,EAAOsZ,QAC5B,CACA,aAAAa,CAAcO,GACZ,IAAK,MAAOzkB,EAAUqjB,KAAYpgC,OAAOmkB,QAAQqd,GAC/C7Z,MAAMV,iBAAiB,CACrBlK,WACA4jB,MAAOP,GACNM,GAEP,CACA,WAAAa,CAAYf,EAAUJ,EAASrjB,GAC7B,MAAM0kB,EAAkB5Y,GAAeC,QAAQ/L,EAAUyjB,GACpDiB,KAGLrB,EAAUpd,KAAK8d,yBAAyBV,IAKpC,GAAUA,GACZpd,KAAK0e,sBAAsBhkB,GAAW0iB,GAAUqB,GAG9Cze,KAAK6E,QAAQhY,KACf4xB,EAAgBL,UAAYpe,KAAKqe,eAAejB,GAGlDqB,EAAgBE,YAAcvB,EAX5BqB,EAAgB9kB,SAYpB,CACA,cAAA0kB,CAAeG,GACb,OAAOxe,KAAK6E,QAAQyY,SApJxB,SAAsBsB,EAAYzB,EAAW0B,GAC3C,IAAKD,EAAWluB,OACd,OAAOkuB,EAET,GAAIC,GAAgD,mBAArBA,EAC7B,OAAOA,EAAiBD,GAE1B,MACME,GADY,IAAIl/B,OAAOm/B,WACKC,gBAAgBJ,EAAY,aACxD/9B,EAAW,GAAGlC,UAAUmgC,EAAgB5yB,KAAKkU,iBAAiB,MACpE,IAAK,MAAM7gB,KAAWsB,EAAU,CAC9B,MAAMo+B,EAAc1/B,EAAQC,SAASC,cACrC,IAAKzC,OAAO4D,KAAKu8B,GAAW/b,SAAS6d,GAAc,CACjD1/B,EAAQoa,SACR,QACF,CACA,MAAMulB,EAAgB,GAAGvgC,UAAUY,EAAQ0B,YACrCk+B,EAAoB,GAAGxgC,OAAOw+B,EAAU,MAAQ,GAAIA,EAAU8B,IAAgB,IACpF,IAAK,MAAMl9B,KAAam9B,EACjBtC,GAAiB76B,EAAWo9B,IAC/B5/B,EAAQ4B,gBAAgBY,EAAUvC,SAGxC,CACA,OAAOs/B,EAAgB5yB,KAAKkyB,SAC9B,CA2HmCgB,CAAaZ,EAAKxe,KAAK6E,QAAQsY,UAAWnd,KAAK6E,QAAQ0Y,YAAciB,CACtG,CACA,wBAAAV,CAAyBU,GACvB,OAAO3hB,GAAQ2hB,EAAK,CAACxe,MACvB,CACA,qBAAA0e,CAAsBn/B,EAASk/B,GAC7B,GAAIze,KAAK6E,QAAQhY,KAGf,OAFA4xB,EAAgBL,UAAY,QAC5BK,EAAgB3J,OAAOv1B,GAGzBk/B,EAAgBE,YAAcp/B,EAAQo/B,WACxC,EAeF,MACMU,GAAwB,IAAI/oB,IAAI,CAAC,WAAY,YAAa,eAC1DgpB,GAAoB,OAEpBC,GAAoB,OACpBC,GAAyB,iBACzBC,GAAiB,SACjBC,GAAmB,gBACnBC,GAAgB,QAChBC,GAAgB,QAahBC,GAAgB,CACpBC,KAAM,OACNC,IAAK,MACLC,MAAO/jB,KAAU,OAAS,QAC1BgkB,OAAQ,SACRC,KAAMjkB,KAAU,QAAU,QAEtBkkB,GAAY,CAChBhD,UAAWtC,GACXuF,WAAW,EACXnyB,SAAU,kBACVoyB,WAAW,EACXC,YAAa,GACbC,MAAO,EACPvwB,mBAAoB,CAAC,MAAO,QAAS,SAAU,QAC/CnD,MAAM,EACN7E,OAAQ,CAAC,EAAG,GACZtJ,UAAW,MACXszB,aAAc,KACdsL,UAAU,EACVC,WAAY,KACZxjB,UAAU,EACVyjB,SAAU,+GACVgD,MAAO,GACP5e,QAAS,eAEL6e,GAAgB,CACpBtD,UAAW,SACXiD,UAAW,UACXnyB,SAAU,mBACVoyB,UAAW,2BACXC,YAAa,oBACbC,MAAO,kBACPvwB,mBAAoB,QACpBnD,KAAM,UACN7E,OAAQ,0BACRtJ,UAAW,oBACXszB,aAAc,yBACdsL,SAAU,UACVC,WAAY,kBACZxjB,SAAU,mBACVyjB,SAAU,SACVgD,MAAO,4BACP5e,QAAS,UAOX,MAAM8e,WAAgBhc,GACpB,WAAAP,CAAY5kB,EAASukB,GACnB,QAAsB,IAAX,EACT,MAAM,IAAIU,UAAU,+DAEtBG,MAAMplB,EAASukB,GAGf9D,KAAK2gB,YAAa,EAClB3gB,KAAK4gB,SAAW,EAChB5gB,KAAK6gB,WAAa,KAClB7gB,KAAK8gB,eAAiB,CAAC,EACvB9gB,KAAKmS,QAAU,KACfnS,KAAK+gB,iBAAmB,KACxB/gB,KAAKghB,YAAc,KAGnBhhB,KAAKihB,IAAM,KACXjhB,KAAKkhB,gBACAlhB,KAAK6E,QAAQ9K,UAChBiG,KAAKmhB,WAET,CAGA,kBAAWzd,GACT,OAAOyc,EACT,CACA,sBAAWxc,GACT,OAAO8c,EACT,CACA,eAAWlkB,GACT,MAxGW,SAyGb,CAGA,MAAA6kB,GACEphB,KAAK2gB,YAAa,CACpB,CACA,OAAAU,GACErhB,KAAK2gB,YAAa,CACpB,CACA,aAAAW,GACEthB,KAAK2gB,YAAc3gB,KAAK2gB,UAC1B,CACA,MAAAhZ,GACO3H,KAAK2gB,aAGV3gB,KAAK8gB,eAAeS,OAASvhB,KAAK8gB,eAAeS,MAC7CvhB,KAAK2P,WACP3P,KAAKwhB,SAGPxhB,KAAKyhB,SACP,CACA,OAAA1c,GACEmI,aAAalN,KAAK4gB,UAClBrgB,GAAaC,IAAIR,KAAK4E,SAAS5J,QAAQykB,IAAiBC,GAAkB1f,KAAK0hB,mBAC3E1hB,KAAK4E,SAASpJ,aAAa,2BAC7BwE,KAAK4E,SAASxjB,aAAa,QAAS4e,KAAK4E,SAASpJ,aAAa,2BAEjEwE,KAAK2hB,iBACLhd,MAAMI,SACR,CACA,IAAA8K,GACE,GAAoC,SAAhC7P,KAAK4E,SAAS7jB,MAAMgxB,QACtB,MAAM,IAAInO,MAAM,uCAElB,IAAM5D,KAAK4hB,mBAAoB5hB,KAAK2gB,WAClC,OAEF,MAAMnH,EAAYjZ,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAlItD,SAoIXqc,GADapmB,GAAeuE,KAAK4E,WACL5E,KAAK4E,SAAS9kB,cAAcwF,iBAAiBd,SAASwb,KAAK4E,UAC7F,GAAI4U,EAAUxX,mBAAqB6f,EACjC,OAIF7hB,KAAK2hB,iBACL,MAAMV,EAAMjhB,KAAK8hB,iBACjB9hB,KAAK4E,SAASxjB,aAAa,mBAAoB6/B,EAAIzlB,aAAa,OAChE,MAAM,UACJ6kB,GACErgB,KAAK6E,QAYT,GAXK7E,KAAK4E,SAAS9kB,cAAcwF,gBAAgBd,SAASwb,KAAKihB,OAC7DZ,EAAUvL,OAAOmM,GACjB1gB,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAhJpC,cAkJnBxF,KAAKmS,QAAUnS,KAAKwS,cAAcyO,G
AClCA,EAAI5lB,UAAU5E,IAAI8oB,IAMd,iBAAkBl6B,SAASC,gBAC7B,IAAK,MAAM/F,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAK6Z,UAC/CxF,GAAac,GAAG9hB,EAAS,YAAaqc,IAU1CoE,KAAKmF,gBAPY,KACf5E,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAhKrC,WAiKQ,IAApBxF,KAAK6gB,YACP7gB,KAAKwhB,SAEPxhB,KAAK6gB,YAAa,CAAK,GAEK7gB,KAAKihB,IAAKjhB,KAAKgO,cAC/C,CACA,IAAA4B,GACE,GAAK5P,KAAK2P,aAGQpP,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UA/KtD,SAgLHxD,iBAAd,CAQA,GALYhC,KAAK8hB,iBACbzmB,UAAU1B,OAAO4lB,IAIjB,iBAAkBl6B,SAASC,gBAC7B,IAAK,MAAM/F,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAK6Z,UAC/CxF,GAAaC,IAAIjhB,EAAS,YAAaqc,IAG3CoE,KAAK8gB,eAA4B,OAAI,EACrC9gB,KAAK8gB,eAAelB,KAAiB,EACrC5f,KAAK8gB,eAAenB,KAAiB,EACrC3f,KAAK6gB,WAAa,KAYlB7gB,KAAKmF,gBAVY,KACXnF,KAAK+hB,yBAGJ/hB,KAAK6gB,YACR7gB,KAAK2hB,iBAEP3hB,KAAK4E,SAASzjB,gBAAgB,oBAC9Bof,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAzMpC,WAyM8D,GAEnDxF,KAAKihB,IAAKjhB,KAAKgO,cA1B7C,CA2BF,CACA,MAAAjjB,GACMiV,KAAKmS,SACPnS,KAAKmS,QAAQpnB,QAEjB,CAGA,cAAA62B,GACE,OAAO9gB,QAAQd,KAAKgiB,YACtB,CACA,cAAAF,GAIE,OAHK9hB,KAAKihB,MACRjhB,KAAKihB,IAAMjhB,KAAKiiB,kBAAkBjiB,KAAKghB,aAAehhB,KAAKkiB,2BAEtDliB,KAAKihB,GACd,CACA,iBAAAgB,CAAkB7E,GAChB,MAAM6D,EAAMjhB,KAAKmiB,oBAAoB/E,GAASc,SAG9C,IAAK+C,EACH,OAAO,KAETA,EAAI5lB,UAAU1B,OAAO2lB,GAAmBC,IAExC0B,EAAI5lB,UAAU5E,IAAI,MAAMuJ,KAAKmE,YAAY5H,aACzC,MAAM6lB,EAvuGKC,KACb,GACEA,GAAUlgC,KAAKmgC,MA/BH,IA+BSngC,KAAKogC,gBACnBl9B,SAASm9B,eAAeH,IACjC,OAAOA,CAAM,EAmuGGI,CAAOziB,KAAKmE,YAAY5H,MAAM1c,WAK5C,OAJAohC,EAAI7/B,aAAa,KAAMghC,GACnBpiB,KAAKgO,eACPiT,EAAI5lB,UAAU5E,IAAI6oB,IAEb2B,CACT,CACA,UAAAyB,CAAWtF,GACTpd,KAAKghB,YAAc5D,EACfpd,KAAK2P,aACP3P,KAAK2hB,iBACL3hB,KAAK6P,OAET,CACA,mBAAAsS,CAAoB/E,GAYlB,OAXIpd,KAAK+gB,iBACP/gB,KAAK+gB,iBAAiB/C,cAAcZ,GAEpCpd,KAAK+gB,iBAAmB,IAAInD,GAAgB,IACvC5d,KAAK6E,QAGRuY,UACAC,WAAYrd,KAAK8d,yBAAyB9d,KAAK6E,QAAQyb,eAGpDtgB,KAAK+gB,gBACd,CACA,sBAAAmB,GACE,MAAO,CACL,CAAC1C,IAAyBxf,KAAKgiB,YAEnC,CACA,SAAAA,GACE,OAAOhiB,KAAK8d,yBAAyB9d,KAAK6E,QAAQ2b,QAAUxgB,KAAK4E,SAASpJ,aAAa,yBACzF,CAGA,4BAAAmnB,CAA6BvjB,GAC3B,OAAOY,KAAKmE,YAAYmB,oBAAoBlG,EAAMW,eAAgBC,KAAK4iB,qBACzE,CACA,WAAA5U,GACE,OAAOhO,KAAK6E,QAAQub,WAAapgB,KAAKihB,KAAOjhB,KAAKihB,IAAI5lB,UAAU7W,SAAS86B,GAC3E,CACA,QAAA3P,GACE,OAAO3P,KAAKihB,KAAOjhB,KAAKihB,IAAI5lB,UAAU7W,SAAS+6B,GACjD,CACA,aAAA/M,CAAcyO,GACZ,MAAMviC,EAAYme,GAAQmD,KAAK6E,QAAQnmB,UAAW,CAACshB,KAAMihB,EAAKjhB,KAAK4E,WAC7Die,EAAahD,GAAcnhC,EAAU+lB,eAC3C,OAAO,GAAoBzE,KAAK4E,SAAUqc,EAAKjhB,KAAK4S,iBAAiBiQ,GACvE,CACA,UAAA7P,GACE,MAAM,OACJhrB,GACEgY,KAAK6E,QACT,MAAsB,iBAAX7c,EACFA,EAAO9F,MAAM,KAAKY,KAAInF,GAAS4f,OAAOgQ,SAAS5vB,EAAO,MAEzC,mBAAXqK,EACFirB,GAAcjrB,EAAOirB,EAAYjT,KAAK4E,UAExC5c,CACT,CACA,wBAAA81B,CAAyBU,GACvB,OAAO3hB,GAAQ2hB,EAAK,CAACxe,KAAK4E,UAC5B,CACA,gBAAAgO,CAAiBiQ,GACf,MAAM3P,EAAwB,CAC5Bx0B,UAAWmkC,EACXzsB,UAAW,CAAC,CACV9V,KAAM,OACNmB,QAAS,CACPuO,mBAAoBgQ,KAAK6E,QAAQ7U,qBAElC,CACD1P,KAAM,SACNmB,QAAS,CACPuG,OAAQgY,KAAKgT,eAEd,CACD1yB,KAAM,kBACNmB,QAAS,CACPwM,SAAU+R,KAAK6E,QAAQ5W,WAExB,CACD3N,KAAM,QACNmB,QAAS,CACPlC,QAAS,IAAIygB,KAAKmE,YAAY5H,eAE/B,CACDjc,KAAM,kBACNC,SAAS,EACTC,MAAO,aACPC,GAAI4J,IAGF2V,KAAK8hB,iBAAiB1gC,aAAa,wBAAyBiJ,EAAK1J,MAAMjC,UAAU,KAIvF,MAAO,IACFw0B,KACArW,GAAQmD,KAAK6E,QAAQmN,aAAc,CAACkB,IAE3C,CACA,aAAAgO,GACE,MAAM4B,EAAW9iB,KAAK6E,QAAQjD,QAAQ1f,MAAM,KAC5C,IAAK,MAAM0f,KAAWkhB,EACpB,GAAgB,UAAZlhB,EACFrB,GAAac,GAAGrB,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAjVlC,SAiV4DxF,KAAK6E,QAAQ9K,UAAUqF,IAC/EY,KAAK2iB,6BAA6BvjB,GAC1CuI,QAAQ,SAEb,GA3VU,WA2VN/F,EAA4B,CACrC,MAAMmhB,EAAUnhB,IAAY+d,GAAgB3f,KAAKmE,YAAYqB,UAnV5C,cAmV0ExF,KAAKmE,YAAYqB,UArV5F,WAsVVwd,EAAWphB,IAAY+d,GAAgB3f,KAAKmE,YAAYqB,UAnV7C,cAmV2ExF,KAAKmE,YAAYqB
,UArV5F,YAsVjBjF,GAAac,GAAGrB,KAAK4E,SAAUme,EAAS/iB,KAAK6E,QAAQ9K,UAAUqF,IAC7D,MAAMkU,EAAUtT,KAAK2iB,6BAA6BvjB,GAClDkU,EAAQwN,eAA8B,YAAf1hB,EAAMqB,KAAqBmf,GAAgBD,KAAiB,EACnFrM,EAAQmO,QAAQ,IAElBlhB,GAAac,GAAGrB,KAAK4E,SAAUoe,EAAUhjB,KAAK6E,QAAQ9K,UAAUqF,IAC9D,MAAMkU,EAAUtT,KAAK2iB,6BAA6BvjB,GAClDkU,EAAQwN,eAA8B,aAAf1hB,EAAMqB,KAAsBmf,GAAgBD,IAAiBrM,EAAQ1O,SAASpgB,SAAS4a,EAAMU,eACpHwT,EAAQkO,QAAQ,GAEpB,CAEFxhB,KAAK0hB,kBAAoB,KACnB1hB,KAAK4E,UACP5E,KAAK4P,MACP,EAEFrP,GAAac,GAAGrB,KAAK4E,SAAS5J,QAAQykB,IAAiBC,GAAkB1f,KAAK0hB,kBAChF,CACA,SAAAP,GACE,MAAMX,EAAQxgB,KAAK4E,SAASpJ,aAAa,SACpCglB,IAGAxgB,KAAK4E,SAASpJ,aAAa,eAAkBwE,KAAK4E,SAAS+Z,YAAYhZ,QAC1E3F,KAAK4E,SAASxjB,aAAa,aAAco/B,GAE3CxgB,KAAK4E,SAASxjB,aAAa,yBAA0Bo/B,GACrDxgB,KAAK4E,SAASzjB,gBAAgB,SAChC,CACA,MAAAsgC,GACMzhB,KAAK2P,YAAc3P,KAAK6gB,WAC1B7gB,KAAK6gB,YAAa,GAGpB7gB,KAAK6gB,YAAa,EAClB7gB,KAAKijB,aAAY,KACXjjB,KAAK6gB,YACP7gB,KAAK6P,MACP,GACC7P,KAAK6E,QAAQ0b,MAAM1Q,MACxB,CACA,MAAA2R,GACMxhB,KAAK+hB,yBAGT/hB,KAAK6gB,YAAa,EAClB7gB,KAAKijB,aAAY,KACVjjB,KAAK6gB,YACR7gB,KAAK4P,MACP,GACC5P,KAAK6E,QAAQ0b,MAAM3Q,MACxB,CACA,WAAAqT,CAAYrlB,EAASslB,GACnBhW,aAAalN,KAAK4gB,UAClB5gB,KAAK4gB,SAAW/iB,WAAWD,EAASslB,EACtC,CACA,oBAAAnB,GACE,OAAO/kC,OAAOmiB,OAAOa,KAAK8gB,gBAAgB1f,UAAS,EACrD,CACA,UAAAyC,CAAWC,GACT,MAAMqf,EAAiBngB,GAAYG,kBAAkBnD,KAAK4E,UAC1D,IAAK,MAAMwe,KAAiBpmC,OAAO4D,KAAKuiC,GAClC9D,GAAsB1oB,IAAIysB,WACrBD,EAAeC,GAU1B,OAPAtf,EAAS,IACJqf,KACmB,iBAAXrf,GAAuBA,EAASA,EAAS,CAAC,GAEvDA,EAAS9D,KAAK+D,gBAAgBD,GAC9BA,EAAS9D,KAAKgE,kBAAkBF,GAChC9D,KAAKiE,iBAAiBH,GACfA,CACT,CACA,iBAAAE,CAAkBF,GAchB,OAbAA,EAAOuc,WAAiC,IAArBvc,EAAOuc,UAAsBh7B,SAAS6G,KAAOwO,GAAWoJ,EAAOuc,WACtD,iBAAjBvc,EAAOyc,QAChBzc,EAAOyc,MAAQ,CACb1Q,KAAM/L,EAAOyc,MACb3Q,KAAM9L,EAAOyc,QAGW,iBAAjBzc,EAAO0c,QAChB1c,EAAO0c,MAAQ1c,EAAO0c,MAAM3gC,YAEA,iBAAnBikB,EAAOsZ,UAChBtZ,EAAOsZ,QAAUtZ,EAAOsZ,QAAQv9B,YAE3BikB,CACT,CACA,kBAAA8e,GACE,MAAM9e,EAAS,CAAC,EAChB,IAAK,MAAOhnB,EAAKa,KAAUX,OAAOmkB,QAAQnB,KAAK6E,SACzC7E,KAAKmE,YAAYT,QAAQ5mB,KAASa,IACpCmmB,EAAOhnB,GAAOa,GASlB,OANAmmB,EAAO/J,UAAW,EAClB+J,EAAOlC,QAAU,SAKVkC,CACT,CACA,cAAA6d,GACM3hB,KAAKmS,UACPnS,KAAKmS,QAAQnZ,UACbgH,KAAKmS,QAAU,MAEbnS,KAAKihB,MACPjhB,KAAKihB,IAAItnB,SACTqG,KAAKihB,IAAM,KAEf,CAGA,sBAAOxkB,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAOq2B,GAAQpb,oBAAoBtF,KAAM8D,GAC/C,GAAsB,iBAAXA,EAAX,CAGA,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,EAOF3H,GAAmBukB,IAcnB,MACM2C,GAAiB,kBACjBC,GAAmB,gBACnBC,GAAY,IACb7C,GAAQhd,QACX0Z,QAAS,GACTp1B,OAAQ,CAAC,EAAG,GACZtJ,UAAW,QACX8+B,SAAU,8IACV5b,QAAS,SAEL4hB,GAAgB,IACjB9C,GAAQ/c,YACXyZ,QAAS,kCAOX,MAAMqG,WAAgB/C,GAEpB,kBAAWhd,GACT,OAAO6f,EACT,CACA,sBAAW5f,GACT,OAAO6f,EACT,CACA,eAAWjnB,GACT,MA7BW,SA8Bb,CAGA,cAAAqlB,GACE,OAAO5hB,KAAKgiB,aAAehiB,KAAK0jB,aAClC,CAGA,sBAAAxB,GACE,MAAO,CACL,CAACmB,IAAiBrjB,KAAKgiB,YACvB,CAACsB,IAAmBtjB,KAAK0jB,cAE7B,CACA,WAAAA,GACE,OAAO1jB,KAAK8d,yBAAyB9d,KAAK6E,QAAQuY,QACpD,CAGA,sBAAO3gB,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAOo5B,GAAQne,oBAAoBtF,KAAM8D,GAC/C,GAAsB,iBAAXA,EAAX,CAGA,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,EAOF3H,GAAmBsnB,IAcnB,MAEME,GAAc,gBAEdC,GAAiB,WAAWD,KAC5BE,GAAc,QAAQF,KACtBG,GAAwB,OAAOH,cAE/BI,GAAsB,SAEtBC,GAAwB,SAExBC,GAAqB,YAGrBC,GAAsB,GAAGD,mBAA+CA,uBAGxEE,GAAY,CAChBn8B,OAAQ,KAERo8B,WAAY,eACZC,cAAc,EACd93B,OAAQ,KACR+3B,UAAW,CAAC,GAAK,GAAK,IAElBC,GAAgB,CACpBv8B,OAAQ,gBAERo8B,WAAY,SACZC,aAAc,UACd93B,OAAQ,UACR+3B,UAAW,SAOb,MAAME,WAAkB9f,GACtB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GAGf9D,KAAKykB,aAAe,IAAIvzB,IACxB8O,KAAK0kB,oBAAsB,IAAIxzB,IAC/B8O,KAA
K2kB,aAA6D,YAA9C1/B,iBAAiB+a,KAAK4E,UAAU5Y,UAA0B,KAAOgU,KAAK4E,SAC1F5E,KAAK4kB,cAAgB,KACrB5kB,KAAK6kB,UAAY,KACjB7kB,KAAK8kB,oBAAsB,CACzBC,gBAAiB,EACjBC,gBAAiB,GAEnBhlB,KAAKilB,SACP,CAGA,kBAAWvhB,GACT,OAAOygB,EACT,CACA,sBAAWxgB,GACT,OAAO4gB,EACT,CACA,eAAWhoB,GACT,MAhEW,WAiEb,CAGA,OAAA0oB,GACEjlB,KAAKklB,mCACLllB,KAAKmlB,2BACDnlB,KAAK6kB,UACP7kB,KAAK6kB,UAAUO,aAEfplB,KAAK6kB,UAAY7kB,KAAKqlB,kBAExB,IAAK,MAAMC,KAAWtlB,KAAK0kB,oBAAoBvlB,SAC7Ca,KAAK6kB,UAAUU,QAAQD,EAE3B,CACA,OAAAvgB,GACE/E,KAAK6kB,UAAUO,aACfzgB,MAAMI,SACR,CAGA,iBAAAf,CAAkBF,GAShB,OAPAA,EAAOvX,OAASmO,GAAWoJ,EAAOvX,SAAWlH,SAAS6G,KAGtD4X,EAAOsgB,WAAatgB,EAAO9b,OAAS,GAAG8b,EAAO9b,oBAAsB8b,EAAOsgB,WAC3C,iBAArBtgB,EAAOwgB,YAChBxgB,EAAOwgB,UAAYxgB,EAAOwgB,UAAUpiC,MAAM,KAAKY,KAAInF,GAAS4f,OAAOC,WAAW7f,MAEzEmmB,CACT,CACA,wBAAAqhB,GACOnlB,KAAK6E,QAAQwf,eAKlB9jB,GAAaC,IAAIR,KAAK6E,QAAQtY,OAAQs3B,IACtCtjB,GAAac,GAAGrB,KAAK6E,QAAQtY,OAAQs3B,GAAaG,IAAuB5kB,IACvE,MAAMomB,EAAoBxlB,KAAK0kB,oBAAoBvnC,IAAIiiB,EAAM7S,OAAOtB,MACpE,GAAIu6B,EAAmB,CACrBpmB,EAAMkD,iBACN,MAAM3G,EAAOqE,KAAK2kB,cAAgB/kC,OAC5BmE,EAASyhC,EAAkBnhC,UAAY2b,KAAK4E,SAASvgB,UAC3D,GAAIsX,EAAK8pB,SAKP,YAJA9pB,EAAK8pB,SAAS,CACZ9jC,IAAKoC,EACL2hC,SAAU,WAMd/pB,EAAKlQ,UAAY1H,CACnB,KAEJ,CACA,eAAAshC,GACE,MAAM5jC,EAAU,CACdka,KAAMqE,KAAK2kB,aACXL,UAAWtkB,KAAK6E,QAAQyf,UACxBF,WAAYpkB,KAAK6E,QAAQuf,YAE3B,OAAO,IAAIuB,sBAAqBxkB,GAAWnB,KAAK4lB,kBAAkBzkB,IAAU1f,EAC9E,CAGA,iBAAAmkC,CAAkBzkB,GAChB,MAAM0kB,EAAgBlI,GAAS3d,KAAKykB,aAAatnC,IAAI,IAAIwgC,EAAMpxB,OAAO4N,MAChEub,EAAWiI,IACf3d,KAAK8kB,oBAAoBC,gBAAkBpH,EAAMpxB,OAAOlI,UACxD2b,KAAK8lB,SAASD,EAAclI,GAAO,EAE/BqH,GAAmBhlB,KAAK2kB,cAAgBt/B,SAASC,iBAAiBmG,UAClEs6B,EAAkBf,GAAmBhlB,KAAK8kB,oBAAoBE,gBACpEhlB,KAAK8kB,oBAAoBE,gBAAkBA,EAC3C,IAAK,MAAMrH,KAASxc,EAAS,CAC3B,IAAKwc,EAAMqI,eAAgB,CACzBhmB,KAAK4kB,cAAgB,KACrB5kB,KAAKimB,kBAAkBJ,EAAclI,IACrC,QACF,CACA,MAAMuI,EAA2BvI,EAAMpxB,OAAOlI,WAAa2b,KAAK8kB,oBAAoBC,gBAEpF,GAAIgB,GAAmBG,GAGrB,GAFAxQ,EAASiI,IAEJqH,EACH,YAMCe,GAAoBG,GACvBxQ,EAASiI,EAEb,CACF,CACA,gCAAAuH,GACEllB,KAAKykB,aAAe,IAAIvzB,IACxB8O,KAAK0kB,oBAAsB,IAAIxzB,IAC/B,MAAMi1B,EAActgB,GAAe1T,KAAK6xB,GAAuBhkB,KAAK6E,QAAQtY,QAC5E,IAAK,MAAM65B,KAAUD,EAAa,CAEhC,IAAKC,EAAOn7B,MAAQiQ,GAAWkrB,GAC7B,SAEF,MAAMZ,EAAoB3f,GAAeC,QAAQugB,UAAUD,EAAOn7B,MAAO+U,KAAK4E,UAG1EjK,GAAU6qB,KACZxlB,KAAKykB,aAAa1yB,IAAIs0B,UAAUD,EAAOn7B,MAAOm7B,GAC9CpmB,KAAK0kB,oBAAoB3yB,IAAIq0B,EAAOn7B,KAAMu6B,GAE9C,CACF,CACA,QAAAM,CAASv5B,GACHyT,KAAK4kB,gBAAkBr4B,IAG3ByT,KAAKimB,kBAAkBjmB,KAAK6E,QAAQtY,QACpCyT,KAAK4kB,cAAgBr4B,EACrBA,EAAO8O,UAAU5E,IAAIstB,IACrB/jB,KAAKsmB,iBAAiB/5B,GACtBgU,GAAaqB,QAAQ5B,KAAK4E,SAAUgf,GAAgB,CAClD9jB,cAAevT,IAEnB,CACA,gBAAA+5B,CAAiB/5B,GAEf,GAAIA,EAAO8O,UAAU7W,SA9LQ,iBA+L3BqhB,GAAeC,QArLc,mBAqLsBvZ,EAAOyO,QAtLtC,cAsLkEK,UAAU5E,IAAIstB,SAGtG,IAAK,MAAMwC,KAAa1gB,GAAeI,QAAQ1Z,EA9LnB,qBAiM1B,IAAK,MAAMxJ,KAAQ8iB,GAAeM,KAAKogB,EAAWrC,IAChDnhC,EAAKsY,UAAU5E,IAAIstB,GAGzB,CACA,iBAAAkC,CAAkBxhC,GAChBA,EAAO4W,UAAU1B,OAAOoqB,IACxB,MAAMyC,EAAc3gB,GAAe1T,KAAK,GAAG6xB,MAAyBD,KAAuBt/B,GAC3F,IAAK,MAAM9E,KAAQ6mC,EACjB7mC,EAAK0b,UAAU1B,OAAOoqB,GAE1B,CAGA,sBAAOtnB,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAOm6B,GAAUlf,oBAAoBtF,KAAM8D,GACjD,GAAsB,iBAAXA,EAAX,CAGA,QAAqB/K,IAAjB1O,EAAKyZ,IAAyBA,EAAOrC,WAAW,MAAmB,gBAAXqC,EAC1D,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,EAOFvD,GAAac,GAAGzhB,OAAQkkC,IAAuB,KAC7C,IAAK,MAAM2C,KAAO5gB,GAAe1T,KApOT,0BAqOtBqyB,GAAUlf,oBAAoBmhB,EAChC,IAOFtqB,GAAmBqoB,IAcnB,MAEMkC,GAAc,UACdC,GAAe,OAAOD,KACtBE,GAAiB,SAASF,KAC1BG,GAAe,OAAOH,KACtBI,GAAgB,QAAQJ,KACxBK,GAAuB,QAAQL,KAC/BM,GAAgB,UAAUN,KAC1BO,GAAsB,OAAOP,KAC7BQ,GAAiB,YACjBC,GAA
kB,aAClBC,GAAe,UACfC,GAAiB,YACjBC,GAAW,OACXC,GAAU,MACVC,GAAoB,SACpBC,GAAoB,OACpBC,GAAoB,OAEpBC,GAA2B,mBAE3BC,GAA+B,QAAQD,MAIvCE,GAAuB,2EACvBC,GAAsB,YAFOF,uBAAiDA,mBAA6CA,OAE/EC,KAC5CE,GAA8B,IAAIP,8BAA6CA,+BAA8CA,4BAMnI,MAAMQ,WAAYtjB,GAChB,WAAAP,CAAY5kB,GACVolB,MAAMplB,GACNygB,KAAKoS,QAAUpS,KAAK4E,SAAS5J,QAdN,uCAelBgF,KAAKoS,UAOVpS,KAAKioB,sBAAsBjoB,KAAKoS,QAASpS,KAAKkoB,gBAC9C3nB,GAAac,GAAGrB,KAAK4E,SAAUoiB,IAAe5nB,GAASY,KAAK6M,SAASzN,KACvE,CAGA,eAAW7C,GACT,MAnDW,KAoDb,CAGA,IAAAsT,GAEE,MAAMsY,EAAYnoB,KAAK4E,SACvB,GAAI5E,KAAKooB,cAAcD,GACrB,OAIF,MAAME,EAASroB,KAAKsoB,iBACdC,EAAYF,EAAS9nB,GAAaqB,QAAQymB,EAAQ1B,GAAc,CACpE7mB,cAAeqoB,IACZ,KACa5nB,GAAaqB,QAAQumB,EAAWtB,GAAc,CAC9D/mB,cAAeuoB,IAEHrmB,kBAAoBumB,GAAaA,EAAUvmB,mBAGzDhC,KAAKwoB,YAAYH,EAAQF,GACzBnoB,KAAKyoB,UAAUN,EAAWE,GAC5B,CAGA,SAAAI,CAAUlpC,EAASmpC,GACZnpC,IAGLA,EAAQ8b,UAAU5E,IAAI+wB,IACtBxnB,KAAKyoB,UAAU5iB,GAAec,uBAAuBpnB,IAcrDygB,KAAKmF,gBAZY,KACsB,QAAjC5lB,EAAQic,aAAa,SAIzBjc,EAAQ4B,gBAAgB,YACxB5B,EAAQ6B,aAAa,iBAAiB,GACtC4e,KAAK2oB,gBAAgBppC,GAAS,GAC9BghB,GAAaqB,QAAQriB,EAASunC,GAAe,CAC3ChnB,cAAe4oB,KAPfnpC,EAAQ8b,UAAU5E,IAAIixB,GAQtB,GAE0BnoC,EAASA,EAAQ8b,UAAU7W,SAASijC,KACpE,CACA,WAAAe,CAAYjpC,EAASmpC,GACdnpC,IAGLA,EAAQ8b,UAAU1B,OAAO6tB,IACzBjoC,EAAQq7B,OACR5a,KAAKwoB,YAAY3iB,GAAec,uBAAuBpnB,IAcvDygB,KAAKmF,gBAZY,KACsB,QAAjC5lB,EAAQic,aAAa,SAIzBjc,EAAQ6B,aAAa,iBAAiB,GACtC7B,EAAQ6B,aAAa,WAAY,MACjC4e,KAAK2oB,gBAAgBppC,GAAS,GAC9BghB,GAAaqB,QAAQriB,EAASqnC,GAAgB,CAC5C9mB,cAAe4oB,KAPfnpC,EAAQ8b,UAAU1B,OAAO+tB,GAQzB,GAE0BnoC,EAASA,EAAQ8b,UAAU7W,SAASijC,KACpE,CACA,QAAA5a,CAASzN,GACP,IAAK,CAAC8nB,GAAgBC,GAAiBC,GAAcC,GAAgBC,GAAUC,IAASnmB,SAAShC,EAAMtiB,KACrG,OAEFsiB,EAAM0U,kBACN1U,EAAMkD,iBACN,MAAMyD,EAAW/F,KAAKkoB,eAAe/hC,QAAO5G,IAAY2b,GAAW3b,KACnE,IAAIqpC,EACJ,GAAI,CAACtB,GAAUC,IAASnmB,SAAShC,EAAMtiB,KACrC8rC,EAAoB7iB,EAAS3G,EAAMtiB,MAAQwqC,GAAW,EAAIvhB,EAASrV,OAAS,OACvE,CACL,MAAM8c,EAAS,CAAC2Z,GAAiBE,IAAgBjmB,SAAShC,EAAMtiB,KAChE8rC,EAAoB9qB,GAAqBiI,EAAU3G,EAAM7S,OAAQihB,GAAQ,EAC3E,CACIob,IACFA,EAAkBnW,MAAM,CACtBoW,eAAe,IAEjBb,GAAI1iB,oBAAoBsjB,GAAmB/Y,OAE/C,CACA,YAAAqY,GAEE,OAAOriB,GAAe1T,KAAK21B,GAAqB9nB,KAAKoS,QACvD,CACA,cAAAkW,GACE,OAAOtoB,KAAKkoB,eAAe/1B,MAAKzN,GAASsb,KAAKooB,cAAc1jC,MAAW,IACzE,CACA,qBAAAujC,CAAsBxjC,EAAQshB,GAC5B/F,KAAK8oB,yBAAyBrkC,EAAQ,OAAQ,WAC9C,IAAK,MAAMC,KAASqhB,EAClB/F,KAAK+oB,6BAA6BrkC,EAEtC,CACA,4BAAAqkC,CAA6BrkC,GAC3BA,EAAQsb,KAAKgpB,iBAAiBtkC,GAC9B,MAAMukC,EAAWjpB,KAAKooB,cAAc1jC,GAC9BwkC,EAAYlpB,KAAKmpB,iBAAiBzkC,GACxCA,EAAMtD,aAAa,gBAAiB6nC,GAChCC,IAAcxkC,GAChBsb,KAAK8oB,yBAAyBI,EAAW,OAAQ,gBAE9CD,GACHvkC,EAAMtD,aAAa,WAAY,MAEjC4e,KAAK8oB,yBAAyBpkC,EAAO,OAAQ,OAG7Csb,KAAKopB,mCAAmC1kC,EAC1C,CACA,kCAAA0kC,CAAmC1kC,GACjC,MAAM6H,EAASsZ,GAAec,uBAAuBjiB,GAChD6H,IAGLyT,KAAK8oB,yBAAyBv8B,EAAQ,OAAQ,YAC1C7H,EAAMyV,IACR6F,KAAK8oB,yBAAyBv8B,EAAQ,kBAAmB,GAAG7H,EAAMyV,MAEtE,CACA,eAAAwuB,CAAgBppC,EAAS8pC,GACvB,MAAMH,EAAYlpB,KAAKmpB,iBAAiB5pC,GACxC,IAAK2pC,EAAU7tB,UAAU7W,SApKN,YAqKjB,OAEF,MAAMmjB,EAAS,CAAC5N,EAAUoa,KACxB,MAAM50B,EAAUsmB,GAAeC,QAAQ/L,EAAUmvB,GAC7C3pC,GACFA,EAAQ8b,UAAUsM,OAAOwM,EAAWkV,EACtC,EAEF1hB,EAAOggB,GAA0BH,IACjC7f,EA5K2B,iBA4KI+f,IAC/BwB,EAAU9nC,aAAa,gBAAiBioC,EAC1C,CACA,wBAAAP,CAAyBvpC,EAASwC,EAAWpE,GACtC4B,EAAQgc,aAAaxZ,IACxBxC,EAAQ6B,aAAaW,EAAWpE,EAEpC,CACA,aAAAyqC,CAAc9Y,GACZ,OAAOA,EAAKjU,UAAU7W,SAASgjC,GACjC,CAGA,gBAAAwB,CAAiB1Z,GACf,OAAOA,EAAKtJ,QAAQ8hB,IAAuBxY,EAAOzJ,GAAeC,QAAQgiB,GAAqBxY,EAChG,CAGA,gBAAA6Z,CAAiB7Z,GACf,OAAOA,EAAKtU,QA5LO,gCA4LoBsU,CACzC,CAGA,sBAAO7S,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAO29B,GAAI1iB,oBAAoBtF,MACrC,GAAsB,iBAAX8D,EAAX,CAGA,QAAqB/K,IAAjB1O,EAAKyZ
,IAAyBA,EAAOrC,WAAW,MAAmB,gBAAXqC,EAC1D,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,EAOFvD,GAAac,GAAGhc,SAAU0hC,GAAsBc,IAAsB,SAAUzoB,GAC1E,CAAC,IAAK,QAAQgC,SAASpB,KAAKiH,UAC9B7H,EAAMkD,iBAEJpH,GAAW8E,OAGfgoB,GAAI1iB,oBAAoBtF,MAAM6P,MAChC,IAKAtP,GAAac,GAAGzhB,OAAQqnC,IAAqB,KAC3C,IAAK,MAAM1nC,KAAWsmB,GAAe1T,KAAK41B,IACxCC,GAAI1iB,oBAAoB/lB,EAC1B,IAMF4c,GAAmB6rB,IAcnB,MAEMhjB,GAAY,YACZskB,GAAkB,YAAYtkB,KAC9BukB,GAAiB,WAAWvkB,KAC5BwkB,GAAgB,UAAUxkB,KAC1BykB,GAAiB,WAAWzkB,KAC5B0kB,GAAa,OAAO1kB,KACpB2kB,GAAe,SAAS3kB,KACxB4kB,GAAa,OAAO5kB,KACpB6kB,GAAc,QAAQ7kB,KAEtB8kB,GAAkB,OAClBC,GAAkB,OAClBC,GAAqB,UACrBrmB,GAAc,CAClByc,UAAW,UACX6J,SAAU,UACV1J,MAAO,UAEH7c,GAAU,CACd0c,WAAW,EACX6J,UAAU,EACV1J,MAAO,KAOT,MAAM2J,WAAcxlB,GAClB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GACf9D,KAAK4gB,SAAW,KAChB5gB,KAAKmqB,sBAAuB,EAC5BnqB,KAAKoqB,yBAA0B,EAC/BpqB,KAAKkhB,eACP,CAGA,kBAAWxd,GACT,OAAOA,EACT,CACA,sBAAWC,GACT,OAAOA,EACT,CACA,eAAWpH,GACT,MA/CS,OAgDX,CAGA,IAAAsT,GACoBtP,GAAaqB,QAAQ5B,KAAK4E,SAAUglB,IACxC5nB,mBAGdhC,KAAKqqB,gBACDrqB,KAAK6E,QAAQub,WACfpgB,KAAK4E,SAASvJ,UAAU5E,IA/CN,QAsDpBuJ,KAAK4E,SAASvJ,UAAU1B,OAAOmwB,IAC/BjuB,GAAOmE,KAAK4E,UACZ5E,KAAK4E,SAASvJ,UAAU5E,IAAIszB,GAAiBC,IAC7ChqB,KAAKmF,gBARY,KACfnF,KAAK4E,SAASvJ,UAAU1B,OAAOqwB,IAC/BzpB,GAAaqB,QAAQ5B,KAAK4E,SAAUilB,IACpC7pB,KAAKsqB,oBAAoB,GAKGtqB,KAAK4E,SAAU5E,KAAK6E,QAAQub,WAC5D,CACA,IAAAxQ,GACO5P,KAAKuqB,YAGQhqB,GAAaqB,QAAQ5B,KAAK4E,SAAU8kB,IACxC1nB,mBAQdhC,KAAK4E,SAASvJ,UAAU5E,IAAIuzB,IAC5BhqB,KAAKmF,gBANY,KACfnF,KAAK4E,SAASvJ,UAAU5E,IAAIqzB,IAC5B9pB,KAAK4E,SAASvJ,UAAU1B,OAAOqwB,GAAoBD,IACnDxpB,GAAaqB,QAAQ5B,KAAK4E,SAAU+kB,GAAa,GAGrB3pB,KAAK4E,SAAU5E,KAAK6E,QAAQub,YAC5D,CACA,OAAArb,GACE/E,KAAKqqB,gBACDrqB,KAAKuqB,WACPvqB,KAAK4E,SAASvJ,UAAU1B,OAAOowB,IAEjCplB,MAAMI,SACR,CACA,OAAAwlB,GACE,OAAOvqB,KAAK4E,SAASvJ,UAAU7W,SAASulC,GAC1C,CAIA,kBAAAO,GACOtqB,KAAK6E,QAAQolB,WAGdjqB,KAAKmqB,sBAAwBnqB,KAAKoqB,0BAGtCpqB,KAAK4gB,SAAW/iB,YAAW,KACzBmC,KAAK4P,MAAM,GACV5P,KAAK6E,QAAQ0b,QAClB,CACA,cAAAiK,CAAeprB,EAAOqrB,GACpB,OAAQrrB,EAAMqB,MACZ,IAAK,YACL,IAAK,WAEDT,KAAKmqB,qBAAuBM,EAC5B,MAEJ,IAAK,UACL,IAAK,WAEDzqB,KAAKoqB,wBAA0BK,EAIrC,GAAIA,EAEF,YADAzqB,KAAKqqB,gBAGP,MAAM5c,EAAcrO,EAAMU,cACtBE,KAAK4E,WAAa6I,GAAezN,KAAK4E,SAASpgB,SAASipB,IAG5DzN,KAAKsqB,oBACP,CACA,aAAApJ,GACE3gB,GAAac,GAAGrB,KAAK4E,SAAU0kB,IAAiBlqB,GAASY,KAAKwqB,eAAeprB,GAAO,KACpFmB,GAAac,GAAGrB,KAAK4E,SAAU2kB,IAAgBnqB,GAASY,KAAKwqB,eAAeprB,GAAO,KACnFmB,GAAac,GAAGrB,KAAK4E,SAAU4kB,IAAepqB,GAASY,KAAKwqB,eAAeprB,GAAO,KAClFmB,GAAac,GAAGrB,KAAK4E,SAAU6kB,IAAgBrqB,GAASY,KAAKwqB,eAAeprB,GAAO,IACrF,CACA,aAAAirB,GACEnd,aAAalN,KAAK4gB,UAClB5gB,KAAK4gB,SAAW,IAClB,CAGA,sBAAOnkB,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAO6/B,GAAM5kB,oBAAoBtF,KAAM8D,GAC7C,GAAsB,iBAAXA,EAAqB,CAC9B,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,GAAQ9D,KACf,CACF,GACF,ECr0IK,SAAS0qB,GAAcruB,GACD,WAAvBhX,SAASuX,WAAyBP,IACjChX,SAASyF,iBAAiB,mBAAoBuR,EACrD,CDy0IAwK,GAAqBqjB,IAMrB/tB,GAAmB+tB,IEpyInBQ,IAzCA,WAC2B,GAAGt4B,MAAM5U,KAChC6H,SAAS+a,iBAAiB,+BAETtd,KAAI,SAAU6nC,GAC/B,OAAO,IAAI,GAAkBA,EAAkB,CAC7CpK,MAAO,CAAE1Q,KAAM,IAAKD,KAAM,MAE9B,GACF,IAiCA8a,IA5BA,WACYrlC,SAASm9B,eAAe,mBAC9B13B,iBAAiB,SAAS,WAC5BzF,SAAS6G,KAAKT,UAAY,EAC1BpG,SAASC,gBAAgBmG,UAAY,CACvC,GACF,IAuBAi/B,IArBA,WACE,IAAIE,EAAMvlC,SAASm9B,eAAe,mBAC9BqI,EAASxlC,SACVylC,uBAAuB,aAAa,GACpCxnC,wBACH1D,OAAOkL,iBAAiB,UAAU,WAC5BkV,KAAK+qB,UAAY/qB,KAAKgrB,SAAWhrB,KAAKgrB,QAAUH,EAAOjtC,OACzDgtC,EAAI7pC,MAAMgxB,QAAU,QAEpB6Y,EAAI7pC,MAAMgxB,QAAU,OAEtB/R,KAAK+qB,UAAY/qB,KAAKgrB,OACxB,GACF,IAUAprC,OAAOqrC,UAAY","sources":["webpack://pydata_sphinx_
theme/webpack/bootstrap","webpack://pydata_sphinx_theme/webpack/runtime/define property getters","webpack://pydata_sphinx_theme/webpack/runtime/hasOwnProperty shorthand","webpack://pydata_sphinx_theme/webpack/runtime/make namespace object","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/enums.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getNodeName.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getWindow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/instanceOf.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/applyStyles.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getBasePlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/math.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/userAgent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/isLayoutViewport.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getBoundingClientRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getLayoutRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/contains.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getComputedStyle.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/isTableElement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getDocumentElement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getParentNode.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getOffsetParent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getMainAxisFromPlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/within.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/mergePaddingObject.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getFreshSideObject.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/expandToHashMap.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/arrow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getVariation.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/computeStyles.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/eventListeners.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getOppositePlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getOppositeVariationPlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getWindowScroll.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getWindowScrollBarX.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/isScrollParent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getScrollParent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/listScrollParents.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/rectToClientRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getClippingRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getV
iewportRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getDocumentRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/computeOffsets.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/detectOverflow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/flip.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/computeAutoPlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/hide.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/offset.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/popperOffsets.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/preventOverflow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getAltAxis.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getCompositeRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getNodeScroll.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getHTMLElementScroll.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/orderModifiers.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/createPopper.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/debounce.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/mergeByName.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/popper.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/popper-lite.js","webpack://pydata_sphinx_theme/./node_modules/bootstrap/dist/js/bootstrap.esm.js","webpack://pydata_sphinx_theme/./src/pydata_sphinx_theme/assets/scripts/mixin.js","webpack://pydata_sphinx_theme/./src/pydata_sphinx_theme/assets/scripts/bootstrap.js"],"sourcesContent":["// The require scope\nvar __webpack_require__ = {};\n\n","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","export var top = 'top';\nexport var bottom = 'bottom';\nexport var right = 'right';\nexport var left = 'left';\nexport var auto = 'auto';\nexport var basePlacements = [top, bottom, right, left];\nexport var start = 'start';\nexport var end = 'end';\nexport var clippingParents = 'clippingParents';\nexport var viewport = 'viewport';\nexport var popper = 'popper';\nexport var reference = 'reference';\nexport var variationPlacements = /*#__PURE__*/basePlacements.reduce(function (acc, placement) {\n return acc.concat([placement + \"-\" + start, placement + \"-\" + end]);\n}, []);\nexport var placements = /*#__PURE__*/[].concat(basePlacements, [auto]).reduce(function (acc, placement) {\n return acc.concat([placement, placement + \"-\" + start, placement + \"-\" + end]);\n}, []); // modifiers that need to read 
the DOM\n\nexport var beforeRead = 'beforeRead';\nexport var read = 'read';\nexport var afterRead = 'afterRead'; // pure-logic modifiers\n\nexport var beforeMain = 'beforeMain';\nexport var main = 'main';\nexport var afterMain = 'afterMain'; // modifier with the purpose to write to the DOM (or write into a framework state)\n\nexport var beforeWrite = 'beforeWrite';\nexport var write = 'write';\nexport var afterWrite = 'afterWrite';\nexport var modifierPhases = [beforeRead, read, afterRead, beforeMain, main, afterMain, beforeWrite, write, afterWrite];","export default function getNodeName(element) {\n return element ? (element.nodeName || '').toLowerCase() : null;\n}","export default function getWindow(node) {\n if (node == null) {\n return window;\n }\n\n if (node.toString() !== '[object Window]') {\n var ownerDocument = node.ownerDocument;\n return ownerDocument ? ownerDocument.defaultView || window : window;\n }\n\n return node;\n}","import getWindow from \"./getWindow.js\";\n\nfunction isElement(node) {\n var OwnElement = getWindow(node).Element;\n return node instanceof OwnElement || node instanceof Element;\n}\n\nfunction isHTMLElement(node) {\n var OwnElement = getWindow(node).HTMLElement;\n return node instanceof OwnElement || node instanceof HTMLElement;\n}\n\nfunction isShadowRoot(node) {\n // IE 11 has no ShadowRoot\n if (typeof ShadowRoot === 'undefined') {\n return false;\n }\n\n var OwnElement = getWindow(node).ShadowRoot;\n return node instanceof OwnElement || node instanceof ShadowRoot;\n}\n\nexport { isElement, isHTMLElement, isShadowRoot };","import getNodeName from \"../dom-utils/getNodeName.js\";\nimport { isHTMLElement } from \"../dom-utils/instanceOf.js\"; // This modifier takes the styles prepared by the `computeStyles` modifier\n// and applies them to the HTMLElements such as popper and arrow\n\nfunction applyStyles(_ref) {\n var state = _ref.state;\n Object.keys(state.elements).forEach(function (name) {\n var style = state.styles[name] || {};\n var attributes = state.attributes[name] || {};\n var element = state.elements[name]; // arrow is optional + virtual elements\n\n if (!isHTMLElement(element) || !getNodeName(element)) {\n return;\n } // Flow doesn't support to extend this property, but it's the most\n // effective way to apply styles to an HTMLElement\n // $FlowFixMe[cannot-write]\n\n\n Object.assign(element.style, style);\n Object.keys(attributes).forEach(function (name) {\n var value = attributes[name];\n\n if (value === false) {\n element.removeAttribute(name);\n } else {\n element.setAttribute(name, value === true ? '' : value);\n }\n });\n });\n}\n\nfunction effect(_ref2) {\n var state = _ref2.state;\n var initialStyles = {\n popper: {\n position: state.options.strategy,\n left: '0',\n top: '0',\n margin: '0'\n },\n arrow: {\n position: 'absolute'\n },\n reference: {}\n };\n Object.assign(state.elements.popper.style, initialStyles.popper);\n state.styles = initialStyles;\n\n if (state.elements.arrow) {\n Object.assign(state.elements.arrow.style, initialStyles.arrow);\n }\n\n return function () {\n Object.keys(state.elements).forEach(function (name) {\n var element = state.elements[name];\n var attributes = state.attributes[name] || {};\n var styleProperties = Object.keys(state.styles.hasOwnProperty(name) ? 
state.styles[name] : initialStyles[name]); // Set all values to an empty string to unset them\n\n var style = styleProperties.reduce(function (style, property) {\n style[property] = '';\n return style;\n }, {}); // arrow is optional + virtual elements\n\n if (!isHTMLElement(element) || !getNodeName(element)) {\n return;\n }\n\n Object.assign(element.style, style);\n Object.keys(attributes).forEach(function (attribute) {\n element.removeAttribute(attribute);\n });\n });\n };\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'applyStyles',\n enabled: true,\n phase: 'write',\n fn: applyStyles,\n effect: effect,\n requires: ['computeStyles']\n};","import { auto } from \"../enums.js\";\nexport default function getBasePlacement(placement) {\n return placement.split('-')[0];\n}","export var max = Math.max;\nexport var min = Math.min;\nexport var round = Math.round;","export default function getUAString() {\n var uaData = navigator.userAgentData;\n\n if (uaData != null && uaData.brands && Array.isArray(uaData.brands)) {\n return uaData.brands.map(function (item) {\n return item.brand + \"/\" + item.version;\n }).join(' ');\n }\n\n return navigator.userAgent;\n}","import getUAString from \"../utils/userAgent.js\";\nexport default function isLayoutViewport() {\n return !/^((?!chrome|android).)*safari/i.test(getUAString());\n}","import { isElement, isHTMLElement } from \"./instanceOf.js\";\nimport { round } from \"../utils/math.js\";\nimport getWindow from \"./getWindow.js\";\nimport isLayoutViewport from \"./isLayoutViewport.js\";\nexport default function getBoundingClientRect(element, includeScale, isFixedStrategy) {\n if (includeScale === void 0) {\n includeScale = false;\n }\n\n if (isFixedStrategy === void 0) {\n isFixedStrategy = false;\n }\n\n var clientRect = element.getBoundingClientRect();\n var scaleX = 1;\n var scaleY = 1;\n\n if (includeScale && isHTMLElement(element)) {\n scaleX = element.offsetWidth > 0 ? round(clientRect.width) / element.offsetWidth || 1 : 1;\n scaleY = element.offsetHeight > 0 ? round(clientRect.height) / element.offsetHeight || 1 : 1;\n }\n\n var _ref = isElement(element) ? getWindow(element) : window,\n visualViewport = _ref.visualViewport;\n\n var addVisualOffsets = !isLayoutViewport() && isFixedStrategy;\n var x = (clientRect.left + (addVisualOffsets && visualViewport ? visualViewport.offsetLeft : 0)) / scaleX;\n var y = (clientRect.top + (addVisualOffsets && visualViewport ? visualViewport.offsetTop : 0)) / scaleY;\n var width = clientRect.width / scaleX;\n var height = clientRect.height / scaleY;\n return {\n width: width,\n height: height,\n top: y,\n right: x + width,\n bottom: y + height,\n left: x,\n x: x,\n y: y\n };\n}","import getBoundingClientRect from \"./getBoundingClientRect.js\"; // Returns the layout rect of an element relative to its offsetParent. 
Layout\n// means it doesn't take into account transforms.\n\nexport default function getLayoutRect(element) {\n var clientRect = getBoundingClientRect(element); // Use the clientRect sizes if it's not been transformed.\n // Fixes https://github.com/popperjs/popper-core/issues/1223\n\n var width = element.offsetWidth;\n var height = element.offsetHeight;\n\n if (Math.abs(clientRect.width - width) <= 1) {\n width = clientRect.width;\n }\n\n if (Math.abs(clientRect.height - height) <= 1) {\n height = clientRect.height;\n }\n\n return {\n x: element.offsetLeft,\n y: element.offsetTop,\n width: width,\n height: height\n };\n}","import { isShadowRoot } from \"./instanceOf.js\";\nexport default function contains(parent, child) {\n var rootNode = child.getRootNode && child.getRootNode(); // First, attempt with faster native method\n\n if (parent.contains(child)) {\n return true;\n } // then fallback to custom implementation with Shadow DOM support\n else if (rootNode && isShadowRoot(rootNode)) {\n var next = child;\n\n do {\n if (next && parent.isSameNode(next)) {\n return true;\n } // $FlowFixMe[prop-missing]: need a better way to handle this...\n\n\n next = next.parentNode || next.host;\n } while (next);\n } // Give up, the result is false\n\n\n return false;\n}","import getWindow from \"./getWindow.js\";\nexport default function getComputedStyle(element) {\n return getWindow(element).getComputedStyle(element);\n}","import getNodeName from \"./getNodeName.js\";\nexport default function isTableElement(element) {\n return ['table', 'td', 'th'].indexOf(getNodeName(element)) >= 0;\n}","import { isElement } from \"./instanceOf.js\";\nexport default function getDocumentElement(element) {\n // $FlowFixMe[incompatible-return]: assume body is always available\n return ((isElement(element) ? element.ownerDocument : // $FlowFixMe[prop-missing]\n element.document) || window.document).documentElement;\n}","import getNodeName from \"./getNodeName.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport { isShadowRoot } from \"./instanceOf.js\";\nexport default function getParentNode(element) {\n if (getNodeName(element) === 'html') {\n return element;\n }\n\n return (// this is a quicker (but less type safe) way to save quite some bytes from the bundle\n // $FlowFixMe[incompatible-return]\n // $FlowFixMe[prop-missing]\n element.assignedSlot || // step into the shadow DOM of the parent of a slotted node\n element.parentNode || ( // DOM Element detected\n isShadowRoot(element) ? 
element.host : null) || // ShadowRoot detected\n // $FlowFixMe[incompatible-call]: HTMLElement is a Node\n getDocumentElement(element) // fallback\n\n );\n}","import getWindow from \"./getWindow.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport getComputedStyle from \"./getComputedStyle.js\";\nimport { isHTMLElement, isShadowRoot } from \"./instanceOf.js\";\nimport isTableElement from \"./isTableElement.js\";\nimport getParentNode from \"./getParentNode.js\";\nimport getUAString from \"../utils/userAgent.js\";\n\nfunction getTrueOffsetParent(element) {\n if (!isHTMLElement(element) || // https://github.com/popperjs/popper-core/issues/837\n getComputedStyle(element).position === 'fixed') {\n return null;\n }\n\n return element.offsetParent;\n} // `.offsetParent` reports `null` for fixed elements, while absolute elements\n// return the containing block\n\n\nfunction getContainingBlock(element) {\n var isFirefox = /firefox/i.test(getUAString());\n var isIE = /Trident/i.test(getUAString());\n\n if (isIE && isHTMLElement(element)) {\n // In IE 9, 10 and 11 fixed elements containing block is always established by the viewport\n var elementCss = getComputedStyle(element);\n\n if (elementCss.position === 'fixed') {\n return null;\n }\n }\n\n var currentNode = getParentNode(element);\n\n if (isShadowRoot(currentNode)) {\n currentNode = currentNode.host;\n }\n\n while (isHTMLElement(currentNode) && ['html', 'body'].indexOf(getNodeName(currentNode)) < 0) {\n var css = getComputedStyle(currentNode); // This is non-exhaustive but covers the most common CSS properties that\n // create a containing block.\n // https://developer.mozilla.org/en-US/docs/Web/CSS/Containing_block#identifying_the_containing_block\n\n if (css.transform !== 'none' || css.perspective !== 'none' || css.contain === 'paint' || ['transform', 'perspective'].indexOf(css.willChange) !== -1 || isFirefox && css.willChange === 'filter' || isFirefox && css.filter && css.filter !== 'none') {\n return currentNode;\n } else {\n currentNode = currentNode.parentNode;\n }\n }\n\n return null;\n} // Gets the closest ancestor positioned element. Handles some edge cases,\n// such as table ancestors and cross browser bugs.\n\n\nexport default function getOffsetParent(element) {\n var window = getWindow(element);\n var offsetParent = getTrueOffsetParent(element);\n\n while (offsetParent && isTableElement(offsetParent) && getComputedStyle(offsetParent).position === 'static') {\n offsetParent = getTrueOffsetParent(offsetParent);\n }\n\n if (offsetParent && (getNodeName(offsetParent) === 'html' || getNodeName(offsetParent) === 'body' && getComputedStyle(offsetParent).position === 'static')) {\n return window;\n }\n\n return offsetParent || getContainingBlock(element) || window;\n}","export default function getMainAxisFromPlacement(placement) {\n return ['top', 'bottom'].indexOf(placement) >= 0 ? 'x' : 'y';\n}","import { max as mathMax, min as mathMin } from \"./math.js\";\nexport function within(min, value, max) {\n return mathMax(min, mathMin(value, max));\n}\nexport function withinMaxClamp(min, value, max) {\n var v = within(min, value, max);\n return v > max ? 
max : v;\n}","import getFreshSideObject from \"./getFreshSideObject.js\";\nexport default function mergePaddingObject(paddingObject) {\n return Object.assign({}, getFreshSideObject(), paddingObject);\n}","export default function getFreshSideObject() {\n return {\n top: 0,\n right: 0,\n bottom: 0,\n left: 0\n };\n}","export default function expandToHashMap(value, keys) {\n return keys.reduce(function (hashMap, key) {\n hashMap[key] = value;\n return hashMap;\n }, {});\n}","import getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getLayoutRect from \"../dom-utils/getLayoutRect.js\";\nimport contains from \"../dom-utils/contains.js\";\nimport getOffsetParent from \"../dom-utils/getOffsetParent.js\";\nimport getMainAxisFromPlacement from \"../utils/getMainAxisFromPlacement.js\";\nimport { within } from \"../utils/within.js\";\nimport mergePaddingObject from \"../utils/mergePaddingObject.js\";\nimport expandToHashMap from \"../utils/expandToHashMap.js\";\nimport { left, right, basePlacements, top, bottom } from \"../enums.js\"; // eslint-disable-next-line import/no-unused-modules\n\nvar toPaddingObject = function toPaddingObject(padding, state) {\n padding = typeof padding === 'function' ? padding(Object.assign({}, state.rects, {\n placement: state.placement\n })) : padding;\n return mergePaddingObject(typeof padding !== 'number' ? padding : expandToHashMap(padding, basePlacements));\n};\n\nfunction arrow(_ref) {\n var _state$modifiersData$;\n\n var state = _ref.state,\n name = _ref.name,\n options = _ref.options;\n var arrowElement = state.elements.arrow;\n var popperOffsets = state.modifiersData.popperOffsets;\n var basePlacement = getBasePlacement(state.placement);\n var axis = getMainAxisFromPlacement(basePlacement);\n var isVertical = [left, right].indexOf(basePlacement) >= 0;\n var len = isVertical ? 'height' : 'width';\n\n if (!arrowElement || !popperOffsets) {\n return;\n }\n\n var paddingObject = toPaddingObject(options.padding, state);\n var arrowRect = getLayoutRect(arrowElement);\n var minProp = axis === 'y' ? top : left;\n var maxProp = axis === 'y' ? bottom : right;\n var endDiff = state.rects.reference[len] + state.rects.reference[axis] - popperOffsets[axis] - state.rects.popper[len];\n var startDiff = popperOffsets[axis] - state.rects.reference[axis];\n var arrowOffsetParent = getOffsetParent(arrowElement);\n var clientSize = arrowOffsetParent ? axis === 'y' ? arrowOffsetParent.clientHeight || 0 : arrowOffsetParent.clientWidth || 0 : 0;\n var centerToReference = endDiff / 2 - startDiff / 2; // Make sure the arrow doesn't overflow the popper if the center point is\n // outside of the popper bounds\n\n var min = paddingObject[minProp];\n var max = clientSize - arrowRect[len] - paddingObject[maxProp];\n var center = clientSize / 2 - arrowRect[len] / 2 + centerToReference;\n var offset = within(min, center, max); // Prevents breaking syntax highlighting...\n\n var axisProp = axis;\n state.modifiersData[name] = (_state$modifiersData$ = {}, _state$modifiersData$[axisProp] = offset, _state$modifiersData$.centerOffset = offset - center, _state$modifiersData$);\n}\n\nfunction effect(_ref2) {\n var state = _ref2.state,\n options = _ref2.options;\n var _options$element = options.element,\n arrowElement = _options$element === void 0 ? 
'[data-popper-arrow]' : _options$element;\n\n if (arrowElement == null) {\n return;\n } // CSS selector\n\n\n if (typeof arrowElement === 'string') {\n arrowElement = state.elements.popper.querySelector(arrowElement);\n\n if (!arrowElement) {\n return;\n }\n }\n\n if (!contains(state.elements.popper, arrowElement)) {\n return;\n }\n\n state.elements.arrow = arrowElement;\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'arrow',\n enabled: true,\n phase: 'main',\n fn: arrow,\n effect: effect,\n requires: ['popperOffsets'],\n requiresIfExists: ['preventOverflow']\n};","export default function getVariation(placement) {\n return placement.split('-')[1];\n}","import { top, left, right, bottom, end } from \"../enums.js\";\nimport getOffsetParent from \"../dom-utils/getOffsetParent.js\";\nimport getWindow from \"../dom-utils/getWindow.js\";\nimport getDocumentElement from \"../dom-utils/getDocumentElement.js\";\nimport getComputedStyle from \"../dom-utils/getComputedStyle.js\";\nimport getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getVariation from \"../utils/getVariation.js\";\nimport { round } from \"../utils/math.js\"; // eslint-disable-next-line import/no-unused-modules\n\nvar unsetSides = {\n top: 'auto',\n right: 'auto',\n bottom: 'auto',\n left: 'auto'\n}; // Round the offsets to the nearest suitable subpixel based on the DPR.\n// Zooming can change the DPR, but it seems to report a value that will\n// cleanly divide the values into the appropriate subpixels.\n\nfunction roundOffsetsByDPR(_ref, win) {\n var x = _ref.x,\n y = _ref.y;\n var dpr = win.devicePixelRatio || 1;\n return {\n x: round(x * dpr) / dpr || 0,\n y: round(y * dpr) / dpr || 0\n };\n}\n\nexport function mapToStyles(_ref2) {\n var _Object$assign2;\n\n var popper = _ref2.popper,\n popperRect = _ref2.popperRect,\n placement = _ref2.placement,\n variation = _ref2.variation,\n offsets = _ref2.offsets,\n position = _ref2.position,\n gpuAcceleration = _ref2.gpuAcceleration,\n adaptive = _ref2.adaptive,\n roundOffsets = _ref2.roundOffsets,\n isFixed = _ref2.isFixed;\n var _offsets$x = offsets.x,\n x = _offsets$x === void 0 ? 0 : _offsets$x,\n _offsets$y = offsets.y,\n y = _offsets$y === void 0 ? 0 : _offsets$y;\n\n var _ref3 = typeof roundOffsets === 'function' ? roundOffsets({\n x: x,\n y: y\n }) : {\n x: x,\n y: y\n };\n\n x = _ref3.x;\n y = _ref3.y;\n var hasX = offsets.hasOwnProperty('x');\n var hasY = offsets.hasOwnProperty('y');\n var sideX = left;\n var sideY = top;\n var win = window;\n\n if (adaptive) {\n var offsetParent = getOffsetParent(popper);\n var heightProp = 'clientHeight';\n var widthProp = 'clientWidth';\n\n if (offsetParent === getWindow(popper)) {\n offsetParent = getDocumentElement(popper);\n\n if (getComputedStyle(offsetParent).position !== 'static' && position === 'absolute') {\n heightProp = 'scrollHeight';\n widthProp = 'scrollWidth';\n }\n } // $FlowFixMe[incompatible-cast]: force type refinement, we compare offsetParent with window above, but Flow doesn't detect it\n\n\n offsetParent = offsetParent;\n\n if (placement === top || (placement === left || placement === right) && variation === end) {\n sideY = bottom;\n var offsetY = isFixed && offsetParent === win && win.visualViewport ? win.visualViewport.height : // $FlowFixMe[prop-missing]\n offsetParent[heightProp];\n y -= offsetY - popperRect.height;\n y *= gpuAcceleration ? 
1 : -1;\n }\n\n if (placement === left || (placement === top || placement === bottom) && variation === end) {\n sideX = right;\n var offsetX = isFixed && offsetParent === win && win.visualViewport ? win.visualViewport.width : // $FlowFixMe[prop-missing]\n offsetParent[widthProp];\n x -= offsetX - popperRect.width;\n x *= gpuAcceleration ? 1 : -1;\n }\n }\n\n var commonStyles = Object.assign({\n position: position\n }, adaptive && unsetSides);\n\n var _ref4 = roundOffsets === true ? roundOffsetsByDPR({\n x: x,\n y: y\n }, getWindow(popper)) : {\n x: x,\n y: y\n };\n\n x = _ref4.x;\n y = _ref4.y;\n\n if (gpuAcceleration) {\n var _Object$assign;\n\n return Object.assign({}, commonStyles, (_Object$assign = {}, _Object$assign[sideY] = hasY ? '0' : '', _Object$assign[sideX] = hasX ? '0' : '', _Object$assign.transform = (win.devicePixelRatio || 1) <= 1 ? \"translate(\" + x + \"px, \" + y + \"px)\" : \"translate3d(\" + x + \"px, \" + y + \"px, 0)\", _Object$assign));\n }\n\n return Object.assign({}, commonStyles, (_Object$assign2 = {}, _Object$assign2[sideY] = hasY ? y + \"px\" : '', _Object$assign2[sideX] = hasX ? x + \"px\" : '', _Object$assign2.transform = '', _Object$assign2));\n}\n\nfunction computeStyles(_ref5) {\n var state = _ref5.state,\n options = _ref5.options;\n var _options$gpuAccelerat = options.gpuAcceleration,\n gpuAcceleration = _options$gpuAccelerat === void 0 ? true : _options$gpuAccelerat,\n _options$adaptive = options.adaptive,\n adaptive = _options$adaptive === void 0 ? true : _options$adaptive,\n _options$roundOffsets = options.roundOffsets,\n roundOffsets = _options$roundOffsets === void 0 ? true : _options$roundOffsets;\n var commonStyles = {\n placement: getBasePlacement(state.placement),\n variation: getVariation(state.placement),\n popper: state.elements.popper,\n popperRect: state.rects.popper,\n gpuAcceleration: gpuAcceleration,\n isFixed: state.options.strategy === 'fixed'\n };\n\n if (state.modifiersData.popperOffsets != null) {\n state.styles.popper = Object.assign({}, state.styles.popper, mapToStyles(Object.assign({}, commonStyles, {\n offsets: state.modifiersData.popperOffsets,\n position: state.options.strategy,\n adaptive: adaptive,\n roundOffsets: roundOffsets\n })));\n }\n\n if (state.modifiersData.arrow != null) {\n state.styles.arrow = Object.assign({}, state.styles.arrow, mapToStyles(Object.assign({}, commonStyles, {\n offsets: state.modifiersData.arrow,\n position: 'absolute',\n adaptive: false,\n roundOffsets: roundOffsets\n })));\n }\n\n state.attributes.popper = Object.assign({}, state.attributes.popper, {\n 'data-popper-placement': state.placement\n });\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'computeStyles',\n enabled: true,\n phase: 'beforeWrite',\n fn: computeStyles,\n data: {}\n};","import getWindow from \"../dom-utils/getWindow.js\"; // eslint-disable-next-line import/no-unused-modules\n\nvar passive = {\n passive: true\n};\n\nfunction effect(_ref) {\n var state = _ref.state,\n instance = _ref.instance,\n options = _ref.options;\n var _options$scroll = options.scroll,\n scroll = _options$scroll === void 0 ? true : _options$scroll,\n _options$resize = options.resize,\n resize = _options$resize === void 0 ? 
true : _options$resize;\n var window = getWindow(state.elements.popper);\n var scrollParents = [].concat(state.scrollParents.reference, state.scrollParents.popper);\n\n if (scroll) {\n scrollParents.forEach(function (scrollParent) {\n scrollParent.addEventListener('scroll', instance.update, passive);\n });\n }\n\n if (resize) {\n window.addEventListener('resize', instance.update, passive);\n }\n\n return function () {\n if (scroll) {\n scrollParents.forEach(function (scrollParent) {\n scrollParent.removeEventListener('scroll', instance.update, passive);\n });\n }\n\n if (resize) {\n window.removeEventListener('resize', instance.update, passive);\n }\n };\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'eventListeners',\n enabled: true,\n phase: 'write',\n fn: function fn() {},\n effect: effect,\n data: {}\n};","var hash = {\n left: 'right',\n right: 'left',\n bottom: 'top',\n top: 'bottom'\n};\nexport default function getOppositePlacement(placement) {\n return placement.replace(/left|right|bottom|top/g, function (matched) {\n return hash[matched];\n });\n}","var hash = {\n start: 'end',\n end: 'start'\n};\nexport default function getOppositeVariationPlacement(placement) {\n return placement.replace(/start|end/g, function (matched) {\n return hash[matched];\n });\n}","import getWindow from \"./getWindow.js\";\nexport default function getWindowScroll(node) {\n var win = getWindow(node);\n var scrollLeft = win.pageXOffset;\n var scrollTop = win.pageYOffset;\n return {\n scrollLeft: scrollLeft,\n scrollTop: scrollTop\n };\n}","import getBoundingClientRect from \"./getBoundingClientRect.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport getWindowScroll from \"./getWindowScroll.js\";\nexport default function getWindowScrollBarX(element) {\n // If <html> has a CSS width greater than the viewport, then this will be\n // incorrect for RTL.\n // Popper 1 is broken in this case and never had a bug report so let's assume\n // it's not an issue. I don't think anyone ever specifies width on <html>\n // anyway.\n // Browsers where the left scrollbar doesn't cause an issue report `0` for\n // this (e.g. 
Edge 2019, IE11, Safari)\n return getBoundingClientRect(getDocumentElement(element)).left + getWindowScroll(element).scrollLeft;\n}","import getComputedStyle from \"./getComputedStyle.js\";\nexport default function isScrollParent(element) {\n // Firefox wants us to check `-x` and `-y` variations as well\n var _getComputedStyle = getComputedStyle(element),\n overflow = _getComputedStyle.overflow,\n overflowX = _getComputedStyle.overflowX,\n overflowY = _getComputedStyle.overflowY;\n\n return /auto|scroll|overlay|hidden/.test(overflow + overflowY + overflowX);\n}","import getParentNode from \"./getParentNode.js\";\nimport isScrollParent from \"./isScrollParent.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport { isHTMLElement } from \"./instanceOf.js\";\nexport default function getScrollParent(node) {\n if (['html', 'body', '#document'].indexOf(getNodeName(node)) >= 0) {\n // $FlowFixMe[incompatible-return]: assume body is always available\n return node.ownerDocument.body;\n }\n\n if (isHTMLElement(node) && isScrollParent(node)) {\n return node;\n }\n\n return getScrollParent(getParentNode(node));\n}","import getScrollParent from \"./getScrollParent.js\";\nimport getParentNode from \"./getParentNode.js\";\nimport getWindow from \"./getWindow.js\";\nimport isScrollParent from \"./isScrollParent.js\";\n/*\ngiven a DOM element, return the list of all scroll parents, up the list of ancestors\nuntil we get to the top window object. This list is what we attach scroll listeners\nto, because if any of these parent elements scroll, we'll need to re-calculate the\nreference element's position.\n*/\n\nexport default function listScrollParents(element, list) {\n var _element$ownerDocumen;\n\n if (list === void 0) {\n list = [];\n }\n\n var scrollParent = getScrollParent(element);\n var isBody = scrollParent === ((_element$ownerDocumen = element.ownerDocument) == null ? void 0 : _element$ownerDocumen.body);\n var win = getWindow(scrollParent);\n var target = isBody ? [win].concat(win.visualViewport || [], isScrollParent(scrollParent) ? scrollParent : []) : scrollParent;\n var updatedList = list.concat(target);\n return isBody ? 
updatedList : // $FlowFixMe[incompatible-call]: isBody tells us target will be an HTMLElement here\n updatedList.concat(listScrollParents(getParentNode(target)));\n}","export default function rectToClientRect(rect) {\n return Object.assign({}, rect, {\n left: rect.x,\n top: rect.y,\n right: rect.x + rect.width,\n bottom: rect.y + rect.height\n });\n}","import { viewport } from \"../enums.js\";\nimport getViewportRect from \"./getViewportRect.js\";\nimport getDocumentRect from \"./getDocumentRect.js\";\nimport listScrollParents from \"./listScrollParents.js\";\nimport getOffsetParent from \"./getOffsetParent.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport getComputedStyle from \"./getComputedStyle.js\";\nimport { isElement, isHTMLElement } from \"./instanceOf.js\";\nimport getBoundingClientRect from \"./getBoundingClientRect.js\";\nimport getParentNode from \"./getParentNode.js\";\nimport contains from \"./contains.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport rectToClientRect from \"../utils/rectToClientRect.js\";\nimport { max, min } from \"../utils/math.js\";\n\nfunction getInnerBoundingClientRect(element, strategy) {\n var rect = getBoundingClientRect(element, false, strategy === 'fixed');\n rect.top = rect.top + element.clientTop;\n rect.left = rect.left + element.clientLeft;\n rect.bottom = rect.top + element.clientHeight;\n rect.right = rect.left + element.clientWidth;\n rect.width = element.clientWidth;\n rect.height = element.clientHeight;\n rect.x = rect.left;\n rect.y = rect.top;\n return rect;\n}\n\nfunction getClientRectFromMixedType(element, clippingParent, strategy) {\n return clippingParent === viewport ? rectToClientRect(getViewportRect(element, strategy)) : isElement(clippingParent) ? getInnerBoundingClientRect(clippingParent, strategy) : rectToClientRect(getDocumentRect(getDocumentElement(element)));\n} // A \"clipping parent\" is an overflowable container with the characteristic of\n// clipping (or hiding) overflowing elements with a position different from\n// `initial`\n\n\nfunction getClippingParents(element) {\n var clippingParents = listScrollParents(getParentNode(element));\n var canEscapeClipping = ['absolute', 'fixed'].indexOf(getComputedStyle(element).position) >= 0;\n var clipperElement = canEscapeClipping && isHTMLElement(element) ? getOffsetParent(element) : element;\n\n if (!isElement(clipperElement)) {\n return [];\n } // $FlowFixMe[incompatible-return]: https://github.com/facebook/flow/issues/1414\n\n\n return clippingParents.filter(function (clippingParent) {\n return isElement(clippingParent) && contains(clippingParent, clipperElement) && getNodeName(clippingParent) !== 'body';\n });\n} // Gets the maximum area that the element is visible in due to any number of\n// clipping parents\n\n\nexport default function getClippingRect(element, boundary, rootBoundary, strategy) {\n var mainClippingParents = boundary === 'clippingParents' ? 
getClippingParents(element) : [].concat(boundary);\n var clippingParents = [].concat(mainClippingParents, [rootBoundary]);\n var firstClippingParent = clippingParents[0];\n var clippingRect = clippingParents.reduce(function (accRect, clippingParent) {\n var rect = getClientRectFromMixedType(element, clippingParent, strategy);\n accRect.top = max(rect.top, accRect.top);\n accRect.right = min(rect.right, accRect.right);\n accRect.bottom = min(rect.bottom, accRect.bottom);\n accRect.left = max(rect.left, accRect.left);\n return accRect;\n }, getClientRectFromMixedType(element, firstClippingParent, strategy));\n clippingRect.width = clippingRect.right - clippingRect.left;\n clippingRect.height = clippingRect.bottom - clippingRect.top;\n clippingRect.x = clippingRect.left;\n clippingRect.y = clippingRect.top;\n return clippingRect;\n}","import getWindow from \"./getWindow.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport getWindowScrollBarX from \"./getWindowScrollBarX.js\";\nimport isLayoutViewport from \"./isLayoutViewport.js\";\nexport default function getViewportRect(element, strategy) {\n var win = getWindow(element);\n var html = getDocumentElement(element);\n var visualViewport = win.visualViewport;\n var width = html.clientWidth;\n var height = html.clientHeight;\n var x = 0;\n var y = 0;\n\n if (visualViewport) {\n width = visualViewport.width;\n height = visualViewport.height;\n var layoutViewport = isLayoutViewport();\n\n if (layoutViewport || !layoutViewport && strategy === 'fixed') {\n x = visualViewport.offsetLeft;\n y = visualViewport.offsetTop;\n }\n }\n\n return {\n width: width,\n height: height,\n x: x + getWindowScrollBarX(element),\n y: y\n };\n}","import getDocumentElement from \"./getDocumentElement.js\";\nimport getComputedStyle from \"./getComputedStyle.js\";\nimport getWindowScrollBarX from \"./getWindowScrollBarX.js\";\nimport getWindowScroll from \"./getWindowScroll.js\";\nimport { max } from \"../utils/math.js\"; // Gets the entire size of the scrollable document area, even extending outside\n// of the `<html>` and `<body>` rect bounds if horizontally scrollable\n\nexport default function getDocumentRect(element) {\n var _element$ownerDocumen;\n\n var html = getDocumentElement(element);\n var winScroll = getWindowScroll(element);\n var body = (_element$ownerDocumen = element.ownerDocument) == null ? void 0 : _element$ownerDocumen.body;\n var width = max(html.scrollWidth, html.clientWidth, body ? body.scrollWidth : 0, body ? body.clientWidth : 0);\n var height = max(html.scrollHeight, html.clientHeight, body ? body.scrollHeight : 0, body ? body.clientHeight : 0);\n var x = -winScroll.scrollLeft + getWindowScrollBarX(element);\n var y = -winScroll.scrollTop;\n\n if (getComputedStyle(body || html).direction === 'rtl') {\n x += max(html.clientWidth, body ? body.clientWidth : 0) - width;\n }\n\n return {\n width: width,\n height: height,\n x: x,\n y: y\n };\n}","import getBasePlacement from \"./getBasePlacement.js\";\nimport getVariation from \"./getVariation.js\";\nimport getMainAxisFromPlacement from \"./getMainAxisFromPlacement.js\";\nimport { top, right, bottom, left, start, end } from \"../enums.js\";\nexport default function computeOffsets(_ref) {\n var reference = _ref.reference,\n element = _ref.element,\n placement = _ref.placement;\n var basePlacement = placement ? getBasePlacement(placement) : null;\n var variation = placement ? 
getVariation(placement) : null;\n var commonX = reference.x + reference.width / 2 - element.width / 2;\n var commonY = reference.y + reference.height / 2 - element.height / 2;\n var offsets;\n\n switch (basePlacement) {\n case top:\n offsets = {\n x: commonX,\n y: reference.y - element.height\n };\n break;\n\n case bottom:\n offsets = {\n x: commonX,\n y: reference.y + reference.height\n };\n break;\n\n case right:\n offsets = {\n x: reference.x + reference.width,\n y: commonY\n };\n break;\n\n case left:\n offsets = {\n x: reference.x - element.width,\n y: commonY\n };\n break;\n\n default:\n offsets = {\n x: reference.x,\n y: reference.y\n };\n }\n\n var mainAxis = basePlacement ? getMainAxisFromPlacement(basePlacement) : null;\n\n if (mainAxis != null) {\n var len = mainAxis === 'y' ? 'height' : 'width';\n\n switch (variation) {\n case start:\n offsets[mainAxis] = offsets[mainAxis] - (reference[len] / 2 - element[len] / 2);\n break;\n\n case end:\n offsets[mainAxis] = offsets[mainAxis] + (reference[len] / 2 - element[len] / 2);\n break;\n\n default:\n }\n }\n\n return offsets;\n}","import getClippingRect from \"../dom-utils/getClippingRect.js\";\nimport getDocumentElement from \"../dom-utils/getDocumentElement.js\";\nimport getBoundingClientRect from \"../dom-utils/getBoundingClientRect.js\";\nimport computeOffsets from \"./computeOffsets.js\";\nimport rectToClientRect from \"./rectToClientRect.js\";\nimport { clippingParents, reference, popper, bottom, top, right, basePlacements, viewport } from \"../enums.js\";\nimport { isElement } from \"../dom-utils/instanceOf.js\";\nimport mergePaddingObject from \"./mergePaddingObject.js\";\nimport expandToHashMap from \"./expandToHashMap.js\"; // eslint-disable-next-line import/no-unused-modules\n\nexport default function detectOverflow(state, options) {\n if (options === void 0) {\n options = {};\n }\n\n var _options = options,\n _options$placement = _options.placement,\n placement = _options$placement === void 0 ? state.placement : _options$placement,\n _options$strategy = _options.strategy,\n strategy = _options$strategy === void 0 ? state.strategy : _options$strategy,\n _options$boundary = _options.boundary,\n boundary = _options$boundary === void 0 ? clippingParents : _options$boundary,\n _options$rootBoundary = _options.rootBoundary,\n rootBoundary = _options$rootBoundary === void 0 ? viewport : _options$rootBoundary,\n _options$elementConte = _options.elementContext,\n elementContext = _options$elementConte === void 0 ? popper : _options$elementConte,\n _options$altBoundary = _options.altBoundary,\n altBoundary = _options$altBoundary === void 0 ? false : _options$altBoundary,\n _options$padding = _options.padding,\n padding = _options$padding === void 0 ? 0 : _options$padding;\n var paddingObject = mergePaddingObject(typeof padding !== 'number' ? padding : expandToHashMap(padding, basePlacements));\n var altContext = elementContext === popper ? reference : popper;\n var popperRect = state.rects.popper;\n var element = state.elements[altBoundary ? altContext : elementContext];\n var clippingClientRect = getClippingRect(isElement(element) ? 
element : element.contextElement || getDocumentElement(state.elements.popper), boundary, rootBoundary, strategy);\n var referenceClientRect = getBoundingClientRect(state.elements.reference);\n var popperOffsets = computeOffsets({\n reference: referenceClientRect,\n element: popperRect,\n strategy: 'absolute',\n placement: placement\n });\n var popperClientRect = rectToClientRect(Object.assign({}, popperRect, popperOffsets));\n var elementClientRect = elementContext === popper ? popperClientRect : referenceClientRect; // positive = overflowing the clipping rect\n // 0 or negative = within the clipping rect\n\n var overflowOffsets = {\n top: clippingClientRect.top - elementClientRect.top + paddingObject.top,\n bottom: elementClientRect.bottom - clippingClientRect.bottom + paddingObject.bottom,\n left: clippingClientRect.left - elementClientRect.left + paddingObject.left,\n right: elementClientRect.right - clippingClientRect.right + paddingObject.right\n };\n var offsetData = state.modifiersData.offset; // Offsets can be applied only to the popper element\n\n if (elementContext === popper && offsetData) {\n var offset = offsetData[placement];\n Object.keys(overflowOffsets).forEach(function (key) {\n var multiply = [right, bottom].indexOf(key) >= 0 ? 1 : -1;\n var axis = [top, bottom].indexOf(key) >= 0 ? 'y' : 'x';\n overflowOffsets[key] += offset[axis] * multiply;\n });\n }\n\n return overflowOffsets;\n}","import getOppositePlacement from \"../utils/getOppositePlacement.js\";\nimport getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getOppositeVariationPlacement from \"../utils/getOppositeVariationPlacement.js\";\nimport detectOverflow from \"../utils/detectOverflow.js\";\nimport computeAutoPlacement from \"../utils/computeAutoPlacement.js\";\nimport { bottom, top, start, right, left, auto } from \"../enums.js\";\nimport getVariation from \"../utils/getVariation.js\"; // eslint-disable-next-line import/no-unused-modules\n\nfunction getExpandedFallbackPlacements(placement) {\n if (getBasePlacement(placement) === auto) {\n return [];\n }\n\n var oppositePlacement = getOppositePlacement(placement);\n return [getOppositeVariationPlacement(placement), oppositePlacement, getOppositeVariationPlacement(oppositePlacement)];\n}\n\nfunction flip(_ref) {\n var state = _ref.state,\n options = _ref.options,\n name = _ref.name;\n\n if (state.modifiersData[name]._skip) {\n return;\n }\n\n var _options$mainAxis = options.mainAxis,\n checkMainAxis = _options$mainAxis === void 0 ? true : _options$mainAxis,\n _options$altAxis = options.altAxis,\n checkAltAxis = _options$altAxis === void 0 ? true : _options$altAxis,\n specifiedFallbackPlacements = options.fallbackPlacements,\n padding = options.padding,\n boundary = options.boundary,\n rootBoundary = options.rootBoundary,\n altBoundary = options.altBoundary,\n _options$flipVariatio = options.flipVariations,\n flipVariations = _options$flipVariatio === void 0 ? true : _options$flipVariatio,\n allowedAutoPlacements = options.allowedAutoPlacements;\n var preferredPlacement = state.options.placement;\n var basePlacement = getBasePlacement(preferredPlacement);\n var isBasePlacement = basePlacement === preferredPlacement;\n var fallbackPlacements = specifiedFallbackPlacements || (isBasePlacement || !flipVariations ? 
[getOppositePlacement(preferredPlacement)] : getExpandedFallbackPlacements(preferredPlacement));\n var placements = [preferredPlacement].concat(fallbackPlacements).reduce(function (acc, placement) {\n return acc.concat(getBasePlacement(placement) === auto ? computeAutoPlacement(state, {\n placement: placement,\n boundary: boundary,\n rootBoundary: rootBoundary,\n padding: padding,\n flipVariations: flipVariations,\n allowedAutoPlacements: allowedAutoPlacements\n }) : placement);\n }, []);\n var referenceRect = state.rects.reference;\n var popperRect = state.rects.popper;\n var checksMap = new Map();\n var makeFallbackChecks = true;\n var firstFittingPlacement = placements[0];\n\n for (var i = 0; i < placements.length; i++) {\n var placement = placements[i];\n\n var _basePlacement = getBasePlacement(placement);\n\n var isStartVariation = getVariation(placement) === start;\n var isVertical = [top, bottom].indexOf(_basePlacement) >= 0;\n var len = isVertical ? 'width' : 'height';\n var overflow = detectOverflow(state, {\n placement: placement,\n boundary: boundary,\n rootBoundary: rootBoundary,\n altBoundary: altBoundary,\n padding: padding\n });\n var mainVariationSide = isVertical ? isStartVariation ? right : left : isStartVariation ? bottom : top;\n\n if (referenceRect[len] > popperRect[len]) {\n mainVariationSide = getOppositePlacement(mainVariationSide);\n }\n\n var altVariationSide = getOppositePlacement(mainVariationSide);\n var checks = [];\n\n if (checkMainAxis) {\n checks.push(overflow[_basePlacement] <= 0);\n }\n\n if (checkAltAxis) {\n checks.push(overflow[mainVariationSide] <= 0, overflow[altVariationSide] <= 0);\n }\n\n if (checks.every(function (check) {\n return check;\n })) {\n firstFittingPlacement = placement;\n makeFallbackChecks = false;\n break;\n }\n\n checksMap.set(placement, checks);\n }\n\n if (makeFallbackChecks) {\n // `2` may be desired in some cases – research later\n var numberOfChecks = flipVariations ? 3 : 1;\n\n var _loop = function _loop(_i) {\n var fittingPlacement = placements.find(function (placement) {\n var checks = checksMap.get(placement);\n\n if (checks) {\n return checks.slice(0, _i).every(function (check) {\n return check;\n });\n }\n });\n\n if (fittingPlacement) {\n firstFittingPlacement = fittingPlacement;\n return \"break\";\n }\n };\n\n for (var _i = numberOfChecks; _i > 0; _i--) {\n var _ret = _loop(_i);\n\n if (_ret === \"break\") break;\n }\n }\n\n if (state.placement !== firstFittingPlacement) {\n state.modifiersData[name]._skip = true;\n state.placement = firstFittingPlacement;\n state.reset = true;\n }\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'flip',\n enabled: true,\n phase: 'main',\n fn: flip,\n requiresIfExists: ['offset'],\n data: {\n _skip: false\n }\n};","import getVariation from \"./getVariation.js\";\nimport { variationPlacements, basePlacements, placements as allPlacements } from \"../enums.js\";\nimport detectOverflow from \"./detectOverflow.js\";\nimport getBasePlacement from \"./getBasePlacement.js\";\nexport default function computeAutoPlacement(state, options) {\n if (options === void 0) {\n options = {};\n }\n\n var _options = options,\n placement = _options.placement,\n boundary = _options.boundary,\n rootBoundary = _options.rootBoundary,\n padding = _options.padding,\n flipVariations = _options.flipVariations,\n _options$allowedAutoP = _options.allowedAutoPlacements,\n allowedAutoPlacements = _options$allowedAutoP === void 0 ? 
allPlacements : _options$allowedAutoP;\n var variation = getVariation(placement);\n var placements = variation ? flipVariations ? variationPlacements : variationPlacements.filter(function (placement) {\n return getVariation(placement) === variation;\n }) : basePlacements;\n var allowedPlacements = placements.filter(function (placement) {\n return allowedAutoPlacements.indexOf(placement) >= 0;\n });\n\n if (allowedPlacements.length === 0) {\n allowedPlacements = placements;\n } // $FlowFixMe[incompatible-type]: Flow seems to have problems with two array unions...\n\n\n var overflows = allowedPlacements.reduce(function (acc, placement) {\n acc[placement] = detectOverflow(state, {\n placement: placement,\n boundary: boundary,\n rootBoundary: rootBoundary,\n padding: padding\n })[getBasePlacement(placement)];\n return acc;\n }, {});\n return Object.keys(overflows).sort(function (a, b) {\n return overflows[a] - overflows[b];\n });\n}","import { top, bottom, left, right } from \"../enums.js\";\nimport detectOverflow from \"../utils/detectOverflow.js\";\n\nfunction getSideOffsets(overflow, rect, preventedOffsets) {\n if (preventedOffsets === void 0) {\n preventedOffsets = {\n x: 0,\n y: 0\n };\n }\n\n return {\n top: overflow.top - rect.height - preventedOffsets.y,\n right: overflow.right - rect.width + preventedOffsets.x,\n bottom: overflow.bottom - rect.height + preventedOffsets.y,\n left: overflow.left - rect.width - preventedOffsets.x\n };\n}\n\nfunction isAnySideFullyClipped(overflow) {\n return [top, right, bottom, left].some(function (side) {\n return overflow[side] >= 0;\n });\n}\n\nfunction hide(_ref) {\n var state = _ref.state,\n name = _ref.name;\n var referenceRect = state.rects.reference;\n var popperRect = state.rects.popper;\n var preventedOffsets = state.modifiersData.preventOverflow;\n var referenceOverflow = detectOverflow(state, {\n elementContext: 'reference'\n });\n var popperAltOverflow = detectOverflow(state, {\n altBoundary: true\n });\n var referenceClippingOffsets = getSideOffsets(referenceOverflow, referenceRect);\n var popperEscapeOffsets = getSideOffsets(popperAltOverflow, popperRect, preventedOffsets);\n var isReferenceHidden = isAnySideFullyClipped(referenceClippingOffsets);\n var hasPopperEscaped = isAnySideFullyClipped(popperEscapeOffsets);\n state.modifiersData[name] = {\n referenceClippingOffsets: referenceClippingOffsets,\n popperEscapeOffsets: popperEscapeOffsets,\n isReferenceHidden: isReferenceHidden,\n hasPopperEscaped: hasPopperEscaped\n };\n state.attributes.popper = Object.assign({}, state.attributes.popper, {\n 'data-popper-reference-hidden': isReferenceHidden,\n 'data-popper-escaped': hasPopperEscaped\n });\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'hide',\n enabled: true,\n phase: 'main',\n requiresIfExists: ['preventOverflow'],\n fn: hide\n};","import getBasePlacement from \"../utils/getBasePlacement.js\";\nimport { top, left, right, placements } from \"../enums.js\"; // eslint-disable-next-line import/no-unused-modules\n\nexport function distanceAndSkiddingToXY(placement, rects, offset) {\n var basePlacement = getBasePlacement(placement);\n var invertDistance = [left, top].indexOf(basePlacement) >= 0 ? -1 : 1;\n\n var _ref = typeof offset === 'function' ? offset(Object.assign({}, rects, {\n placement: placement\n })) : offset,\n skidding = _ref[0],\n distance = _ref[1];\n\n skidding = skidding || 0;\n distance = (distance || 0) * invertDistance;\n return [left, right].indexOf(basePlacement) >= 0 ? 
{\n x: distance,\n y: skidding\n } : {\n x: skidding,\n y: distance\n };\n}\n\nfunction offset(_ref2) {\n var state = _ref2.state,\n options = _ref2.options,\n name = _ref2.name;\n var _options$offset = options.offset,\n offset = _options$offset === void 0 ? [0, 0] : _options$offset;\n var data = placements.reduce(function (acc, placement) {\n acc[placement] = distanceAndSkiddingToXY(placement, state.rects, offset);\n return acc;\n }, {});\n var _data$state$placement = data[state.placement],\n x = _data$state$placement.x,\n y = _data$state$placement.y;\n\n if (state.modifiersData.popperOffsets != null) {\n state.modifiersData.popperOffsets.x += x;\n state.modifiersData.popperOffsets.y += y;\n }\n\n state.modifiersData[name] = data;\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'offset',\n enabled: true,\n phase: 'main',\n requires: ['popperOffsets'],\n fn: offset\n};","import computeOffsets from \"../utils/computeOffsets.js\";\n\nfunction popperOffsets(_ref) {\n var state = _ref.state,\n name = _ref.name;\n // Offsets are the actual position the popper needs to have to be\n // properly positioned near its reference element\n // This is the most basic placement, and will be adjusted by\n // the modifiers in the next step\n state.modifiersData[name] = computeOffsets({\n reference: state.rects.reference,\n element: state.rects.popper,\n strategy: 'absolute',\n placement: state.placement\n });\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'popperOffsets',\n enabled: true,\n phase: 'read',\n fn: popperOffsets,\n data: {}\n};","import { top, left, right, bottom, start } from \"../enums.js\";\nimport getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getMainAxisFromPlacement from \"../utils/getMainAxisFromPlacement.js\";\nimport getAltAxis from \"../utils/getAltAxis.js\";\nimport { within, withinMaxClamp } from \"../utils/within.js\";\nimport getLayoutRect from \"../dom-utils/getLayoutRect.js\";\nimport getOffsetParent from \"../dom-utils/getOffsetParent.js\";\nimport detectOverflow from \"../utils/detectOverflow.js\";\nimport getVariation from \"../utils/getVariation.js\";\nimport getFreshSideObject from \"../utils/getFreshSideObject.js\";\nimport { min as mathMin, max as mathMax } from \"../utils/math.js\";\n\nfunction preventOverflow(_ref) {\n var state = _ref.state,\n options = _ref.options,\n name = _ref.name;\n var _options$mainAxis = options.mainAxis,\n checkMainAxis = _options$mainAxis === void 0 ? true : _options$mainAxis,\n _options$altAxis = options.altAxis,\n checkAltAxis = _options$altAxis === void 0 ? false : _options$altAxis,\n boundary = options.boundary,\n rootBoundary = options.rootBoundary,\n altBoundary = options.altBoundary,\n padding = options.padding,\n _options$tether = options.tether,\n tether = _options$tether === void 0 ? true : _options$tether,\n _options$tetherOffset = options.tetherOffset,\n tetherOffset = _options$tetherOffset === void 0 ? 
0 : _options$tetherOffset;\n var overflow = detectOverflow(state, {\n boundary: boundary,\n rootBoundary: rootBoundary,\n padding: padding,\n altBoundary: altBoundary\n });\n var basePlacement = getBasePlacement(state.placement);\n var variation = getVariation(state.placement);\n var isBasePlacement = !variation;\n var mainAxis = getMainAxisFromPlacement(basePlacement);\n var altAxis = getAltAxis(mainAxis);\n var popperOffsets = state.modifiersData.popperOffsets;\n var referenceRect = state.rects.reference;\n var popperRect = state.rects.popper;\n var tetherOffsetValue = typeof tetherOffset === 'function' ? tetherOffset(Object.assign({}, state.rects, {\n placement: state.placement\n })) : tetherOffset;\n var normalizedTetherOffsetValue = typeof tetherOffsetValue === 'number' ? {\n mainAxis: tetherOffsetValue,\n altAxis: tetherOffsetValue\n } : Object.assign({\n mainAxis: 0,\n altAxis: 0\n }, tetherOffsetValue);\n var offsetModifierState = state.modifiersData.offset ? state.modifiersData.offset[state.placement] : null;\n var data = {\n x: 0,\n y: 0\n };\n\n if (!popperOffsets) {\n return;\n }\n\n if (checkMainAxis) {\n var _offsetModifierState$;\n\n var mainSide = mainAxis === 'y' ? top : left;\n var altSide = mainAxis === 'y' ? bottom : right;\n var len = mainAxis === 'y' ? 'height' : 'width';\n var offset = popperOffsets[mainAxis];\n var min = offset + overflow[mainSide];\n var max = offset - overflow[altSide];\n var additive = tether ? -popperRect[len] / 2 : 0;\n var minLen = variation === start ? referenceRect[len] : popperRect[len];\n var maxLen = variation === start ? -popperRect[len] : -referenceRect[len]; // We need to include the arrow in the calculation so the arrow doesn't go\n // outside the reference bounds\n\n var arrowElement = state.elements.arrow;\n var arrowRect = tether && arrowElement ? getLayoutRect(arrowElement) : {\n width: 0,\n height: 0\n };\n var arrowPaddingObject = state.modifiersData['arrow#persistent'] ? state.modifiersData['arrow#persistent'].padding : getFreshSideObject();\n var arrowPaddingMin = arrowPaddingObject[mainSide];\n var arrowPaddingMax = arrowPaddingObject[altSide]; // If the reference length is smaller than the arrow length, we don't want\n // to include its full size in the calculation. If the reference is small\n // and near the edge of a boundary, the popper can overflow even if the\n // reference is not overflowing as well (e.g. virtual elements with no\n // width or height)\n\n var arrowLen = within(0, referenceRect[len], arrowRect[len]);\n var minOffset = isBasePlacement ? referenceRect[len] / 2 - additive - arrowLen - arrowPaddingMin - normalizedTetherOffsetValue.mainAxis : minLen - arrowLen - arrowPaddingMin - normalizedTetherOffsetValue.mainAxis;\n var maxOffset = isBasePlacement ? -referenceRect[len] / 2 + additive + arrowLen + arrowPaddingMax + normalizedTetherOffsetValue.mainAxis : maxLen + arrowLen + arrowPaddingMax + normalizedTetherOffsetValue.mainAxis;\n var arrowOffsetParent = state.elements.arrow && getOffsetParent(state.elements.arrow);\n var clientOffset = arrowOffsetParent ? mainAxis === 'y' ? arrowOffsetParent.clientTop || 0 : arrowOffsetParent.clientLeft || 0 : 0;\n var offsetModifierValue = (_offsetModifierState$ = offsetModifierState == null ? void 0 : offsetModifierState[mainAxis]) != null ? _offsetModifierState$ : 0;\n var tetherMin = offset + minOffset - offsetModifierValue - clientOffset;\n var tetherMax = offset + maxOffset - offsetModifierValue;\n var preventedOffset = within(tether ? 
mathMin(min, tetherMin) : min, offset, tether ? mathMax(max, tetherMax) : max);\n popperOffsets[mainAxis] = preventedOffset;\n data[mainAxis] = preventedOffset - offset;\n }\n\n if (checkAltAxis) {\n var _offsetModifierState$2;\n\n var _mainSide = mainAxis === 'x' ? top : left;\n\n var _altSide = mainAxis === 'x' ? bottom : right;\n\n var _offset = popperOffsets[altAxis];\n\n var _len = altAxis === 'y' ? 'height' : 'width';\n\n var _min = _offset + overflow[_mainSide];\n\n var _max = _offset - overflow[_altSide];\n\n var isOriginSide = [top, left].indexOf(basePlacement) !== -1;\n\n var _offsetModifierValue = (_offsetModifierState$2 = offsetModifierState == null ? void 0 : offsetModifierState[altAxis]) != null ? _offsetModifierState$2 : 0;\n\n var _tetherMin = isOriginSide ? _min : _offset - referenceRect[_len] - popperRect[_len] - _offsetModifierValue + normalizedTetherOffsetValue.altAxis;\n\n var _tetherMax = isOriginSide ? _offset + referenceRect[_len] + popperRect[_len] - _offsetModifierValue - normalizedTetherOffsetValue.altAxis : _max;\n\n var _preventedOffset = tether && isOriginSide ? withinMaxClamp(_tetherMin, _offset, _tetherMax) : within(tether ? _tetherMin : _min, _offset, tether ? _tetherMax : _max);\n\n popperOffsets[altAxis] = _preventedOffset;\n data[altAxis] = _preventedOffset - _offset;\n }\n\n state.modifiersData[name] = data;\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'preventOverflow',\n enabled: true,\n phase: 'main',\n fn: preventOverflow,\n requiresIfExists: ['offset']\n};","export default function getAltAxis(axis) {\n return axis === 'x' ? 'y' : 'x';\n}","import getBoundingClientRect from \"./getBoundingClientRect.js\";\nimport getNodeScroll from \"./getNodeScroll.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport { isHTMLElement } from \"./instanceOf.js\";\nimport getWindowScrollBarX from \"./getWindowScrollBarX.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport isScrollParent from \"./isScrollParent.js\";\nimport { round } from \"../utils/math.js\";\n\nfunction isElementScaled(element) {\n var rect = element.getBoundingClientRect();\n var scaleX = round(rect.width) / element.offsetWidth || 1;\n var scaleY = round(rect.height) / element.offsetHeight || 1;\n return scaleX !== 1 || scaleY !== 1;\n} // Returns the composite rect of an element relative to its offsetParent.\n// Composite means it takes into account transforms as well as layout.\n\n\nexport default function getCompositeRect(elementOrVirtualElement, offsetParent, isFixed) {\n if (isFixed === void 0) {\n isFixed = false;\n }\n\n var isOffsetParentAnElement = isHTMLElement(offsetParent);\n var offsetParentIsScaled = isHTMLElement(offsetParent) && isElementScaled(offsetParent);\n var documentElement = getDocumentElement(offsetParent);\n var rect = getBoundingClientRect(elementOrVirtualElement, offsetParentIsScaled, isFixed);\n var scroll = {\n scrollLeft: 0,\n scrollTop: 0\n };\n var offsets = {\n x: 0,\n y: 0\n };\n\n if (isOffsetParentAnElement || !isOffsetParentAnElement && !isFixed) {\n if (getNodeName(offsetParent) !== 'body' || // https://github.com/popperjs/popper-core/issues/1078\n isScrollParent(documentElement)) {\n scroll = getNodeScroll(offsetParent);\n }\n\n if (isHTMLElement(offsetParent)) {\n offsets = getBoundingClientRect(offsetParent, true);\n offsets.x += offsetParent.clientLeft;\n offsets.y += offsetParent.clientTop;\n } else if (documentElement) {\n offsets.x = getWindowScrollBarX(documentElement);\n }\n 
}\n\n return {\n x: rect.left + scroll.scrollLeft - offsets.x,\n y: rect.top + scroll.scrollTop - offsets.y,\n width: rect.width,\n height: rect.height\n };\n}","import getWindowScroll from \"./getWindowScroll.js\";\nimport getWindow from \"./getWindow.js\";\nimport { isHTMLElement } from \"./instanceOf.js\";\nimport getHTMLElementScroll from \"./getHTMLElementScroll.js\";\nexport default function getNodeScroll(node) {\n if (node === getWindow(node) || !isHTMLElement(node)) {\n return getWindowScroll(node);\n } else {\n return getHTMLElementScroll(node);\n }\n}","export default function getHTMLElementScroll(element) {\n return {\n scrollLeft: element.scrollLeft,\n scrollTop: element.scrollTop\n };\n}","import { modifierPhases } from \"../enums.js\"; // source: https://stackoverflow.com/questions/49875255\n\nfunction order(modifiers) {\n var map = new Map();\n var visited = new Set();\n var result = [];\n modifiers.forEach(function (modifier) {\n map.set(modifier.name, modifier);\n }); // On visiting object, check for its dependencies and visit them recursively\n\n function sort(modifier) {\n visited.add(modifier.name);\n var requires = [].concat(modifier.requires || [], modifier.requiresIfExists || []);\n requires.forEach(function (dep) {\n if (!visited.has(dep)) {\n var depModifier = map.get(dep);\n\n if (depModifier) {\n sort(depModifier);\n }\n }\n });\n result.push(modifier);\n }\n\n modifiers.forEach(function (modifier) {\n if (!visited.has(modifier.name)) {\n // check for visited object\n sort(modifier);\n }\n });\n return result;\n}\n\nexport default function orderModifiers(modifiers) {\n // order based on dependencies\n var orderedModifiers = order(modifiers); // order based on phase\n\n return modifierPhases.reduce(function (acc, phase) {\n return acc.concat(orderedModifiers.filter(function (modifier) {\n return modifier.phase === phase;\n }));\n }, []);\n}","import getCompositeRect from \"./dom-utils/getCompositeRect.js\";\nimport getLayoutRect from \"./dom-utils/getLayoutRect.js\";\nimport listScrollParents from \"./dom-utils/listScrollParents.js\";\nimport getOffsetParent from \"./dom-utils/getOffsetParent.js\";\nimport orderModifiers from \"./utils/orderModifiers.js\";\nimport debounce from \"./utils/debounce.js\";\nimport mergeByName from \"./utils/mergeByName.js\";\nimport detectOverflow from \"./utils/detectOverflow.js\";\nimport { isElement } from \"./dom-utils/instanceOf.js\";\nvar DEFAULT_OPTIONS = {\n placement: 'bottom',\n modifiers: [],\n strategy: 'absolute'\n};\n\nfunction areValidElements() {\n for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {\n args[_key] = arguments[_key];\n }\n\n return !args.some(function (element) {\n return !(element && typeof element.getBoundingClientRect === 'function');\n });\n}\n\nexport function popperGenerator(generatorOptions) {\n if (generatorOptions === void 0) {\n generatorOptions = {};\n }\n\n var _generatorOptions = generatorOptions,\n _generatorOptions$def = _generatorOptions.defaultModifiers,\n defaultModifiers = _generatorOptions$def === void 0 ? [] : _generatorOptions$def,\n _generatorOptions$def2 = _generatorOptions.defaultOptions,\n defaultOptions = _generatorOptions$def2 === void 0 ? 
DEFAULT_OPTIONS : _generatorOptions$def2;\n return function createPopper(reference, popper, options) {\n if (options === void 0) {\n options = defaultOptions;\n }\n\n var state = {\n placement: 'bottom',\n orderedModifiers: [],\n options: Object.assign({}, DEFAULT_OPTIONS, defaultOptions),\n modifiersData: {},\n elements: {\n reference: reference,\n popper: popper\n },\n attributes: {},\n styles: {}\n };\n var effectCleanupFns = [];\n var isDestroyed = false;\n var instance = {\n state: state,\n setOptions: function setOptions(setOptionsAction) {\n var options = typeof setOptionsAction === 'function' ? setOptionsAction(state.options) : setOptionsAction;\n cleanupModifierEffects();\n state.options = Object.assign({}, defaultOptions, state.options, options);\n state.scrollParents = {\n reference: isElement(reference) ? listScrollParents(reference) : reference.contextElement ? listScrollParents(reference.contextElement) : [],\n popper: listScrollParents(popper)\n }; // Orders the modifiers based on their dependencies and `phase`\n // properties\n\n var orderedModifiers = orderModifiers(mergeByName([].concat(defaultModifiers, state.options.modifiers))); // Strip out disabled modifiers\n\n state.orderedModifiers = orderedModifiers.filter(function (m) {\n return m.enabled;\n });\n runModifierEffects();\n return instance.update();\n },\n // Sync update – it will always be executed, even if not necessary. This\n // is useful for low frequency updates where sync behavior simplifies the\n // logic.\n // For high frequency updates (e.g. `resize` and `scroll` events), always\n // prefer the async Popper#update method\n forceUpdate: function forceUpdate() {\n if (isDestroyed) {\n return;\n }\n\n var _state$elements = state.elements,\n reference = _state$elements.reference,\n popper = _state$elements.popper; // Don't proceed if `reference` or `popper` are not valid elements\n // anymore\n\n if (!areValidElements(reference, popper)) {\n return;\n } // Store the reference and popper rects to be read by modifiers\n\n\n state.rects = {\n reference: getCompositeRect(reference, getOffsetParent(popper), state.options.strategy === 'fixed'),\n popper: getLayoutRect(popper)\n }; // Modifiers have the ability to reset the current update cycle. The\n // most common use case for this is the `flip` modifier changing the\n // placement, which then needs to re-run all the modifiers, because the\n // logic was previously run for the previous placement and is therefore\n // stale/incorrect\n\n state.reset = false;\n state.placement = state.options.placement; // On each update cycle, the `modifiersData` property for each modifier\n // is filled with the initial data specified by the modifier. This means\n // it doesn't persist and is fresh on each update.\n // To ensure persistent data, use `${name}#persistent`\n\n state.orderedModifiers.forEach(function (modifier) {\n return state.modifiersData[modifier.name] = Object.assign({}, modifier.data);\n });\n\n for (var index = 0; index < state.orderedModifiers.length; index++) {\n if (state.reset === true) {\n state.reset = false;\n index = -1;\n continue;\n }\n\n var _state$orderedModifie = state.orderedModifiers[index],\n fn = _state$orderedModifie.fn,\n _state$orderedModifie2 = _state$orderedModifie.options,\n _options = _state$orderedModifie2 === void 0 ? 
{} : _state$orderedModifie2,\n name = _state$orderedModifie.name;\n\n if (typeof fn === 'function') {\n state = fn({\n state: state,\n options: _options,\n name: name,\n instance: instance\n }) || state;\n }\n }\n },\n // Async and optimistically optimized update – it will not be executed if\n // not necessary (debounced to run at most once-per-tick)\n update: debounce(function () {\n return new Promise(function (resolve) {\n instance.forceUpdate();\n resolve(state);\n });\n }),\n destroy: function destroy() {\n cleanupModifierEffects();\n isDestroyed = true;\n }\n };\n\n if (!areValidElements(reference, popper)) {\n return instance;\n }\n\n instance.setOptions(options).then(function (state) {\n if (!isDestroyed && options.onFirstUpdate) {\n options.onFirstUpdate(state);\n }\n }); // Modifiers have the ability to execute arbitrary code before the first\n // update cycle runs. They will be executed in the same order as the update\n // cycle. This is useful when a modifier adds some persistent data that\n // other modifiers need to use, but the modifier is run after the dependent\n // one.\n\n function runModifierEffects() {\n state.orderedModifiers.forEach(function (_ref) {\n var name = _ref.name,\n _ref$options = _ref.options,\n options = _ref$options === void 0 ? {} : _ref$options,\n effect = _ref.effect;\n\n if (typeof effect === 'function') {\n var cleanupFn = effect({\n state: state,\n name: name,\n instance: instance,\n options: options\n });\n\n var noopFn = function noopFn() {};\n\n effectCleanupFns.push(cleanupFn || noopFn);\n }\n });\n }\n\n function cleanupModifierEffects() {\n effectCleanupFns.forEach(function (fn) {\n return fn();\n });\n effectCleanupFns = [];\n }\n\n return instance;\n };\n}\nexport var createPopper = /*#__PURE__*/popperGenerator(); // eslint-disable-next-line import/no-unused-modules\n\nexport { detectOverflow };","export default function debounce(fn) {\n var pending;\n return function () {\n if (!pending) {\n pending = new Promise(function (resolve) {\n Promise.resolve().then(function () {\n pending = undefined;\n resolve(fn());\n });\n });\n }\n\n return pending;\n };\n}","export default function mergeByName(modifiers) {\n var merged = modifiers.reduce(function (merged, current) {\n var existing = merged[current.name];\n merged[current.name] = existing ? 
Object.assign({}, existing, current, {\n options: Object.assign({}, existing.options, current.options),\n data: Object.assign({}, existing.data, current.data)\n }) : current;\n return merged;\n }, {}); // IE11 does not support Object.values\n\n return Object.keys(merged).map(function (key) {\n return merged[key];\n });\n}","import { popperGenerator, detectOverflow } from \"./createPopper.js\";\nimport eventListeners from \"./modifiers/eventListeners.js\";\nimport popperOffsets from \"./modifiers/popperOffsets.js\";\nimport computeStyles from \"./modifiers/computeStyles.js\";\nimport applyStyles from \"./modifiers/applyStyles.js\";\nimport offset from \"./modifiers/offset.js\";\nimport flip from \"./modifiers/flip.js\";\nimport preventOverflow from \"./modifiers/preventOverflow.js\";\nimport arrow from \"./modifiers/arrow.js\";\nimport hide from \"./modifiers/hide.js\";\nvar defaultModifiers = [eventListeners, popperOffsets, computeStyles, applyStyles, offset, flip, preventOverflow, arrow, hide];\nvar createPopper = /*#__PURE__*/popperGenerator({\n defaultModifiers: defaultModifiers\n}); // eslint-disable-next-line import/no-unused-modules\n\nexport { createPopper, popperGenerator, defaultModifiers, detectOverflow }; // eslint-disable-next-line import/no-unused-modules\n\nexport { createPopper as createPopperLite } from \"./popper-lite.js\"; // eslint-disable-next-line import/no-unused-modules\n\nexport * from \"./modifiers/index.js\";","import { popperGenerator, detectOverflow } from \"./createPopper.js\";\nimport eventListeners from \"./modifiers/eventListeners.js\";\nimport popperOffsets from \"./modifiers/popperOffsets.js\";\nimport computeStyles from \"./modifiers/computeStyles.js\";\nimport applyStyles from \"./modifiers/applyStyles.js\";\nvar defaultModifiers = [eventListeners, popperOffsets, computeStyles, applyStyles];\nvar createPopper = /*#__PURE__*/popperGenerator({\n defaultModifiers: defaultModifiers\n}); // eslint-disable-next-line import/no-unused-modules\n\nexport { createPopper, popperGenerator, defaultModifiers, detectOverflow };","/*!\n * Bootstrap v5.3.3 (https://getbootstrap.com/)\n * Copyright 2011-2024 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors)\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n */\nimport * as Popper from '@popperjs/core';\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dom/data.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n/**\n * Constants\n */\n\nconst elementMap = new Map();\nconst Data = {\n set(element, key, instance) {\n if (!elementMap.has(element)) {\n elementMap.set(element, new Map());\n }\n const instanceMap = elementMap.get(element);\n\n // make it clear we only want one instance per element\n // can be removed later when multiple key/instances are fine to be used\n if (!instanceMap.has(key) && instanceMap.size !== 0) {\n // eslint-disable-next-line no-console\n console.error(`Bootstrap doesn't allow more than one instance per element. 
Bound instance: ${Array.from(instanceMap.keys())[0]}.`);\n return;\n }\n instanceMap.set(key, instance);\n },\n get(element, key) {\n if (elementMap.has(element)) {\n return elementMap.get(element).get(key) || null;\n }\n return null;\n },\n remove(element, key) {\n if (!elementMap.has(element)) {\n return;\n }\n const instanceMap = elementMap.get(element);\n instanceMap.delete(key);\n\n // free up element references if there are no instances left for an element\n if (instanceMap.size === 0) {\n elementMap.delete(element);\n }\n }\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/index.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\nconst MAX_UID = 1000000;\nconst MILLISECONDS_MULTIPLIER = 1000;\nconst TRANSITION_END = 'transitionend';\n\n/**\n * Properly escape IDs selectors to handle weird IDs\n * @param {string} selector\n * @returns {string}\n */\nconst parseSelector = selector => {\n if (selector && window.CSS && window.CSS.escape) {\n // document.querySelector needs escaping to handle IDs (html5+) containing for instance /\n selector = selector.replace(/#([^\\s\"#']+)/g, (match, id) => `#${CSS.escape(id)}`);\n }\n return selector;\n};\n\n// Shout-out Angus Croll (https://goo.gl/pxwQGp)\nconst toType = object => {\n if (object === null || object === undefined) {\n return `${object}`;\n }\n return Object.prototype.toString.call(object).match(/\\s([a-z]+)/i)[1].toLowerCase();\n};\n\n/**\n * Public Util API\n */\n\nconst getUID = prefix => {\n do {\n prefix += Math.floor(Math.random() * MAX_UID);\n } while (document.getElementById(prefix));\n return prefix;\n};\nconst getTransitionDurationFromElement = element => {\n if (!element) {\n return 0;\n }\n\n // Get transition-duration of the element\n let {\n transitionDuration,\n transitionDelay\n } = window.getComputedStyle(element);\n const floatTransitionDuration = Number.parseFloat(transitionDuration);\n const floatTransitionDelay = Number.parseFloat(transitionDelay);\n\n // Return 0 if element or transition duration is not found\n if (!floatTransitionDuration && !floatTransitionDelay) {\n return 0;\n }\n\n // If multiple durations are defined, take the first\n transitionDuration = transitionDuration.split(',')[0];\n transitionDelay = transitionDelay.split(',')[0];\n return (Number.parseFloat(transitionDuration) + Number.parseFloat(transitionDelay)) * MILLISECONDS_MULTIPLIER;\n};\nconst triggerTransitionEnd = element => {\n element.dispatchEvent(new Event(TRANSITION_END));\n};\nconst isElement = object => {\n if (!object || typeof object !== 'object') {\n return false;\n }\n if (typeof object.jquery !== 'undefined') {\n object = object[0];\n }\n return typeof object.nodeType !== 'undefined';\n};\nconst getElement = object => {\n // it's a jQuery object or a node element\n if (isElement(object)) {\n return object.jquery ? 
object[0] : object;\n }\n if (typeof object === 'string' && object.length > 0) {\n return document.querySelector(parseSelector(object));\n }\n return null;\n};\nconst isVisible = element => {\n if (!isElement(element) || element.getClientRects().length === 0) {\n return false;\n }\n const elementIsVisible = getComputedStyle(element).getPropertyValue('visibility') === 'visible';\n // Handle `details` element as its content may falsely appear visible when it is closed\n const closedDetails = element.closest('details:not([open])');\n if (!closedDetails) {\n return elementIsVisible;\n }\n if (closedDetails !== element) {\n const summary = element.closest('summary');\n if (summary && summary.parentNode !== closedDetails) {\n return false;\n }\n if (summary === null) {\n return false;\n }\n }\n return elementIsVisible;\n};\nconst isDisabled = element => {\n if (!element || element.nodeType !== Node.ELEMENT_NODE) {\n return true;\n }\n if (element.classList.contains('disabled')) {\n return true;\n }\n if (typeof element.disabled !== 'undefined') {\n return element.disabled;\n }\n return element.hasAttribute('disabled') && element.getAttribute('disabled') !== 'false';\n};\nconst findShadowRoot = element => {\n if (!document.documentElement.attachShadow) {\n return null;\n }\n\n // Can find the shadow root otherwise it'll return the document\n if (typeof element.getRootNode === 'function') {\n const root = element.getRootNode();\n return root instanceof ShadowRoot ? root : null;\n }\n if (element instanceof ShadowRoot) {\n return element;\n }\n\n // when we don't find a shadow root\n if (!element.parentNode) {\n return null;\n }\n return findShadowRoot(element.parentNode);\n};\nconst noop = () => {};\n\n/**\n * Trick to restart an element's animation\n *\n * @param {HTMLElement} element\n * @return void\n *\n * @see https://www.charistheo.io/blog/2021/02/restart-a-css-animation-with-javascript/#restarting-a-css-animation\n */\nconst reflow = element => {\n element.offsetHeight; // eslint-disable-line no-unused-expressions\n};\nconst getjQuery = () => {\n if (window.jQuery && !document.body.hasAttribute('data-bs-no-jquery')) {\n return window.jQuery;\n }\n return null;\n};\nconst DOMContentLoadedCallbacks = [];\nconst onDOMContentLoaded = callback => {\n if (document.readyState === 'loading') {\n // add listener on the first call when the document is in loading state\n if (!DOMContentLoadedCallbacks.length) {\n document.addEventListener('DOMContentLoaded', () => {\n for (const callback of DOMContentLoadedCallbacks) {\n callback();\n }\n });\n }\n DOMContentLoadedCallbacks.push(callback);\n } else {\n callback();\n }\n};\nconst isRTL = () => document.documentElement.dir === 'rtl';\nconst defineJQueryPlugin = plugin => {\n onDOMContentLoaded(() => {\n const $ = getjQuery();\n /* istanbul ignore if */\n if ($) {\n const name = plugin.NAME;\n const JQUERY_NO_CONFLICT = $.fn[name];\n $.fn[name] = plugin.jQueryInterface;\n $.fn[name].Constructor = plugin;\n $.fn[name].noConflict = () => {\n $.fn[name] = JQUERY_NO_CONFLICT;\n return plugin.jQueryInterface;\n };\n }\n });\n};\nconst execute = (possibleCallback, args = [], defaultValue = possibleCallback) => {\n return typeof possibleCallback === 'function' ? 
possibleCallback(...args) : defaultValue;\n};\nconst executeAfterTransition = (callback, transitionElement, waitForTransition = true) => {\n if (!waitForTransition) {\n execute(callback);\n return;\n }\n const durationPadding = 5;\n const emulatedDuration = getTransitionDurationFromElement(transitionElement) + durationPadding;\n let called = false;\n const handler = ({\n target\n }) => {\n if (target !== transitionElement) {\n return;\n }\n called = true;\n transitionElement.removeEventListener(TRANSITION_END, handler);\n execute(callback);\n };\n transitionElement.addEventListener(TRANSITION_END, handler);\n setTimeout(() => {\n if (!called) {\n triggerTransitionEnd(transitionElement);\n }\n }, emulatedDuration);\n};\n\n/**\n * Return the previous/next element of a list.\n *\n * @param {array} list The list of elements\n * @param activeElement The active element\n * @param shouldGetNext Choose to get next or previous element\n * @param isCycleAllowed\n * @return {Element|elem} The proper element\n */\nconst getNextActiveElement = (list, activeElement, shouldGetNext, isCycleAllowed) => {\n const listLength = list.length;\n let index = list.indexOf(activeElement);\n\n // if the element does not exist in the list return an element\n // depending on the direction and if cycle is allowed\n if (index === -1) {\n return !shouldGetNext && isCycleAllowed ? list[listLength - 1] : list[0];\n }\n index += shouldGetNext ? 1 : -1;\n if (isCycleAllowed) {\n index = (index + listLength) % listLength;\n }\n return list[Math.max(0, Math.min(index, listLength - 1))];\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dom/event-handler.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst namespaceRegex = /[^.]*(?=\\..*)\\.|.*/;\nconst stripNameRegex = /\\..*/;\nconst stripUidRegex = /::\\d+$/;\nconst eventRegistry = {}; // Events storage\nlet uidEvent = 1;\nconst customEvents = {\n mouseenter: 'mouseover',\n mouseleave: 'mouseout'\n};\nconst nativeEvents = new Set(['click', 'dblclick', 'mouseup', 'mousedown', 'contextmenu', 'mousewheel', 'DOMMouseScroll', 'mouseover', 'mouseout', 'mousemove', 'selectstart', 'selectend', 'keydown', 'keypress', 'keyup', 'orientationchange', 'touchstart', 'touchmove', 'touchend', 'touchcancel', 'pointerdown', 'pointermove', 'pointerup', 'pointerleave', 'pointercancel', 'gesturestart', 'gesturechange', 'gestureend', 'focus', 'blur', 'change', 'reset', 'select', 'submit', 'focusin', 'focusout', 'load', 'unload', 'beforeunload', 'resize', 'move', 'DOMContentLoaded', 'readystatechange', 'error', 'abort', 'scroll']);\n\n/**\n * Private methods\n */\n\nfunction makeEventUid(element, uid) {\n return uid && `${uid}::${uidEvent++}` || element.uidEvent || uidEvent++;\n}\nfunction getElementEvents(element) {\n const uid = makeEventUid(element);\n element.uidEvent = uid;\n eventRegistry[uid] = eventRegistry[uid] || {};\n return eventRegistry[uid];\n}\nfunction bootstrapHandler(element, fn) {\n return function handler(event) {\n hydrateObj(event, {\n delegateTarget: element\n });\n if (handler.oneOff) {\n EventHandler.off(element, event.type, fn);\n }\n return fn.apply(element, [event]);\n };\n}\nfunction bootstrapDelegationHandler(element, selector, fn) {\n return function handler(event) {\n const domElements = element.querySelectorAll(selector);\n for (let {\n target\n } = event; 
target && target !== this; target = target.parentNode) {\n for (const domElement of domElements) {\n if (domElement !== target) {\n continue;\n }\n hydrateObj(event, {\n delegateTarget: target\n });\n if (handler.oneOff) {\n EventHandler.off(element, event.type, selector, fn);\n }\n return fn.apply(target, [event]);\n }\n }\n };\n}\nfunction findHandler(events, callable, delegationSelector = null) {\n return Object.values(events).find(event => event.callable === callable && event.delegationSelector === delegationSelector);\n}\nfunction normalizeParameters(originalTypeEvent, handler, delegationFunction) {\n const isDelegated = typeof handler === 'string';\n // TODO: tooltip passes `false` instead of selector, so we need to check\n const callable = isDelegated ? delegationFunction : handler || delegationFunction;\n let typeEvent = getTypeEvent(originalTypeEvent);\n if (!nativeEvents.has(typeEvent)) {\n typeEvent = originalTypeEvent;\n }\n return [isDelegated, callable, typeEvent];\n}\nfunction addHandler(element, originalTypeEvent, handler, delegationFunction, oneOff) {\n if (typeof originalTypeEvent !== 'string' || !element) {\n return;\n }\n let [isDelegated, callable, typeEvent] = normalizeParameters(originalTypeEvent, handler, delegationFunction);\n\n // in case of mouseenter or mouseleave wrap the handler within a function that checks for its DOM position\n // this prevents the handler from being dispatched the same way as mouseover or mouseout does\n if (originalTypeEvent in customEvents) {\n const wrapFunction = fn => {\n return function (event) {\n if (!event.relatedTarget || event.relatedTarget !== event.delegateTarget && !event.delegateTarget.contains(event.relatedTarget)) {\n return fn.call(this, event);\n }\n };\n };\n callable = wrapFunction(callable);\n }\n const events = getElementEvents(element);\n const handlers = events[typeEvent] || (events[typeEvent] = {});\n const previousFunction = findHandler(handlers, callable, isDelegated ? handler : null);\n if (previousFunction) {\n previousFunction.oneOff = previousFunction.oneOff && oneOff;\n return;\n }\n const uid = makeEventUid(callable, originalTypeEvent.replace(namespaceRegex, ''));\n const fn = isDelegated ? bootstrapDelegationHandler(element, handler, callable) : bootstrapHandler(element, callable);\n fn.delegationSelector = isDelegated ? 
handler : null;\n fn.callable = callable;\n fn.oneOff = oneOff;\n fn.uidEvent = uid;\n handlers[uid] = fn;\n element.addEventListener(typeEvent, fn, isDelegated);\n}\nfunction removeHandler(element, events, typeEvent, handler, delegationSelector) {\n const fn = findHandler(events[typeEvent], handler, delegationSelector);\n if (!fn) {\n return;\n }\n element.removeEventListener(typeEvent, fn, Boolean(delegationSelector));\n delete events[typeEvent][fn.uidEvent];\n}\nfunction removeNamespacedHandlers(element, events, typeEvent, namespace) {\n const storeElementEvent = events[typeEvent] || {};\n for (const [handlerKey, event] of Object.entries(storeElementEvent)) {\n if (handlerKey.includes(namespace)) {\n removeHandler(element, events, typeEvent, event.callable, event.delegationSelector);\n }\n }\n}\nfunction getTypeEvent(event) {\n // allow to get the native events from namespaced events ('click.bs.button' --> 'click')\n event = event.replace(stripNameRegex, '');\n return customEvents[event] || event;\n}\nconst EventHandler = {\n on(element, event, handler, delegationFunction) {\n addHandler(element, event, handler, delegationFunction, false);\n },\n one(element, event, handler, delegationFunction) {\n addHandler(element, event, handler, delegationFunction, true);\n },\n off(element, originalTypeEvent, handler, delegationFunction) {\n if (typeof originalTypeEvent !== 'string' || !element) {\n return;\n }\n const [isDelegated, callable, typeEvent] = normalizeParameters(originalTypeEvent, handler, delegationFunction);\n const inNamespace = typeEvent !== originalTypeEvent;\n const events = getElementEvents(element);\n const storeElementEvent = events[typeEvent] || {};\n const isNamespace = originalTypeEvent.startsWith('.');\n if (typeof callable !== 'undefined') {\n // Simplest case: handler is passed, remove that listener ONLY.\n if (!Object.keys(storeElementEvent).length) {\n return;\n }\n removeHandler(element, events, typeEvent, callable, isDelegated ? 
handler : null);\n return;\n }\n if (isNamespace) {\n for (const elementEvent of Object.keys(events)) {\n removeNamespacedHandlers(element, events, elementEvent, originalTypeEvent.slice(1));\n }\n }\n for (const [keyHandlers, event] of Object.entries(storeElementEvent)) {\n const handlerKey = keyHandlers.replace(stripUidRegex, '');\n if (!inNamespace || originalTypeEvent.includes(handlerKey)) {\n removeHandler(element, events, typeEvent, event.callable, event.delegationSelector);\n }\n }\n },\n trigger(element, event, args) {\n if (typeof event !== 'string' || !element) {\n return null;\n }\n const $ = getjQuery();\n const typeEvent = getTypeEvent(event);\n const inNamespace = event !== typeEvent;\n let jQueryEvent = null;\n let bubbles = true;\n let nativeDispatch = true;\n let defaultPrevented = false;\n if (inNamespace && $) {\n jQueryEvent = $.Event(event, args);\n $(element).trigger(jQueryEvent);\n bubbles = !jQueryEvent.isPropagationStopped();\n nativeDispatch = !jQueryEvent.isImmediatePropagationStopped();\n defaultPrevented = jQueryEvent.isDefaultPrevented();\n }\n const evt = hydrateObj(new Event(event, {\n bubbles,\n cancelable: true\n }), args);\n if (defaultPrevented) {\n evt.preventDefault();\n }\n if (nativeDispatch) {\n element.dispatchEvent(evt);\n }\n if (evt.defaultPrevented && jQueryEvent) {\n jQueryEvent.preventDefault();\n }\n return evt;\n }\n};\nfunction hydrateObj(obj, meta = {}) {\n for (const [key, value] of Object.entries(meta)) {\n try {\n obj[key] = value;\n } catch (_unused) {\n Object.defineProperty(obj, key, {\n configurable: true,\n get() {\n return value;\n }\n });\n }\n }\n return obj;\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dom/manipulator.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\nfunction normalizeData(value) {\n if (value === 'true') {\n return true;\n }\n if (value === 'false') {\n return false;\n }\n if (value === Number(value).toString()) {\n return Number(value);\n }\n if (value === '' || value === 'null') {\n return null;\n }\n if (typeof value !== 'string') {\n return value;\n }\n try {\n return JSON.parse(decodeURIComponent(value));\n } catch (_unused) {\n return value;\n }\n}\nfunction normalizeDataKey(key) {\n return key.replace(/[A-Z]/g, chr => `-${chr.toLowerCase()}`);\n}\nconst Manipulator = {\n setDataAttribute(element, key, value) {\n element.setAttribute(`data-bs-${normalizeDataKey(key)}`, value);\n },\n removeDataAttribute(element, key) {\n element.removeAttribute(`data-bs-${normalizeDataKey(key)}`);\n },\n getDataAttributes(element) {\n if (!element) {\n return {};\n }\n const attributes = {};\n const bsKeys = Object.keys(element.dataset).filter(key => key.startsWith('bs') && !key.startsWith('bsConfig'));\n for (const key of bsKeys) {\n let pureKey = key.replace(/^bs/, '');\n pureKey = pureKey.charAt(0).toLowerCase() + pureKey.slice(1, pureKey.length);\n attributes[pureKey] = normalizeData(element.dataset[key]);\n }\n return attributes;\n },\n getDataAttribute(element, key) {\n return normalizeData(element.getAttribute(`data-bs-${normalizeDataKey(key)}`));\n }\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/config.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n 
*/\n\n\n/**\n * Class definition\n */\n\nclass Config {\n // Getters\n static get Default() {\n return {};\n }\n static get DefaultType() {\n return {};\n }\n static get NAME() {\n throw new Error('You have to implement the static method \"NAME\", for each component!');\n }\n _getConfig(config) {\n config = this._mergeConfigObj(config);\n config = this._configAfterMerge(config);\n this._typeCheckConfig(config);\n return config;\n }\n _configAfterMerge(config) {\n return config;\n }\n _mergeConfigObj(config, element) {\n const jsonConfig = isElement(element) ? Manipulator.getDataAttribute(element, 'config') : {}; // try to parse\n\n return {\n ...this.constructor.Default,\n ...(typeof jsonConfig === 'object' ? jsonConfig : {}),\n ...(isElement(element) ? Manipulator.getDataAttributes(element) : {}),\n ...(typeof config === 'object' ? config : {})\n };\n }\n _typeCheckConfig(config, configTypes = this.constructor.DefaultType) {\n for (const [property, expectedTypes] of Object.entries(configTypes)) {\n const value = config[property];\n const valueType = isElement(value) ? 'element' : toType(value);\n if (!new RegExp(expectedTypes).test(valueType)) {\n throw new TypeError(`${this.constructor.NAME.toUpperCase()}: Option \"${property}\" provided type \"${valueType}\" but expected type \"${expectedTypes}\".`);\n }\n }\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap base-component.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst VERSION = '5.3.3';\n\n/**\n * Class definition\n */\n\nclass BaseComponent extends Config {\n constructor(element, config) {\n super();\n element = getElement(element);\n if (!element) {\n return;\n }\n this._element = element;\n this._config = this._getConfig(config);\n Data.set(this._element, this.constructor.DATA_KEY, this);\n }\n\n // Public\n dispose() {\n Data.remove(this._element, this.constructor.DATA_KEY);\n EventHandler.off(this._element, this.constructor.EVENT_KEY);\n for (const propertyName of Object.getOwnPropertyNames(this)) {\n this[propertyName] = null;\n }\n }\n _queueCallback(callback, element, isAnimated = true) {\n executeAfterTransition(callback, element, isAnimated);\n }\n _getConfig(config) {\n config = this._mergeConfigObj(config, this._element);\n config = this._configAfterMerge(config);\n this._typeCheckConfig(config);\n return config;\n }\n\n // Static\n static getInstance(element) {\n return Data.get(getElement(element), this.DATA_KEY);\n }\n static getOrCreateInstance(element, config = {}) {\n return this.getInstance(element) || new this(element, typeof config === 'object' ? 
config : null);\n }\n static get VERSION() {\n return VERSION;\n }\n static get DATA_KEY() {\n return `bs.${this.NAME}`;\n }\n static get EVENT_KEY() {\n return `.${this.DATA_KEY}`;\n }\n static eventName(name) {\n return `${name}${this.EVENT_KEY}`;\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dom/selector-engine.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\nconst getSelector = element => {\n let selector = element.getAttribute('data-bs-target');\n if (!selector || selector === '#') {\n let hrefAttribute = element.getAttribute('href');\n\n // The only valid content that could double as a selector are IDs or classes,\n // so everything starting with `#` or `.`. If a \"real\" URL is used as the selector,\n // `document.querySelector` will rightfully complain it is invalid.\n // See https://github.com/twbs/bootstrap/issues/32273\n if (!hrefAttribute || !hrefAttribute.includes('#') && !hrefAttribute.startsWith('.')) {\n return null;\n }\n\n // Just in case some CMS puts out a full URL with the anchor appended\n if (hrefAttribute.includes('#') && !hrefAttribute.startsWith('#')) {\n hrefAttribute = `#${hrefAttribute.split('#')[1]}`;\n }\n selector = hrefAttribute && hrefAttribute !== '#' ? hrefAttribute.trim() : null;\n }\n return selector ? selector.split(',').map(sel => parseSelector(sel)).join(',') : null;\n};\nconst SelectorEngine = {\n find(selector, element = document.documentElement) {\n return [].concat(...Element.prototype.querySelectorAll.call(element, selector));\n },\n findOne(selector, element = document.documentElement) {\n return Element.prototype.querySelector.call(element, selector);\n },\n children(element, selector) {\n return [].concat(...element.children).filter(child => child.matches(selector));\n },\n parents(element, selector) {\n const parents = [];\n let ancestor = element.parentNode.closest(selector);\n while (ancestor) {\n parents.push(ancestor);\n ancestor = ancestor.parentNode.closest(selector);\n }\n return parents;\n },\n prev(element, selector) {\n let previous = element.previousElementSibling;\n while (previous) {\n if (previous.matches(selector)) {\n return [previous];\n }\n previous = previous.previousElementSibling;\n }\n return [];\n },\n // TODO: this is now unused; remove later along with prev()\n next(element, selector) {\n let next = element.nextElementSibling;\n while (next) {\n if (next.matches(selector)) {\n return [next];\n }\n next = next.nextElementSibling;\n }\n return [];\n },\n focusableChildren(element) {\n const focusables = ['a', 'button', 'input', 'textarea', 'select', 'details', '[tabindex]', '[contenteditable=\"true\"]'].map(selector => `${selector}:not([tabindex^=\"-\"])`).join(',');\n return this.find(focusables, element).filter(el => !isDisabled(el) && isVisible(el));\n },\n getSelectorFromElement(element) {\n const selector = getSelector(element);\n if (selector) {\n return SelectorEngine.findOne(selector) ? selector : null;\n }\n return null;\n },\n getElementFromSelector(element) {\n const selector = getSelector(element);\n return selector ? SelectorEngine.findOne(selector) : null;\n },\n getMultipleElementsFromSelector(element) {\n const selector = getSelector(element);\n return selector ? 
SelectorEngine.find(selector) : [];\n }\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/component-functions.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\nconst enableDismissTrigger = (component, method = 'hide') => {\n const clickEvent = `click.dismiss${component.EVENT_KEY}`;\n const name = component.NAME;\n EventHandler.on(document, clickEvent, `[data-bs-dismiss=\"${name}\"]`, function (event) {\n if (['A', 'AREA'].includes(this.tagName)) {\n event.preventDefault();\n }\n if (isDisabled(this)) {\n return;\n }\n const target = SelectorEngine.getElementFromSelector(this) || this.closest(`.${name}`);\n const instance = component.getOrCreateInstance(target);\n\n // Method argument is left, for Alert and only, as it doesn't implement the 'hide' method\n instance[method]();\n });\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap alert.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$f = 'alert';\nconst DATA_KEY$a = 'bs.alert';\nconst EVENT_KEY$b = `.${DATA_KEY$a}`;\nconst EVENT_CLOSE = `close${EVENT_KEY$b}`;\nconst EVENT_CLOSED = `closed${EVENT_KEY$b}`;\nconst CLASS_NAME_FADE$5 = 'fade';\nconst CLASS_NAME_SHOW$8 = 'show';\n\n/**\n * Class definition\n */\n\nclass Alert extends BaseComponent {\n // Getters\n static get NAME() {\n return NAME$f;\n }\n\n // Public\n close() {\n const closeEvent = EventHandler.trigger(this._element, EVENT_CLOSE);\n if (closeEvent.defaultPrevented) {\n return;\n }\n this._element.classList.remove(CLASS_NAME_SHOW$8);\n const isAnimated = this._element.classList.contains(CLASS_NAME_FADE$5);\n this._queueCallback(() => this._destroyElement(), this._element, isAnimated);\n }\n\n // Private\n _destroyElement() {\n this._element.remove();\n EventHandler.trigger(this._element, EVENT_CLOSED);\n this.dispose();\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Alert.getOrCreateInstance(this);\n if (typeof config !== 'string') {\n return;\n }\n if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config](this);\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nenableDismissTrigger(Alert, 'close');\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Alert);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap button.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$e = 'button';\nconst DATA_KEY$9 = 'bs.button';\nconst EVENT_KEY$a = `.${DATA_KEY$9}`;\nconst DATA_API_KEY$6 = '.data-api';\nconst CLASS_NAME_ACTIVE$3 = 'active';\nconst SELECTOR_DATA_TOGGLE$5 = '[data-bs-toggle=\"button\"]';\nconst EVENT_CLICK_DATA_API$6 = `click${EVENT_KEY$a}${DATA_API_KEY$6}`;\n\n/**\n * Class definition\n */\n\nclass Button extends BaseComponent {\n // Getters\n static get NAME() {\n return NAME$e;\n }\n\n // Public\n toggle() {\n // Toggle class and sync the `aria-pressed` attribute with the return value of the `.toggle()` method\n 
this._element.setAttribute('aria-pressed', this._element.classList.toggle(CLASS_NAME_ACTIVE$3));\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Button.getOrCreateInstance(this);\n if (config === 'toggle') {\n data[config]();\n }\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$6, SELECTOR_DATA_TOGGLE$5, event => {\n event.preventDefault();\n const button = event.target.closest(SELECTOR_DATA_TOGGLE$5);\n const data = Button.getOrCreateInstance(button);\n data.toggle();\n});\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Button);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/swipe.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$d = 'swipe';\nconst EVENT_KEY$9 = '.bs.swipe';\nconst EVENT_TOUCHSTART = `touchstart${EVENT_KEY$9}`;\nconst EVENT_TOUCHMOVE = `touchmove${EVENT_KEY$9}`;\nconst EVENT_TOUCHEND = `touchend${EVENT_KEY$9}`;\nconst EVENT_POINTERDOWN = `pointerdown${EVENT_KEY$9}`;\nconst EVENT_POINTERUP = `pointerup${EVENT_KEY$9}`;\nconst POINTER_TYPE_TOUCH = 'touch';\nconst POINTER_TYPE_PEN = 'pen';\nconst CLASS_NAME_POINTER_EVENT = 'pointer-event';\nconst SWIPE_THRESHOLD = 40;\nconst Default$c = {\n endCallback: null,\n leftCallback: null,\n rightCallback: null\n};\nconst DefaultType$c = {\n endCallback: '(function|null)',\n leftCallback: '(function|null)',\n rightCallback: '(function|null)'\n};\n\n/**\n * Class definition\n */\n\nclass Swipe extends Config {\n constructor(element, config) {\n super();\n this._element = element;\n if (!element || !Swipe.isSupported()) {\n return;\n }\n this._config = this._getConfig(config);\n this._deltaX = 0;\n this._supportPointerEvents = Boolean(window.PointerEvent);\n this._initEvents();\n }\n\n // Getters\n static get Default() {\n return Default$c;\n }\n static get DefaultType() {\n return DefaultType$c;\n }\n static get NAME() {\n return NAME$d;\n }\n\n // Public\n dispose() {\n EventHandler.off(this._element, EVENT_KEY$9);\n }\n\n // Private\n _start(event) {\n if (!this._supportPointerEvents) {\n this._deltaX = event.touches[0].clientX;\n return;\n }\n if (this._eventIsPointerPenTouch(event)) {\n this._deltaX = event.clientX;\n }\n }\n _end(event) {\n if (this._eventIsPointerPenTouch(event)) {\n this._deltaX = event.clientX - this._deltaX;\n }\n this._handleSwipe();\n execute(this._config.endCallback);\n }\n _move(event) {\n this._deltaX = event.touches && event.touches.length > 1 ? 0 : event.touches[0].clientX - this._deltaX;\n }\n _handleSwipe() {\n const absDeltaX = Math.abs(this._deltaX);\n if (absDeltaX <= SWIPE_THRESHOLD) {\n return;\n }\n const direction = absDeltaX / this._deltaX;\n this._deltaX = 0;\n if (!direction) {\n return;\n }\n execute(direction > 0 ? 
this._config.rightCallback : this._config.leftCallback);\n }\n _initEvents() {\n if (this._supportPointerEvents) {\n EventHandler.on(this._element, EVENT_POINTERDOWN, event => this._start(event));\n EventHandler.on(this._element, EVENT_POINTERUP, event => this._end(event));\n this._element.classList.add(CLASS_NAME_POINTER_EVENT);\n } else {\n EventHandler.on(this._element, EVENT_TOUCHSTART, event => this._start(event));\n EventHandler.on(this._element, EVENT_TOUCHMOVE, event => this._move(event));\n EventHandler.on(this._element, EVENT_TOUCHEND, event => this._end(event));\n }\n }\n _eventIsPointerPenTouch(event) {\n return this._supportPointerEvents && (event.pointerType === POINTER_TYPE_PEN || event.pointerType === POINTER_TYPE_TOUCH);\n }\n\n // Static\n static isSupported() {\n return 'ontouchstart' in document.documentElement || navigator.maxTouchPoints > 0;\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap carousel.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$c = 'carousel';\nconst DATA_KEY$8 = 'bs.carousel';\nconst EVENT_KEY$8 = `.${DATA_KEY$8}`;\nconst DATA_API_KEY$5 = '.data-api';\nconst ARROW_LEFT_KEY$1 = 'ArrowLeft';\nconst ARROW_RIGHT_KEY$1 = 'ArrowRight';\nconst TOUCHEVENT_COMPAT_WAIT = 500; // Time for mouse compat events to fire after touch\n\nconst ORDER_NEXT = 'next';\nconst ORDER_PREV = 'prev';\nconst DIRECTION_LEFT = 'left';\nconst DIRECTION_RIGHT = 'right';\nconst EVENT_SLIDE = `slide${EVENT_KEY$8}`;\nconst EVENT_SLID = `slid${EVENT_KEY$8}`;\nconst EVENT_KEYDOWN$1 = `keydown${EVENT_KEY$8}`;\nconst EVENT_MOUSEENTER$1 = `mouseenter${EVENT_KEY$8}`;\nconst EVENT_MOUSELEAVE$1 = `mouseleave${EVENT_KEY$8}`;\nconst EVENT_DRAG_START = `dragstart${EVENT_KEY$8}`;\nconst EVENT_LOAD_DATA_API$3 = `load${EVENT_KEY$8}${DATA_API_KEY$5}`;\nconst EVENT_CLICK_DATA_API$5 = `click${EVENT_KEY$8}${DATA_API_KEY$5}`;\nconst CLASS_NAME_CAROUSEL = 'carousel';\nconst CLASS_NAME_ACTIVE$2 = 'active';\nconst CLASS_NAME_SLIDE = 'slide';\nconst CLASS_NAME_END = 'carousel-item-end';\nconst CLASS_NAME_START = 'carousel-item-start';\nconst CLASS_NAME_NEXT = 'carousel-item-next';\nconst CLASS_NAME_PREV = 'carousel-item-prev';\nconst SELECTOR_ACTIVE = '.active';\nconst SELECTOR_ITEM = '.carousel-item';\nconst SELECTOR_ACTIVE_ITEM = SELECTOR_ACTIVE + SELECTOR_ITEM;\nconst SELECTOR_ITEM_IMG = '.carousel-item img';\nconst SELECTOR_INDICATORS = '.carousel-indicators';\nconst SELECTOR_DATA_SLIDE = '[data-bs-slide], [data-bs-slide-to]';\nconst SELECTOR_DATA_RIDE = '[data-bs-ride=\"carousel\"]';\nconst KEY_TO_DIRECTION = {\n [ARROW_LEFT_KEY$1]: DIRECTION_RIGHT,\n [ARROW_RIGHT_KEY$1]: DIRECTION_LEFT\n};\nconst Default$b = {\n interval: 5000,\n keyboard: true,\n pause: 'hover',\n ride: false,\n touch: true,\n wrap: true\n};\nconst DefaultType$b = {\n interval: '(number|boolean)',\n // TODO:v6 remove boolean support\n keyboard: 'boolean',\n pause: '(string|boolean)',\n ride: '(boolean|string)',\n touch: 'boolean',\n wrap: 'boolean'\n};\n\n/**\n * Class definition\n */\n\nclass Carousel extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._interval = null;\n this._activeElement = null;\n this._isSliding = false;\n this.touchTimeout = null;\n this._swipeHelper = null;\n this._indicatorsElement = SelectorEngine.findOne(SELECTOR_INDICATORS, this._element);\n 
this._addEventListeners();\n if (this._config.ride === CLASS_NAME_CAROUSEL) {\n this.cycle();\n }\n }\n\n // Getters\n static get Default() {\n return Default$b;\n }\n static get DefaultType() {\n return DefaultType$b;\n }\n static get NAME() {\n return NAME$c;\n }\n\n // Public\n next() {\n this._slide(ORDER_NEXT);\n }\n nextWhenVisible() {\n // FIXME TODO use `document.visibilityState`\n // Don't call next when the page isn't visible\n // or the carousel or its parent isn't visible\n if (!document.hidden && isVisible(this._element)) {\n this.next();\n }\n }\n prev() {\n this._slide(ORDER_PREV);\n }\n pause() {\n if (this._isSliding) {\n triggerTransitionEnd(this._element);\n }\n this._clearInterval();\n }\n cycle() {\n this._clearInterval();\n this._updateInterval();\n this._interval = setInterval(() => this.nextWhenVisible(), this._config.interval);\n }\n _maybeEnableCycle() {\n if (!this._config.ride) {\n return;\n }\n if (this._isSliding) {\n EventHandler.one(this._element, EVENT_SLID, () => this.cycle());\n return;\n }\n this.cycle();\n }\n to(index) {\n const items = this._getItems();\n if (index > items.length - 1 || index < 0) {\n return;\n }\n if (this._isSliding) {\n EventHandler.one(this._element, EVENT_SLID, () => this.to(index));\n return;\n }\n const activeIndex = this._getItemIndex(this._getActive());\n if (activeIndex === index) {\n return;\n }\n const order = index > activeIndex ? ORDER_NEXT : ORDER_PREV;\n this._slide(order, items[index]);\n }\n dispose() {\n if (this._swipeHelper) {\n this._swipeHelper.dispose();\n }\n super.dispose();\n }\n\n // Private\n _configAfterMerge(config) {\n config.defaultInterval = config.interval;\n return config;\n }\n _addEventListeners() {\n if (this._config.keyboard) {\n EventHandler.on(this._element, EVENT_KEYDOWN$1, event => this._keydown(event));\n }\n if (this._config.pause === 'hover') {\n EventHandler.on(this._element, EVENT_MOUSEENTER$1, () => this.pause());\n EventHandler.on(this._element, EVENT_MOUSELEAVE$1, () => this._maybeEnableCycle());\n }\n if (this._config.touch && Swipe.isSupported()) {\n this._addTouchEventListeners();\n }\n }\n _addTouchEventListeners() {\n for (const img of SelectorEngine.find(SELECTOR_ITEM_IMG, this._element)) {\n EventHandler.on(img, EVENT_DRAG_START, event => event.preventDefault());\n }\n const endCallBack = () => {\n if (this._config.pause !== 'hover') {\n return;\n }\n\n // If it's a touch-enabled device, mouseenter/leave are fired as\n // part of the mouse compatibility events on first tap - the carousel\n // would stop cycling until user tapped out of it;\n // here, we listen for touchend, explicitly pause the carousel\n // (as if it's the second time we tap on it, mouseenter compat event\n // is NOT fired) and after a timeout (to allow for mouse compatibility\n // events to fire) we explicitly restart cycling\n\n this.pause();\n if (this.touchTimeout) {\n clearTimeout(this.touchTimeout);\n }\n this.touchTimeout = setTimeout(() => this._maybeEnableCycle(), TOUCHEVENT_COMPAT_WAIT + this._config.interval);\n };\n const swipeConfig = {\n leftCallback: () => this._slide(this._directionToOrder(DIRECTION_LEFT)),\n rightCallback: () => this._slide(this._directionToOrder(DIRECTION_RIGHT)),\n endCallback: endCallBack\n };\n this._swipeHelper = new Swipe(this._element, swipeConfig);\n }\n _keydown(event) {\n if (/input|textarea/i.test(event.target.tagName)) {\n return;\n }\n const direction = KEY_TO_DIRECTION[event.key];\n if (direction) {\n event.preventDefault();\n 
this._slide(this._directionToOrder(direction));\n }\n }\n _getItemIndex(element) {\n return this._getItems().indexOf(element);\n }\n _setActiveIndicatorElement(index) {\n if (!this._indicatorsElement) {\n return;\n }\n const activeIndicator = SelectorEngine.findOne(SELECTOR_ACTIVE, this._indicatorsElement);\n activeIndicator.classList.remove(CLASS_NAME_ACTIVE$2);\n activeIndicator.removeAttribute('aria-current');\n const newActiveIndicator = SelectorEngine.findOne(`[data-bs-slide-to=\"${index}\"]`, this._indicatorsElement);\n if (newActiveIndicator) {\n newActiveIndicator.classList.add(CLASS_NAME_ACTIVE$2);\n newActiveIndicator.setAttribute('aria-current', 'true');\n }\n }\n _updateInterval() {\n const element = this._activeElement || this._getActive();\n if (!element) {\n return;\n }\n const elementInterval = Number.parseInt(element.getAttribute('data-bs-interval'), 10);\n this._config.interval = elementInterval || this._config.defaultInterval;\n }\n _slide(order, element = null) {\n if (this._isSliding) {\n return;\n }\n const activeElement = this._getActive();\n const isNext = order === ORDER_NEXT;\n const nextElement = element || getNextActiveElement(this._getItems(), activeElement, isNext, this._config.wrap);\n if (nextElement === activeElement) {\n return;\n }\n const nextElementIndex = this._getItemIndex(nextElement);\n const triggerEvent = eventName => {\n return EventHandler.trigger(this._element, eventName, {\n relatedTarget: nextElement,\n direction: this._orderToDirection(order),\n from: this._getItemIndex(activeElement),\n to: nextElementIndex\n });\n };\n const slideEvent = triggerEvent(EVENT_SLIDE);\n if (slideEvent.defaultPrevented) {\n return;\n }\n if (!activeElement || !nextElement) {\n // Some weirdness is happening, so we bail\n // TODO: change tests that use empty divs to avoid this check\n return;\n }\n const isCycling = Boolean(this._interval);\n this.pause();\n this._isSliding = true;\n this._setActiveIndicatorElement(nextElementIndex);\n this._activeElement = nextElement;\n const directionalClassName = isNext ? CLASS_NAME_START : CLASS_NAME_END;\n const orderClassName = isNext ? CLASS_NAME_NEXT : CLASS_NAME_PREV;\n nextElement.classList.add(orderClassName);\n reflow(nextElement);\n activeElement.classList.add(directionalClassName);\n nextElement.classList.add(directionalClassName);\n const completeCallBack = () => {\n nextElement.classList.remove(directionalClassName, orderClassName);\n nextElement.classList.add(CLASS_NAME_ACTIVE$2);\n activeElement.classList.remove(CLASS_NAME_ACTIVE$2, orderClassName, directionalClassName);\n this._isSliding = false;\n triggerEvent(EVENT_SLID);\n };\n this._queueCallback(completeCallBack, activeElement, this._isAnimated());\n if (isCycling) {\n this.cycle();\n }\n }\n _isAnimated() {\n return this._element.classList.contains(CLASS_NAME_SLIDE);\n }\n _getActive() {\n return SelectorEngine.findOne(SELECTOR_ACTIVE_ITEM, this._element);\n }\n _getItems() {\n return SelectorEngine.find(SELECTOR_ITEM, this._element);\n }\n _clearInterval() {\n if (this._interval) {\n clearInterval(this._interval);\n this._interval = null;\n }\n }\n _directionToOrder(direction) {\n if (isRTL()) {\n return direction === DIRECTION_LEFT ? ORDER_PREV : ORDER_NEXT;\n }\n return direction === DIRECTION_LEFT ? ORDER_NEXT : ORDER_PREV;\n }\n _orderToDirection(order) {\n if (isRTL()) {\n return order === ORDER_PREV ? DIRECTION_LEFT : DIRECTION_RIGHT;\n }\n return order === ORDER_PREV ? 
DIRECTION_RIGHT : DIRECTION_LEFT;\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Carousel.getOrCreateInstance(this, config);\n if (typeof config === 'number') {\n data.to(config);\n return;\n }\n if (typeof config === 'string') {\n if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n }\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$5, SELECTOR_DATA_SLIDE, function (event) {\n const target = SelectorEngine.getElementFromSelector(this);\n if (!target || !target.classList.contains(CLASS_NAME_CAROUSEL)) {\n return;\n }\n event.preventDefault();\n const carousel = Carousel.getOrCreateInstance(target);\n const slideIndex = this.getAttribute('data-bs-slide-to');\n if (slideIndex) {\n carousel.to(slideIndex);\n carousel._maybeEnableCycle();\n return;\n }\n if (Manipulator.getDataAttribute(this, 'slide') === 'next') {\n carousel.next();\n carousel._maybeEnableCycle();\n return;\n }\n carousel.prev();\n carousel._maybeEnableCycle();\n});\nEventHandler.on(window, EVENT_LOAD_DATA_API$3, () => {\n const carousels = SelectorEngine.find(SELECTOR_DATA_RIDE);\n for (const carousel of carousels) {\n Carousel.getOrCreateInstance(carousel);\n }\n});\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Carousel);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap collapse.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$b = 'collapse';\nconst DATA_KEY$7 = 'bs.collapse';\nconst EVENT_KEY$7 = `.${DATA_KEY$7}`;\nconst DATA_API_KEY$4 = '.data-api';\nconst EVENT_SHOW$6 = `show${EVENT_KEY$7}`;\nconst EVENT_SHOWN$6 = `shown${EVENT_KEY$7}`;\nconst EVENT_HIDE$6 = `hide${EVENT_KEY$7}`;\nconst EVENT_HIDDEN$6 = `hidden${EVENT_KEY$7}`;\nconst EVENT_CLICK_DATA_API$4 = `click${EVENT_KEY$7}${DATA_API_KEY$4}`;\nconst CLASS_NAME_SHOW$7 = 'show';\nconst CLASS_NAME_COLLAPSE = 'collapse';\nconst CLASS_NAME_COLLAPSING = 'collapsing';\nconst CLASS_NAME_COLLAPSED = 'collapsed';\nconst CLASS_NAME_DEEPER_CHILDREN = `:scope .${CLASS_NAME_COLLAPSE} .${CLASS_NAME_COLLAPSE}`;\nconst CLASS_NAME_HORIZONTAL = 'collapse-horizontal';\nconst WIDTH = 'width';\nconst HEIGHT = 'height';\nconst SELECTOR_ACTIVES = '.collapse.show, .collapse.collapsing';\nconst SELECTOR_DATA_TOGGLE$4 = '[data-bs-toggle=\"collapse\"]';\nconst Default$a = {\n parent: null,\n toggle: true\n};\nconst DefaultType$a = {\n parent: '(null|element)',\n toggle: 'boolean'\n};\n\n/**\n * Class definition\n */\n\nclass Collapse extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._isTransitioning = false;\n this._triggerArray = [];\n const toggleList = SelectorEngine.find(SELECTOR_DATA_TOGGLE$4);\n for (const elem of toggleList) {\n const selector = SelectorEngine.getSelectorFromElement(elem);\n const filterElement = SelectorEngine.find(selector).filter(foundElement => foundElement === this._element);\n if (selector !== null && filterElement.length) {\n this._triggerArray.push(elem);\n }\n }\n this._initializeChildren();\n if (!this._config.parent) {\n this._addAriaAndCollapsedClass(this._triggerArray, this._isShown());\n }\n if (this._config.toggle) {\n this.toggle();\n }\n }\n\n // Getters\n static get Default() {\n 
return Default$a;\n }\n static get DefaultType() {\n return DefaultType$a;\n }\n static get NAME() {\n return NAME$b;\n }\n\n // Public\n toggle() {\n if (this._isShown()) {\n this.hide();\n } else {\n this.show();\n }\n }\n show() {\n if (this._isTransitioning || this._isShown()) {\n return;\n }\n let activeChildren = [];\n\n // find active children\n if (this._config.parent) {\n activeChildren = this._getFirstLevelChildren(SELECTOR_ACTIVES).filter(element => element !== this._element).map(element => Collapse.getOrCreateInstance(element, {\n toggle: false\n }));\n }\n if (activeChildren.length && activeChildren[0]._isTransitioning) {\n return;\n }\n const startEvent = EventHandler.trigger(this._element, EVENT_SHOW$6);\n if (startEvent.defaultPrevented) {\n return;\n }\n for (const activeInstance of activeChildren) {\n activeInstance.hide();\n }\n const dimension = this._getDimension();\n this._element.classList.remove(CLASS_NAME_COLLAPSE);\n this._element.classList.add(CLASS_NAME_COLLAPSING);\n this._element.style[dimension] = 0;\n this._addAriaAndCollapsedClass(this._triggerArray, true);\n this._isTransitioning = true;\n const complete = () => {\n this._isTransitioning = false;\n this._element.classList.remove(CLASS_NAME_COLLAPSING);\n this._element.classList.add(CLASS_NAME_COLLAPSE, CLASS_NAME_SHOW$7);\n this._element.style[dimension] = '';\n EventHandler.trigger(this._element, EVENT_SHOWN$6);\n };\n const capitalizedDimension = dimension[0].toUpperCase() + dimension.slice(1);\n const scrollSize = `scroll${capitalizedDimension}`;\n this._queueCallback(complete, this._element, true);\n this._element.style[dimension] = `${this._element[scrollSize]}px`;\n }\n hide() {\n if (this._isTransitioning || !this._isShown()) {\n return;\n }\n const startEvent = EventHandler.trigger(this._element, EVENT_HIDE$6);\n if (startEvent.defaultPrevented) {\n return;\n }\n const dimension = this._getDimension();\n this._element.style[dimension] = `${this._element.getBoundingClientRect()[dimension]}px`;\n reflow(this._element);\n this._element.classList.add(CLASS_NAME_COLLAPSING);\n this._element.classList.remove(CLASS_NAME_COLLAPSE, CLASS_NAME_SHOW$7);\n for (const trigger of this._triggerArray) {\n const element = SelectorEngine.getElementFromSelector(trigger);\n if (element && !this._isShown(element)) {\n this._addAriaAndCollapsedClass([trigger], false);\n }\n }\n this._isTransitioning = true;\n const complete = () => {\n this._isTransitioning = false;\n this._element.classList.remove(CLASS_NAME_COLLAPSING);\n this._element.classList.add(CLASS_NAME_COLLAPSE);\n EventHandler.trigger(this._element, EVENT_HIDDEN$6);\n };\n this._element.style[dimension] = '';\n this._queueCallback(complete, this._element, true);\n }\n _isShown(element = this._element) {\n return element.classList.contains(CLASS_NAME_SHOW$7);\n }\n\n // Private\n _configAfterMerge(config) {\n config.toggle = Boolean(config.toggle); // Coerce string values\n config.parent = getElement(config.parent);\n return config;\n }\n _getDimension() {\n return this._element.classList.contains(CLASS_NAME_HORIZONTAL) ? 
WIDTH : HEIGHT;\n }\n _initializeChildren() {\n if (!this._config.parent) {\n return;\n }\n const children = this._getFirstLevelChildren(SELECTOR_DATA_TOGGLE$4);\n for (const element of children) {\n const selected = SelectorEngine.getElementFromSelector(element);\n if (selected) {\n this._addAriaAndCollapsedClass([element], this._isShown(selected));\n }\n }\n }\n _getFirstLevelChildren(selector) {\n const children = SelectorEngine.find(CLASS_NAME_DEEPER_CHILDREN, this._config.parent);\n // remove children if greater depth\n return SelectorEngine.find(selector, this._config.parent).filter(element => !children.includes(element));\n }\n _addAriaAndCollapsedClass(triggerArray, isOpen) {\n if (!triggerArray.length) {\n return;\n }\n for (const element of triggerArray) {\n element.classList.toggle(CLASS_NAME_COLLAPSED, !isOpen);\n element.setAttribute('aria-expanded', isOpen);\n }\n }\n\n // Static\n static jQueryInterface(config) {\n const _config = {};\n if (typeof config === 'string' && /show|hide/.test(config)) {\n _config.toggle = false;\n }\n return this.each(function () {\n const data = Collapse.getOrCreateInstance(this, _config);\n if (typeof config === 'string') {\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n }\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$4, SELECTOR_DATA_TOGGLE$4, function (event) {\n // preventDefault only for elements (which change the URL) not inside the collapsible element\n if (event.target.tagName === 'A' || event.delegateTarget && event.delegateTarget.tagName === 'A') {\n event.preventDefault();\n }\n for (const element of SelectorEngine.getMultipleElementsFromSelector(this)) {\n Collapse.getOrCreateInstance(element, {\n toggle: false\n }).toggle();\n }\n});\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Collapse);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dropdown.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$a = 'dropdown';\nconst DATA_KEY$6 = 'bs.dropdown';\nconst EVENT_KEY$6 = `.${DATA_KEY$6}`;\nconst DATA_API_KEY$3 = '.data-api';\nconst ESCAPE_KEY$2 = 'Escape';\nconst TAB_KEY$1 = 'Tab';\nconst ARROW_UP_KEY$1 = 'ArrowUp';\nconst ARROW_DOWN_KEY$1 = 'ArrowDown';\nconst RIGHT_MOUSE_BUTTON = 2; // MouseEvent.button value for the secondary button, usually the right button\n\nconst EVENT_HIDE$5 = `hide${EVENT_KEY$6}`;\nconst EVENT_HIDDEN$5 = `hidden${EVENT_KEY$6}`;\nconst EVENT_SHOW$5 = `show${EVENT_KEY$6}`;\nconst EVENT_SHOWN$5 = `shown${EVENT_KEY$6}`;\nconst EVENT_CLICK_DATA_API$3 = `click${EVENT_KEY$6}${DATA_API_KEY$3}`;\nconst EVENT_KEYDOWN_DATA_API = `keydown${EVENT_KEY$6}${DATA_API_KEY$3}`;\nconst EVENT_KEYUP_DATA_API = `keyup${EVENT_KEY$6}${DATA_API_KEY$3}`;\nconst CLASS_NAME_SHOW$6 = 'show';\nconst CLASS_NAME_DROPUP = 'dropup';\nconst CLASS_NAME_DROPEND = 'dropend';\nconst CLASS_NAME_DROPSTART = 'dropstart';\nconst CLASS_NAME_DROPUP_CENTER = 'dropup-center';\nconst CLASS_NAME_DROPDOWN_CENTER = 'dropdown-center';\nconst SELECTOR_DATA_TOGGLE$3 = '[data-bs-toggle=\"dropdown\"]:not(.disabled):not(:disabled)';\nconst SELECTOR_DATA_TOGGLE_SHOWN = `${SELECTOR_DATA_TOGGLE$3}.${CLASS_NAME_SHOW$6}`;\nconst SELECTOR_MENU = '.dropdown-menu';\nconst SELECTOR_NAVBAR = '.navbar';\nconst SELECTOR_NAVBAR_NAV = 
'.navbar-nav';\nconst SELECTOR_VISIBLE_ITEMS = '.dropdown-menu .dropdown-item:not(.disabled):not(:disabled)';\nconst PLACEMENT_TOP = isRTL() ? 'top-end' : 'top-start';\nconst PLACEMENT_TOPEND = isRTL() ? 'top-start' : 'top-end';\nconst PLACEMENT_BOTTOM = isRTL() ? 'bottom-end' : 'bottom-start';\nconst PLACEMENT_BOTTOMEND = isRTL() ? 'bottom-start' : 'bottom-end';\nconst PLACEMENT_RIGHT = isRTL() ? 'left-start' : 'right-start';\nconst PLACEMENT_LEFT = isRTL() ? 'right-start' : 'left-start';\nconst PLACEMENT_TOPCENTER = 'top';\nconst PLACEMENT_BOTTOMCENTER = 'bottom';\nconst Default$9 = {\n autoClose: true,\n boundary: 'clippingParents',\n display: 'dynamic',\n offset: [0, 2],\n popperConfig: null,\n reference: 'toggle'\n};\nconst DefaultType$9 = {\n autoClose: '(boolean|string)',\n boundary: '(string|element)',\n display: 'string',\n offset: '(array|string|function)',\n popperConfig: '(null|object|function)',\n reference: '(string|element|object)'\n};\n\n/**\n * Class definition\n */\n\nclass Dropdown extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._popper = null;\n this._parent = this._element.parentNode; // dropdown wrapper\n // TODO: v6 revert #37011 & change markup https://getbootstrap.com/docs/5.3/forms/input-group/\n this._menu = SelectorEngine.next(this._element, SELECTOR_MENU)[0] || SelectorEngine.prev(this._element, SELECTOR_MENU)[0] || SelectorEngine.findOne(SELECTOR_MENU, this._parent);\n this._inNavbar = this._detectNavbar();\n }\n\n // Getters\n static get Default() {\n return Default$9;\n }\n static get DefaultType() {\n return DefaultType$9;\n }\n static get NAME() {\n return NAME$a;\n }\n\n // Public\n toggle() {\n return this._isShown() ? this.hide() : this.show();\n }\n show() {\n if (isDisabled(this._element) || this._isShown()) {\n return;\n }\n const relatedTarget = {\n relatedTarget: this._element\n };\n const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$5, relatedTarget);\n if (showEvent.defaultPrevented) {\n return;\n }\n this._createPopper();\n\n // If this is a touch-enabled device we add extra\n // empty mouseover listeners to the body's immediate children;\n // only needed because of broken event delegation on iOS\n // https://www.quirksmode.org/blog/archives/2014/02/mouse_event_bub.html\n if ('ontouchstart' in document.documentElement && !this._parent.closest(SELECTOR_NAVBAR_NAV)) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.on(element, 'mouseover', noop);\n }\n }\n this._element.focus();\n this._element.setAttribute('aria-expanded', true);\n this._menu.classList.add(CLASS_NAME_SHOW$6);\n this._element.classList.add(CLASS_NAME_SHOW$6);\n EventHandler.trigger(this._element, EVENT_SHOWN$5, relatedTarget);\n }\n hide() {\n if (isDisabled(this._element) || !this._isShown()) {\n return;\n }\n const relatedTarget = {\n relatedTarget: this._element\n };\n this._completeHide(relatedTarget);\n }\n dispose() {\n if (this._popper) {\n this._popper.destroy();\n }\n super.dispose();\n }\n update() {\n this._inNavbar = this._detectNavbar();\n if (this._popper) {\n this._popper.update();\n }\n }\n\n // Private\n _completeHide(relatedTarget) {\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$5, relatedTarget);\n if (hideEvent.defaultPrevented) {\n return;\n }\n\n // If this is a touch-enabled device we remove the extra\n // empty mouseover listeners we added for iOS support\n if ('ontouchstart' in document.documentElement) {\n for (const element of 
[].concat(...document.body.children)) {\n EventHandler.off(element, 'mouseover', noop);\n }\n }\n if (this._popper) {\n this._popper.destroy();\n }\n this._menu.classList.remove(CLASS_NAME_SHOW$6);\n this._element.classList.remove(CLASS_NAME_SHOW$6);\n this._element.setAttribute('aria-expanded', 'false');\n Manipulator.removeDataAttribute(this._menu, 'popper');\n EventHandler.trigger(this._element, EVENT_HIDDEN$5, relatedTarget);\n }\n _getConfig(config) {\n config = super._getConfig(config);\n if (typeof config.reference === 'object' && !isElement(config.reference) && typeof config.reference.getBoundingClientRect !== 'function') {\n // Popper virtual elements require a getBoundingClientRect method\n throw new TypeError(`${NAME$a.toUpperCase()}: Option \"reference\" provided type \"object\" without a required \"getBoundingClientRect\" method.`);\n }\n return config;\n }\n _createPopper() {\n if (typeof Popper === 'undefined') {\n throw new TypeError('Bootstrap\\'s dropdowns require Popper (https://popper.js.org)');\n }\n let referenceElement = this._element;\n if (this._config.reference === 'parent') {\n referenceElement = this._parent;\n } else if (isElement(this._config.reference)) {\n referenceElement = getElement(this._config.reference);\n } else if (typeof this._config.reference === 'object') {\n referenceElement = this._config.reference;\n }\n const popperConfig = this._getPopperConfig();\n this._popper = Popper.createPopper(referenceElement, this._menu, popperConfig);\n }\n _isShown() {\n return this._menu.classList.contains(CLASS_NAME_SHOW$6);\n }\n _getPlacement() {\n const parentDropdown = this._parent;\n if (parentDropdown.classList.contains(CLASS_NAME_DROPEND)) {\n return PLACEMENT_RIGHT;\n }\n if (parentDropdown.classList.contains(CLASS_NAME_DROPSTART)) {\n return PLACEMENT_LEFT;\n }\n if (parentDropdown.classList.contains(CLASS_NAME_DROPUP_CENTER)) {\n return PLACEMENT_TOPCENTER;\n }\n if (parentDropdown.classList.contains(CLASS_NAME_DROPDOWN_CENTER)) {\n return PLACEMENT_BOTTOMCENTER;\n }\n\n // We need to trim the value because custom properties can also include spaces\n const isEnd = getComputedStyle(this._menu).getPropertyValue('--bs-position').trim() === 'end';\n if (parentDropdown.classList.contains(CLASS_NAME_DROPUP)) {\n return isEnd ? PLACEMENT_TOPEND : PLACEMENT_TOP;\n }\n return isEnd ? 
PLACEMENT_BOTTOMEND : PLACEMENT_BOTTOM;\n }\n _detectNavbar() {\n return this._element.closest(SELECTOR_NAVBAR) !== null;\n }\n _getOffset() {\n const {\n offset\n } = this._config;\n if (typeof offset === 'string') {\n return offset.split(',').map(value => Number.parseInt(value, 10));\n }\n if (typeof offset === 'function') {\n return popperData => offset(popperData, this._element);\n }\n return offset;\n }\n _getPopperConfig() {\n const defaultBsPopperConfig = {\n placement: this._getPlacement(),\n modifiers: [{\n name: 'preventOverflow',\n options: {\n boundary: this._config.boundary\n }\n }, {\n name: 'offset',\n options: {\n offset: this._getOffset()\n }\n }]\n };\n\n // Disable Popper if we have a static display or Dropdown is in Navbar\n if (this._inNavbar || this._config.display === 'static') {\n Manipulator.setDataAttribute(this._menu, 'popper', 'static'); // TODO: v6 remove\n defaultBsPopperConfig.modifiers = [{\n name: 'applyStyles',\n enabled: false\n }];\n }\n return {\n ...defaultBsPopperConfig,\n ...execute(this._config.popperConfig, [defaultBsPopperConfig])\n };\n }\n _selectMenuItem({\n key,\n target\n }) {\n const items = SelectorEngine.find(SELECTOR_VISIBLE_ITEMS, this._menu).filter(element => isVisible(element));\n if (!items.length) {\n return;\n }\n\n // if target isn't included in items (e.g. when expanding the dropdown)\n // allow cycling to get the last item in case key equals ARROW_UP_KEY\n getNextActiveElement(items, target, key === ARROW_DOWN_KEY$1, !items.includes(target)).focus();\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Dropdown.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n });\n }\n static clearMenus(event) {\n if (event.button === RIGHT_MOUSE_BUTTON || event.type === 'keyup' && event.key !== TAB_KEY$1) {\n return;\n }\n const openToggles = SelectorEngine.find(SELECTOR_DATA_TOGGLE_SHOWN);\n for (const toggle of openToggles) {\n const context = Dropdown.getInstance(toggle);\n if (!context || context._config.autoClose === false) {\n continue;\n }\n const composedPath = event.composedPath();\n const isMenuTarget = composedPath.includes(context._menu);\n if (composedPath.includes(context._element) || context._config.autoClose === 'inside' && !isMenuTarget || context._config.autoClose === 'outside' && isMenuTarget) {\n continue;\n }\n\n // Tab navigation through the dropdown menu or events from contained inputs shouldn't close the menu\n if (context._menu.contains(event.target) && (event.type === 'keyup' && event.key === TAB_KEY$1 || /input|select|option|textarea|form/i.test(event.target.tagName))) {\n continue;\n }\n const relatedTarget = {\n relatedTarget: context._element\n };\n if (event.type === 'click') {\n relatedTarget.clickEvent = event;\n }\n context._completeHide(relatedTarget);\n }\n }\n static dataApiKeydownHandler(event) {\n // If not an UP | DOWN | ESCAPE key => not a dropdown command\n // If input/textarea && if key is other than ESCAPE => not a dropdown command\n\n const isInput = /input|textarea/i.test(event.target.tagName);\n const isEscapeEvent = event.key === ESCAPE_KEY$2;\n const isUpOrDownEvent = [ARROW_UP_KEY$1, ARROW_DOWN_KEY$1].includes(event.key);\n if (!isUpOrDownEvent && !isEscapeEvent) {\n return;\n }\n if (isInput && !isEscapeEvent) {\n return;\n }\n event.preventDefault();\n\n // TODO: v6 revert #37011 & change 
markup https://getbootstrap.com/docs/5.3/forms/input-group/\n const getToggleButton = this.matches(SELECTOR_DATA_TOGGLE$3) ? this : SelectorEngine.prev(this, SELECTOR_DATA_TOGGLE$3)[0] || SelectorEngine.next(this, SELECTOR_DATA_TOGGLE$3)[0] || SelectorEngine.findOne(SELECTOR_DATA_TOGGLE$3, event.delegateTarget.parentNode);\n const instance = Dropdown.getOrCreateInstance(getToggleButton);\n if (isUpOrDownEvent) {\n event.stopPropagation();\n instance.show();\n instance._selectMenuItem(event);\n return;\n }\n if (instance._isShown()) {\n // else is escape and we check if it is shown\n event.stopPropagation();\n instance.hide();\n getToggleButton.focus();\n }\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_KEYDOWN_DATA_API, SELECTOR_DATA_TOGGLE$3, Dropdown.dataApiKeydownHandler);\nEventHandler.on(document, EVENT_KEYDOWN_DATA_API, SELECTOR_MENU, Dropdown.dataApiKeydownHandler);\nEventHandler.on(document, EVENT_CLICK_DATA_API$3, Dropdown.clearMenus);\nEventHandler.on(document, EVENT_KEYUP_DATA_API, Dropdown.clearMenus);\nEventHandler.on(document, EVENT_CLICK_DATA_API$3, SELECTOR_DATA_TOGGLE$3, function (event) {\n event.preventDefault();\n Dropdown.getOrCreateInstance(this).toggle();\n});\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Dropdown);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/backdrop.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$9 = 'backdrop';\nconst CLASS_NAME_FADE$4 = 'fade';\nconst CLASS_NAME_SHOW$5 = 'show';\nconst EVENT_MOUSEDOWN = `mousedown.bs.${NAME$9}`;\nconst Default$8 = {\n className: 'modal-backdrop',\n clickCallback: null,\n isAnimated: false,\n isVisible: true,\n // if false, we use the backdrop helper without adding any element to the dom\n rootElement: 'body' // give the choice to place backdrop under different elements\n};\nconst DefaultType$8 = {\n className: 'string',\n clickCallback: '(function|null)',\n isAnimated: 'boolean',\n isVisible: 'boolean',\n rootElement: '(element|string)'\n};\n\n/**\n * Class definition\n */\n\nclass Backdrop extends Config {\n constructor(config) {\n super();\n this._config = this._getConfig(config);\n this._isAppended = false;\n this._element = null;\n }\n\n // Getters\n static get Default() {\n return Default$8;\n }\n static get DefaultType() {\n return DefaultType$8;\n }\n static get NAME() {\n return NAME$9;\n }\n\n // Public\n show(callback) {\n if (!this._config.isVisible) {\n execute(callback);\n return;\n }\n this._append();\n const element = this._getElement();\n if (this._config.isAnimated) {\n reflow(element);\n }\n element.classList.add(CLASS_NAME_SHOW$5);\n this._emulateAnimation(() => {\n execute(callback);\n });\n }\n hide(callback) {\n if (!this._config.isVisible) {\n execute(callback);\n return;\n }\n this._getElement().classList.remove(CLASS_NAME_SHOW$5);\n this._emulateAnimation(() => {\n this.dispose();\n execute(callback);\n });\n }\n dispose() {\n if (!this._isAppended) {\n return;\n }\n EventHandler.off(this._element, EVENT_MOUSEDOWN);\n this._element.remove();\n this._isAppended = false;\n }\n\n // Private\n _getElement() {\n if (!this._element) {\n const backdrop = document.createElement('div');\n backdrop.className = this._config.className;\n if (this._config.isAnimated) {\n backdrop.classList.add(CLASS_NAME_FADE$4);\n }\n this._element = 
backdrop;\n }\n return this._element;\n }\n _configAfterMerge(config) {\n // use getElement() with the default \"body\" to get a fresh Element on each instantiation\n config.rootElement = getElement(config.rootElement);\n return config;\n }\n _append() {\n if (this._isAppended) {\n return;\n }\n const element = this._getElement();\n this._config.rootElement.append(element);\n EventHandler.on(element, EVENT_MOUSEDOWN, () => {\n execute(this._config.clickCallback);\n });\n this._isAppended = true;\n }\n _emulateAnimation(callback) {\n executeAfterTransition(callback, this._getElement(), this._config.isAnimated);\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/focustrap.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$8 = 'focustrap';\nconst DATA_KEY$5 = 'bs.focustrap';\nconst EVENT_KEY$5 = `.${DATA_KEY$5}`;\nconst EVENT_FOCUSIN$2 = `focusin${EVENT_KEY$5}`;\nconst EVENT_KEYDOWN_TAB = `keydown.tab${EVENT_KEY$5}`;\nconst TAB_KEY = 'Tab';\nconst TAB_NAV_FORWARD = 'forward';\nconst TAB_NAV_BACKWARD = 'backward';\nconst Default$7 = {\n autofocus: true,\n trapElement: null // The element to trap focus inside of\n};\nconst DefaultType$7 = {\n autofocus: 'boolean',\n trapElement: 'element'\n};\n\n/**\n * Class definition\n */\n\nclass FocusTrap extends Config {\n constructor(config) {\n super();\n this._config = this._getConfig(config);\n this._isActive = false;\n this._lastTabNavDirection = null;\n }\n\n // Getters\n static get Default() {\n return Default$7;\n }\n static get DefaultType() {\n return DefaultType$7;\n }\n static get NAME() {\n return NAME$8;\n }\n\n // Public\n activate() {\n if (this._isActive) {\n return;\n }\n if (this._config.autofocus) {\n this._config.trapElement.focus();\n }\n EventHandler.off(document, EVENT_KEY$5); // guard against infinite focus loop\n EventHandler.on(document, EVENT_FOCUSIN$2, event => this._handleFocusin(event));\n EventHandler.on(document, EVENT_KEYDOWN_TAB, event => this._handleKeydown(event));\n this._isActive = true;\n }\n deactivate() {\n if (!this._isActive) {\n return;\n }\n this._isActive = false;\n EventHandler.off(document, EVENT_KEY$5);\n }\n\n // Private\n _handleFocusin(event) {\n const {\n trapElement\n } = this._config;\n if (event.target === document || event.target === trapElement || trapElement.contains(event.target)) {\n return;\n }\n const elements = SelectorEngine.focusableChildren(trapElement);\n if (elements.length === 0) {\n trapElement.focus();\n } else if (this._lastTabNavDirection === TAB_NAV_BACKWARD) {\n elements[elements.length - 1].focus();\n } else {\n elements[0].focus();\n }\n }\n _handleKeydown(event) {\n if (event.key !== TAB_KEY) {\n return;\n }\n this._lastTabNavDirection = event.shiftKey ? 
TAB_NAV_BACKWARD : TAB_NAV_FORWARD;\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/scrollBar.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst SELECTOR_FIXED_CONTENT = '.fixed-top, .fixed-bottom, .is-fixed, .sticky-top';\nconst SELECTOR_STICKY_CONTENT = '.sticky-top';\nconst PROPERTY_PADDING = 'padding-right';\nconst PROPERTY_MARGIN = 'margin-right';\n\n/**\n * Class definition\n */\n\nclass ScrollBarHelper {\n constructor() {\n this._element = document.body;\n }\n\n // Public\n getWidth() {\n // https://developer.mozilla.org/en-US/docs/Web/API/Window/innerWidth#usage_notes\n const documentWidth = document.documentElement.clientWidth;\n return Math.abs(window.innerWidth - documentWidth);\n }\n hide() {\n const width = this.getWidth();\n this._disableOverFlow();\n // give padding to element to balance the hidden scrollbar width\n this._setElementAttributes(this._element, PROPERTY_PADDING, calculatedValue => calculatedValue + width);\n // trick: We adjust positive paddingRight and negative marginRight to sticky-top elements to keep showing fullwidth\n this._setElementAttributes(SELECTOR_FIXED_CONTENT, PROPERTY_PADDING, calculatedValue => calculatedValue + width);\n this._setElementAttributes(SELECTOR_STICKY_CONTENT, PROPERTY_MARGIN, calculatedValue => calculatedValue - width);\n }\n reset() {\n this._resetElementAttributes(this._element, 'overflow');\n this._resetElementAttributes(this._element, PROPERTY_PADDING);\n this._resetElementAttributes(SELECTOR_FIXED_CONTENT, PROPERTY_PADDING);\n this._resetElementAttributes(SELECTOR_STICKY_CONTENT, PROPERTY_MARGIN);\n }\n isOverflowing() {\n return this.getWidth() > 0;\n }\n\n // Private\n _disableOverFlow() {\n this._saveInitialAttribute(this._element, 'overflow');\n this._element.style.overflow = 'hidden';\n }\n _setElementAttributes(selector, styleProperty, callback) {\n const scrollbarWidth = this.getWidth();\n const manipulationCallBack = element => {\n if (element !== this._element && window.innerWidth > element.clientWidth + scrollbarWidth) {\n return;\n }\n this._saveInitialAttribute(element, styleProperty);\n const calculatedValue = window.getComputedStyle(element).getPropertyValue(styleProperty);\n element.style.setProperty(styleProperty, `${callback(Number.parseFloat(calculatedValue))}px`);\n };\n this._applyManipulationCallback(selector, manipulationCallBack);\n }\n _saveInitialAttribute(element, styleProperty) {\n const actualValue = element.style.getPropertyValue(styleProperty);\n if (actualValue) {\n Manipulator.setDataAttribute(element, styleProperty, actualValue);\n }\n }\n _resetElementAttributes(selector, styleProperty) {\n const manipulationCallBack = element => {\n const value = Manipulator.getDataAttribute(element, styleProperty);\n // We only want to remove the property if the value is `null`; the value can also be zero\n if (value === null) {\n element.style.removeProperty(styleProperty);\n return;\n }\n Manipulator.removeDataAttribute(element, styleProperty);\n element.style.setProperty(styleProperty, value);\n };\n this._applyManipulationCallback(selector, manipulationCallBack);\n }\n _applyManipulationCallback(selector, callBack) {\n if (isElement(selector)) {\n callBack(selector);\n return;\n }\n for (const sel of SelectorEngine.find(selector, this._element)) {\n callBack(sel);\n }\n }\n}\n\n/**\n * 
--------------------------------------------------------------------------\n * Bootstrap modal.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$7 = 'modal';\nconst DATA_KEY$4 = 'bs.modal';\nconst EVENT_KEY$4 = `.${DATA_KEY$4}`;\nconst DATA_API_KEY$2 = '.data-api';\nconst ESCAPE_KEY$1 = 'Escape';\nconst EVENT_HIDE$4 = `hide${EVENT_KEY$4}`;\nconst EVENT_HIDE_PREVENTED$1 = `hidePrevented${EVENT_KEY$4}`;\nconst EVENT_HIDDEN$4 = `hidden${EVENT_KEY$4}`;\nconst EVENT_SHOW$4 = `show${EVENT_KEY$4}`;\nconst EVENT_SHOWN$4 = `shown${EVENT_KEY$4}`;\nconst EVENT_RESIZE$1 = `resize${EVENT_KEY$4}`;\nconst EVENT_CLICK_DISMISS = `click.dismiss${EVENT_KEY$4}`;\nconst EVENT_MOUSEDOWN_DISMISS = `mousedown.dismiss${EVENT_KEY$4}`;\nconst EVENT_KEYDOWN_DISMISS$1 = `keydown.dismiss${EVENT_KEY$4}`;\nconst EVENT_CLICK_DATA_API$2 = `click${EVENT_KEY$4}${DATA_API_KEY$2}`;\nconst CLASS_NAME_OPEN = 'modal-open';\nconst CLASS_NAME_FADE$3 = 'fade';\nconst CLASS_NAME_SHOW$4 = 'show';\nconst CLASS_NAME_STATIC = 'modal-static';\nconst OPEN_SELECTOR$1 = '.modal.show';\nconst SELECTOR_DIALOG = '.modal-dialog';\nconst SELECTOR_MODAL_BODY = '.modal-body';\nconst SELECTOR_DATA_TOGGLE$2 = '[data-bs-toggle=\"modal\"]';\nconst Default$6 = {\n backdrop: true,\n focus: true,\n keyboard: true\n};\nconst DefaultType$6 = {\n backdrop: '(boolean|string)',\n focus: 'boolean',\n keyboard: 'boolean'\n};\n\n/**\n * Class definition\n */\n\nclass Modal extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._dialog = SelectorEngine.findOne(SELECTOR_DIALOG, this._element);\n this._backdrop = this._initializeBackDrop();\n this._focustrap = this._initializeFocusTrap();\n this._isShown = false;\n this._isTransitioning = false;\n this._scrollBar = new ScrollBarHelper();\n this._addEventListeners();\n }\n\n // Getters\n static get Default() {\n return Default$6;\n }\n static get DefaultType() {\n return DefaultType$6;\n }\n static get NAME() {\n return NAME$7;\n }\n\n // Public\n toggle(relatedTarget) {\n return this._isShown ? 
this.hide() : this.show(relatedTarget);\n }\n show(relatedTarget) {\n if (this._isShown || this._isTransitioning) {\n return;\n }\n const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$4, {\n relatedTarget\n });\n if (showEvent.defaultPrevented) {\n return;\n }\n this._isShown = true;\n this._isTransitioning = true;\n this._scrollBar.hide();\n document.body.classList.add(CLASS_NAME_OPEN);\n this._adjustDialog();\n this._backdrop.show(() => this._showElement(relatedTarget));\n }\n hide() {\n if (!this._isShown || this._isTransitioning) {\n return;\n }\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$4);\n if (hideEvent.defaultPrevented) {\n return;\n }\n this._isShown = false;\n this._isTransitioning = true;\n this._focustrap.deactivate();\n this._element.classList.remove(CLASS_NAME_SHOW$4);\n this._queueCallback(() => this._hideModal(), this._element, this._isAnimated());\n }\n dispose() {\n EventHandler.off(window, EVENT_KEY$4);\n EventHandler.off(this._dialog, EVENT_KEY$4);\n this._backdrop.dispose();\n this._focustrap.deactivate();\n super.dispose();\n }\n handleUpdate() {\n this._adjustDialog();\n }\n\n // Private\n _initializeBackDrop() {\n return new Backdrop({\n isVisible: Boolean(this._config.backdrop),\n // 'static' option will be translated to true, and booleans will keep their value,\n isAnimated: this._isAnimated()\n });\n }\n _initializeFocusTrap() {\n return new FocusTrap({\n trapElement: this._element\n });\n }\n _showElement(relatedTarget) {\n // try to append dynamic modal\n if (!document.body.contains(this._element)) {\n document.body.append(this._element);\n }\n this._element.style.display = 'block';\n this._element.removeAttribute('aria-hidden');\n this._element.setAttribute('aria-modal', true);\n this._element.setAttribute('role', 'dialog');\n this._element.scrollTop = 0;\n const modalBody = SelectorEngine.findOne(SELECTOR_MODAL_BODY, this._dialog);\n if (modalBody) {\n modalBody.scrollTop = 0;\n }\n reflow(this._element);\n this._element.classList.add(CLASS_NAME_SHOW$4);\n const transitionComplete = () => {\n if (this._config.focus) {\n this._focustrap.activate();\n }\n this._isTransitioning = false;\n EventHandler.trigger(this._element, EVENT_SHOWN$4, {\n relatedTarget\n });\n };\n this._queueCallback(transitionComplete, this._dialog, this._isAnimated());\n }\n _addEventListeners() {\n EventHandler.on(this._element, EVENT_KEYDOWN_DISMISS$1, event => {\n if (event.key !== ESCAPE_KEY$1) {\n return;\n }\n if (this._config.keyboard) {\n this.hide();\n return;\n }\n this._triggerBackdropTransition();\n });\n EventHandler.on(window, EVENT_RESIZE$1, () => {\n if (this._isShown && !this._isTransitioning) {\n this._adjustDialog();\n }\n });\n EventHandler.on(this._element, EVENT_MOUSEDOWN_DISMISS, event => {\n // a bad trick to segregate clicks that may start inside dialog but end outside, and avoid listen to scrollbar clicks\n EventHandler.one(this._element, EVENT_CLICK_DISMISS, event2 => {\n if (this._element !== event.target || this._element !== event2.target) {\n return;\n }\n if (this._config.backdrop === 'static') {\n this._triggerBackdropTransition();\n return;\n }\n if (this._config.backdrop) {\n this.hide();\n }\n });\n });\n }\n _hideModal() {\n this._element.style.display = 'none';\n this._element.setAttribute('aria-hidden', true);\n this._element.removeAttribute('aria-modal');\n this._element.removeAttribute('role');\n this._isTransitioning = false;\n this._backdrop.hide(() => {\n document.body.classList.remove(CLASS_NAME_OPEN);\n 
this._resetAdjustments();\n this._scrollBar.reset();\n EventHandler.trigger(this._element, EVENT_HIDDEN$4);\n });\n }\n _isAnimated() {\n return this._element.classList.contains(CLASS_NAME_FADE$3);\n }\n _triggerBackdropTransition() {\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE_PREVENTED$1);\n if (hideEvent.defaultPrevented) {\n return;\n }\n const isModalOverflowing = this._element.scrollHeight > document.documentElement.clientHeight;\n const initialOverflowY = this._element.style.overflowY;\n // return if the following background transition hasn't yet completed\n if (initialOverflowY === 'hidden' || this._element.classList.contains(CLASS_NAME_STATIC)) {\n return;\n }\n if (!isModalOverflowing) {\n this._element.style.overflowY = 'hidden';\n }\n this._element.classList.add(CLASS_NAME_STATIC);\n this._queueCallback(() => {\n this._element.classList.remove(CLASS_NAME_STATIC);\n this._queueCallback(() => {\n this._element.style.overflowY = initialOverflowY;\n }, this._dialog);\n }, this._dialog);\n this._element.focus();\n }\n\n /**\n * The following methods are used to handle overflowing modals\n */\n\n _adjustDialog() {\n const isModalOverflowing = this._element.scrollHeight > document.documentElement.clientHeight;\n const scrollbarWidth = this._scrollBar.getWidth();\n const isBodyOverflowing = scrollbarWidth > 0;\n if (isBodyOverflowing && !isModalOverflowing) {\n const property = isRTL() ? 'paddingLeft' : 'paddingRight';\n this._element.style[property] = `${scrollbarWidth}px`;\n }\n if (!isBodyOverflowing && isModalOverflowing) {\n const property = isRTL() ? 'paddingRight' : 'paddingLeft';\n this._element.style[property] = `${scrollbarWidth}px`;\n }\n }\n _resetAdjustments() {\n this._element.style.paddingLeft = '';\n this._element.style.paddingRight = '';\n }\n\n // Static\n static jQueryInterface(config, relatedTarget) {\n return this.each(function () {\n const data = Modal.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config](relatedTarget);\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$2, SELECTOR_DATA_TOGGLE$2, function (event) {\n const target = SelectorEngine.getElementFromSelector(this);\n if (['A', 'AREA'].includes(this.tagName)) {\n event.preventDefault();\n }\n EventHandler.one(target, EVENT_SHOW$4, showEvent => {\n if (showEvent.defaultPrevented) {\n // only register focus restorer if modal will actually get shown\n return;\n }\n EventHandler.one(target, EVENT_HIDDEN$4, () => {\n if (isVisible(this)) {\n this.focus();\n }\n });\n });\n\n // avoid conflict when clicking modal toggler while another one is open\n const alreadyOpen = SelectorEngine.findOne(OPEN_SELECTOR$1);\n if (alreadyOpen) {\n Modal.getInstance(alreadyOpen).hide();\n }\n const data = Modal.getOrCreateInstance(target);\n data.toggle(this);\n});\nenableDismissTrigger(Modal);\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Modal);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap offcanvas.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$6 = 'offcanvas';\nconst DATA_KEY$3 = 'bs.offcanvas';\nconst EVENT_KEY$3 = `.${DATA_KEY$3}`;\nconst DATA_API_KEY$1 = '.data-api';\nconst 
EVENT_LOAD_DATA_API$2 = `load${EVENT_KEY$3}${DATA_API_KEY$1}`;\nconst ESCAPE_KEY = 'Escape';\nconst CLASS_NAME_SHOW$3 = 'show';\nconst CLASS_NAME_SHOWING$1 = 'showing';\nconst CLASS_NAME_HIDING = 'hiding';\nconst CLASS_NAME_BACKDROP = 'offcanvas-backdrop';\nconst OPEN_SELECTOR = '.offcanvas.show';\nconst EVENT_SHOW$3 = `show${EVENT_KEY$3}`;\nconst EVENT_SHOWN$3 = `shown${EVENT_KEY$3}`;\nconst EVENT_HIDE$3 = `hide${EVENT_KEY$3}`;\nconst EVENT_HIDE_PREVENTED = `hidePrevented${EVENT_KEY$3}`;\nconst EVENT_HIDDEN$3 = `hidden${EVENT_KEY$3}`;\nconst EVENT_RESIZE = `resize${EVENT_KEY$3}`;\nconst EVENT_CLICK_DATA_API$1 = `click${EVENT_KEY$3}${DATA_API_KEY$1}`;\nconst EVENT_KEYDOWN_DISMISS = `keydown.dismiss${EVENT_KEY$3}`;\nconst SELECTOR_DATA_TOGGLE$1 = '[data-bs-toggle=\"offcanvas\"]';\nconst Default$5 = {\n backdrop: true,\n keyboard: true,\n scroll: false\n};\nconst DefaultType$5 = {\n backdrop: '(boolean|string)',\n keyboard: 'boolean',\n scroll: 'boolean'\n};\n\n/**\n * Class definition\n */\n\nclass Offcanvas extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._isShown = false;\n this._backdrop = this._initializeBackDrop();\n this._focustrap = this._initializeFocusTrap();\n this._addEventListeners();\n }\n\n // Getters\n static get Default() {\n return Default$5;\n }\n static get DefaultType() {\n return DefaultType$5;\n }\n static get NAME() {\n return NAME$6;\n }\n\n // Public\n toggle(relatedTarget) {\n return this._isShown ? this.hide() : this.show(relatedTarget);\n }\n show(relatedTarget) {\n if (this._isShown) {\n return;\n }\n const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$3, {\n relatedTarget\n });\n if (showEvent.defaultPrevented) {\n return;\n }\n this._isShown = true;\n this._backdrop.show();\n if (!this._config.scroll) {\n new ScrollBarHelper().hide();\n }\n this._element.setAttribute('aria-modal', true);\n this._element.setAttribute('role', 'dialog');\n this._element.classList.add(CLASS_NAME_SHOWING$1);\n const completeCallBack = () => {\n if (!this._config.scroll || this._config.backdrop) {\n this._focustrap.activate();\n }\n this._element.classList.add(CLASS_NAME_SHOW$3);\n this._element.classList.remove(CLASS_NAME_SHOWING$1);\n EventHandler.trigger(this._element, EVENT_SHOWN$3, {\n relatedTarget\n });\n };\n this._queueCallback(completeCallBack, this._element, true);\n }\n hide() {\n if (!this._isShown) {\n return;\n }\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$3);\n if (hideEvent.defaultPrevented) {\n return;\n }\n this._focustrap.deactivate();\n this._element.blur();\n this._isShown = false;\n this._element.classList.add(CLASS_NAME_HIDING);\n this._backdrop.hide();\n const completeCallback = () => {\n this._element.classList.remove(CLASS_NAME_SHOW$3, CLASS_NAME_HIDING);\n this._element.removeAttribute('aria-modal');\n this._element.removeAttribute('role');\n if (!this._config.scroll) {\n new ScrollBarHelper().reset();\n }\n EventHandler.trigger(this._element, EVENT_HIDDEN$3);\n };\n this._queueCallback(completeCallback, this._element, true);\n }\n dispose() {\n this._backdrop.dispose();\n this._focustrap.deactivate();\n super.dispose();\n }\n\n // Private\n _initializeBackDrop() {\n const clickCallback = () => {\n if (this._config.backdrop === 'static') {\n EventHandler.trigger(this._element, EVENT_HIDE_PREVENTED);\n return;\n }\n this.hide();\n };\n\n // 'static' option will be translated to true, and booleans will keep their value\n const isVisible = Boolean(this._config.backdrop);\n return 
new Backdrop({\n className: CLASS_NAME_BACKDROP,\n isVisible,\n isAnimated: true,\n rootElement: this._element.parentNode,\n clickCallback: isVisible ? clickCallback : null\n });\n }\n _initializeFocusTrap() {\n return new FocusTrap({\n trapElement: this._element\n });\n }\n _addEventListeners() {\n EventHandler.on(this._element, EVENT_KEYDOWN_DISMISS, event => {\n if (event.key !== ESCAPE_KEY) {\n return;\n }\n if (this._config.keyboard) {\n this.hide();\n return;\n }\n EventHandler.trigger(this._element, EVENT_HIDE_PREVENTED);\n });\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Offcanvas.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config](this);\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$1, SELECTOR_DATA_TOGGLE$1, function (event) {\n const target = SelectorEngine.getElementFromSelector(this);\n if (['A', 'AREA'].includes(this.tagName)) {\n event.preventDefault();\n }\n if (isDisabled(this)) {\n return;\n }\n EventHandler.one(target, EVENT_HIDDEN$3, () => {\n // focus on trigger when it is closed\n if (isVisible(this)) {\n this.focus();\n }\n });\n\n // avoid conflict when clicking a toggler of an offcanvas, while another is open\n const alreadyOpen = SelectorEngine.findOne(OPEN_SELECTOR);\n if (alreadyOpen && alreadyOpen !== target) {\n Offcanvas.getInstance(alreadyOpen).hide();\n }\n const data = Offcanvas.getOrCreateInstance(target);\n data.toggle(this);\n});\nEventHandler.on(window, EVENT_LOAD_DATA_API$2, () => {\n for (const selector of SelectorEngine.find(OPEN_SELECTOR)) {\n Offcanvas.getOrCreateInstance(selector).show();\n }\n});\nEventHandler.on(window, EVENT_RESIZE, () => {\n for (const element of SelectorEngine.find('[aria-modal][class*=show][class*=offcanvas-]')) {\n if (getComputedStyle(element).position !== 'fixed') {\n Offcanvas.getOrCreateInstance(element).hide();\n }\n }\n});\nenableDismissTrigger(Offcanvas);\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Offcanvas);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/sanitizer.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n// js-docs-start allow-list\nconst ARIA_ATTRIBUTE_PATTERN = /^aria-[\\w-]*$/i;\nconst DefaultAllowlist = {\n // Global attributes allowed on any supplied element below.\n '*': ['class', 'dir', 'id', 'lang', 'role', ARIA_ATTRIBUTE_PATTERN],\n a: ['target', 'href', 'title', 'rel'],\n area: [],\n b: [],\n br: [],\n col: [],\n code: [],\n dd: [],\n div: [],\n dl: [],\n dt: [],\n em: [],\n hr: [],\n h1: [],\n h2: [],\n h3: [],\n h4: [],\n h5: [],\n h6: [],\n i: [],\n img: ['src', 'srcset', 'alt', 'title', 'width', 'height'],\n li: [],\n ol: [],\n p: [],\n pre: [],\n s: [],\n small: [],\n span: [],\n sub: [],\n sup: [],\n strong: [],\n u: [],\n ul: []\n};\n// js-docs-end allow-list\n\nconst uriAttributes = new Set(['background', 'cite', 'href', 'itemtype', 'longdesc', 'poster', 'src', 'xlink:href']);\n\n/**\n * A pattern that recognizes URLs that are safe wrt. 
XSS in URL navigation\n * contexts.\n *\n * Shout-out to Angular https://github.com/angular/angular/blob/15.2.8/packages/core/src/sanitization/url_sanitizer.ts#L38\n */\n// eslint-disable-next-line unicorn/better-regex\nconst SAFE_URL_PATTERN = /^(?!javascript:)(?:[a-z0-9+.-]+:|[^&:/?#]*(?:[/?#]|$))/i;\nconst allowedAttribute = (attribute, allowedAttributeList) => {\n const attributeName = attribute.nodeName.toLowerCase();\n if (allowedAttributeList.includes(attributeName)) {\n if (uriAttributes.has(attributeName)) {\n return Boolean(SAFE_URL_PATTERN.test(attribute.nodeValue));\n }\n return true;\n }\n\n // Check if a regular expression validates the attribute.\n return allowedAttributeList.filter(attributeRegex => attributeRegex instanceof RegExp).some(regex => regex.test(attributeName));\n};\nfunction sanitizeHtml(unsafeHtml, allowList, sanitizeFunction) {\n if (!unsafeHtml.length) {\n return unsafeHtml;\n }\n if (sanitizeFunction && typeof sanitizeFunction === 'function') {\n return sanitizeFunction(unsafeHtml);\n }\n const domParser = new window.DOMParser();\n const createdDocument = domParser.parseFromString(unsafeHtml, 'text/html');\n const elements = [].concat(...createdDocument.body.querySelectorAll('*'));\n for (const element of elements) {\n const elementName = element.nodeName.toLowerCase();\n if (!Object.keys(allowList).includes(elementName)) {\n element.remove();\n continue;\n }\n const attributeList = [].concat(...element.attributes);\n const allowedAttributes = [].concat(allowList['*'] || [], allowList[elementName] || []);\n for (const attribute of attributeList) {\n if (!allowedAttribute(attribute, allowedAttributes)) {\n element.removeAttribute(attribute.nodeName);\n }\n }\n }\n return createdDocument.body.innerHTML;\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/template-factory.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$5 = 'TemplateFactory';\nconst Default$4 = {\n allowList: DefaultAllowlist,\n content: {},\n // { selector : text , selector2 : text2 , }\n extraClass: '',\n html: false,\n sanitize: true,\n sanitizeFn: null,\n template: '<div></div>
'\n};\nconst DefaultType$4 = {\n allowList: 'object',\n content: 'object',\n extraClass: '(string|function)',\n html: 'boolean',\n sanitize: 'boolean',\n sanitizeFn: '(null|function)',\n template: 'string'\n};\nconst DefaultContentType = {\n entry: '(string|element|function|null)',\n selector: '(string|element)'\n};\n\n/**\n * Class definition\n */\n\nclass TemplateFactory extends Config {\n constructor(config) {\n super();\n this._config = this._getConfig(config);\n }\n\n // Getters\n static get Default() {\n return Default$4;\n }\n static get DefaultType() {\n return DefaultType$4;\n }\n static get NAME() {\n return NAME$5;\n }\n\n // Public\n getContent() {\n return Object.values(this._config.content).map(config => this._resolvePossibleFunction(config)).filter(Boolean);\n }\n hasContent() {\n return this.getContent().length > 0;\n }\n changeContent(content) {\n this._checkContent(content);\n this._config.content = {\n ...this._config.content,\n ...content\n };\n return this;\n }\n toHtml() {\n const templateWrapper = document.createElement('div');\n templateWrapper.innerHTML = this._maybeSanitize(this._config.template);\n for (const [selector, text] of Object.entries(this._config.content)) {\n this._setContent(templateWrapper, text, selector);\n }\n const template = templateWrapper.children[0];\n const extraClass = this._resolvePossibleFunction(this._config.extraClass);\n if (extraClass) {\n template.classList.add(...extraClass.split(' '));\n }\n return template;\n }\n\n // Private\n _typeCheckConfig(config) {\n super._typeCheckConfig(config);\n this._checkContent(config.content);\n }\n _checkContent(arg) {\n for (const [selector, content] of Object.entries(arg)) {\n super._typeCheckConfig({\n selector,\n entry: content\n }, DefaultContentType);\n }\n }\n _setContent(template, content, selector) {\n const templateElement = SelectorEngine.findOne(selector, template);\n if (!templateElement) {\n return;\n }\n content = this._resolvePossibleFunction(content);\n if (!content) {\n templateElement.remove();\n return;\n }\n if (isElement(content)) {\n this._putElementInTemplate(getElement(content), templateElement);\n return;\n }\n if (this._config.html) {\n templateElement.innerHTML = this._maybeSanitize(content);\n return;\n }\n templateElement.textContent = content;\n }\n _maybeSanitize(arg) {\n return this._config.sanitize ? 
sanitizeHtml(arg, this._config.allowList, this._config.sanitizeFn) : arg;\n }\n _resolvePossibleFunction(arg) {\n return execute(arg, [this]);\n }\n _putElementInTemplate(element, templateElement) {\n if (this._config.html) {\n templateElement.innerHTML = '';\n templateElement.append(element);\n return;\n }\n templateElement.textContent = element.textContent;\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap tooltip.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$4 = 'tooltip';\nconst DISALLOWED_ATTRIBUTES = new Set(['sanitize', 'allowList', 'sanitizeFn']);\nconst CLASS_NAME_FADE$2 = 'fade';\nconst CLASS_NAME_MODAL = 'modal';\nconst CLASS_NAME_SHOW$2 = 'show';\nconst SELECTOR_TOOLTIP_INNER = '.tooltip-inner';\nconst SELECTOR_MODAL = `.${CLASS_NAME_MODAL}`;\nconst EVENT_MODAL_HIDE = 'hide.bs.modal';\nconst TRIGGER_HOVER = 'hover';\nconst TRIGGER_FOCUS = 'focus';\nconst TRIGGER_CLICK = 'click';\nconst TRIGGER_MANUAL = 'manual';\nconst EVENT_HIDE$2 = 'hide';\nconst EVENT_HIDDEN$2 = 'hidden';\nconst EVENT_SHOW$2 = 'show';\nconst EVENT_SHOWN$2 = 'shown';\nconst EVENT_INSERTED = 'inserted';\nconst EVENT_CLICK$1 = 'click';\nconst EVENT_FOCUSIN$1 = 'focusin';\nconst EVENT_FOCUSOUT$1 = 'focusout';\nconst EVENT_MOUSEENTER = 'mouseenter';\nconst EVENT_MOUSELEAVE = 'mouseleave';\nconst AttachmentMap = {\n AUTO: 'auto',\n TOP: 'top',\n RIGHT: isRTL() ? 'left' : 'right',\n BOTTOM: 'bottom',\n LEFT: isRTL() ? 'right' : 'left'\n};\nconst Default$3 = {\n allowList: DefaultAllowlist,\n animation: true,\n boundary: 'clippingParents',\n container: false,\n customClass: '',\n delay: 0,\n fallbackPlacements: ['top', 'right', 'bottom', 'left'],\n html: false,\n offset: [0, 6],\n placement: 'top',\n popperConfig: null,\n sanitize: true,\n sanitizeFn: null,\n selector: false,\n template: '
<div class=\"tooltip\" role=\"tooltip\">' + '<div class=\"tooltip-arrow\"></div>' + '<div class=\"tooltip-inner\"></div>' + '</div>
',\n title: '',\n trigger: 'hover focus'\n};\nconst DefaultType$3 = {\n allowList: 'object',\n animation: 'boolean',\n boundary: '(string|element)',\n container: '(string|element|boolean)',\n customClass: '(string|function)',\n delay: '(number|object)',\n fallbackPlacements: 'array',\n html: 'boolean',\n offset: '(array|string|function)',\n placement: '(string|function)',\n popperConfig: '(null|object|function)',\n sanitize: 'boolean',\n sanitizeFn: '(null|function)',\n selector: '(string|boolean)',\n template: 'string',\n title: '(string|element|function)',\n trigger: 'string'\n};\n\n/**\n * Class definition\n */\n\nclass Tooltip extends BaseComponent {\n constructor(element, config) {\n if (typeof Popper === 'undefined') {\n throw new TypeError('Bootstrap\\'s tooltips require Popper (https://popper.js.org)');\n }\n super(element, config);\n\n // Private\n this._isEnabled = true;\n this._timeout = 0;\n this._isHovered = null;\n this._activeTrigger = {};\n this._popper = null;\n this._templateFactory = null;\n this._newContent = null;\n\n // Protected\n this.tip = null;\n this._setListeners();\n if (!this._config.selector) {\n this._fixTitle();\n }\n }\n\n // Getters\n static get Default() {\n return Default$3;\n }\n static get DefaultType() {\n return DefaultType$3;\n }\n static get NAME() {\n return NAME$4;\n }\n\n // Public\n enable() {\n this._isEnabled = true;\n }\n disable() {\n this._isEnabled = false;\n }\n toggleEnabled() {\n this._isEnabled = !this._isEnabled;\n }\n toggle() {\n if (!this._isEnabled) {\n return;\n }\n this._activeTrigger.click = !this._activeTrigger.click;\n if (this._isShown()) {\n this._leave();\n return;\n }\n this._enter();\n }\n dispose() {\n clearTimeout(this._timeout);\n EventHandler.off(this._element.closest(SELECTOR_MODAL), EVENT_MODAL_HIDE, this._hideModalHandler);\n if (this._element.getAttribute('data-bs-original-title')) {\n this._element.setAttribute('title', this._element.getAttribute('data-bs-original-title'));\n }\n this._disposePopper();\n super.dispose();\n }\n show() {\n if (this._element.style.display === 'none') {\n throw new Error('Please use show on visible elements');\n }\n if (!(this._isWithContent() && this._isEnabled)) {\n return;\n }\n const showEvent = EventHandler.trigger(this._element, this.constructor.eventName(EVENT_SHOW$2));\n const shadowRoot = findShadowRoot(this._element);\n const isInTheDom = (shadowRoot || this._element.ownerDocument.documentElement).contains(this._element);\n if (showEvent.defaultPrevented || !isInTheDom) {\n return;\n }\n\n // TODO: v6 remove this or make it optional\n this._disposePopper();\n const tip = this._getTipElement();\n this._element.setAttribute('aria-describedby', tip.getAttribute('id'));\n const {\n container\n } = this._config;\n if (!this._element.ownerDocument.documentElement.contains(this.tip)) {\n container.append(tip);\n EventHandler.trigger(this._element, this.constructor.eventName(EVENT_INSERTED));\n }\n this._popper = this._createPopper(tip);\n tip.classList.add(CLASS_NAME_SHOW$2);\n\n // If this is a touch-enabled device we add extra\n // empty mouseover listeners to the body's immediate children;\n // only needed because of broken event delegation on iOS\n // https://www.quirksmode.org/blog/archives/2014/02/mouse_event_bub.html\n if ('ontouchstart' in document.documentElement) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.on(element, 'mouseover', noop);\n }\n }\n const complete = () => {\n EventHandler.trigger(this._element, 
this.constructor.eventName(EVENT_SHOWN$2));\n if (this._isHovered === false) {\n this._leave();\n }\n this._isHovered = false;\n };\n this._queueCallback(complete, this.tip, this._isAnimated());\n }\n hide() {\n if (!this._isShown()) {\n return;\n }\n const hideEvent = EventHandler.trigger(this._element, this.constructor.eventName(EVENT_HIDE$2));\n if (hideEvent.defaultPrevented) {\n return;\n }\n const tip = this._getTipElement();\n tip.classList.remove(CLASS_NAME_SHOW$2);\n\n // If this is a touch-enabled device we remove the extra\n // empty mouseover listeners we added for iOS support\n if ('ontouchstart' in document.documentElement) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.off(element, 'mouseover', noop);\n }\n }\n this._activeTrigger[TRIGGER_CLICK] = false;\n this._activeTrigger[TRIGGER_FOCUS] = false;\n this._activeTrigger[TRIGGER_HOVER] = false;\n this._isHovered = null; // it is a trick to support manual triggering\n\n const complete = () => {\n if (this._isWithActiveTrigger()) {\n return;\n }\n if (!this._isHovered) {\n this._disposePopper();\n }\n this._element.removeAttribute('aria-describedby');\n EventHandler.trigger(this._element, this.constructor.eventName(EVENT_HIDDEN$2));\n };\n this._queueCallback(complete, this.tip, this._isAnimated());\n }\n update() {\n if (this._popper) {\n this._popper.update();\n }\n }\n\n // Protected\n _isWithContent() {\n return Boolean(this._getTitle());\n }\n _getTipElement() {\n if (!this.tip) {\n this.tip = this._createTipElement(this._newContent || this._getContentForTemplate());\n }\n return this.tip;\n }\n _createTipElement(content) {\n const tip = this._getTemplateFactory(content).toHtml();\n\n // TODO: remove this check in v6\n if (!tip) {\n return null;\n }\n tip.classList.remove(CLASS_NAME_FADE$2, CLASS_NAME_SHOW$2);\n // TODO: v6 the following can be achieved with CSS only\n tip.classList.add(`bs-${this.constructor.NAME}-auto`);\n const tipId = getUID(this.constructor.NAME).toString();\n tip.setAttribute('id', tipId);\n if (this._isAnimated()) {\n tip.classList.add(CLASS_NAME_FADE$2);\n }\n return tip;\n }\n setContent(content) {\n this._newContent = content;\n if (this._isShown()) {\n this._disposePopper();\n this.show();\n }\n }\n _getTemplateFactory(content) {\n if (this._templateFactory) {\n this._templateFactory.changeContent(content);\n } else {\n this._templateFactory = new TemplateFactory({\n ...this._config,\n // the `content` var has to be after `this._config`\n // to override config.content in case of popover\n content,\n extraClass: this._resolvePossibleFunction(this._config.customClass)\n });\n }\n return this._templateFactory;\n }\n _getContentForTemplate() {\n return {\n [SELECTOR_TOOLTIP_INNER]: this._getTitle()\n };\n }\n _getTitle() {\n return this._resolvePossibleFunction(this._config.title) || this._element.getAttribute('data-bs-original-title');\n }\n\n // Private\n _initializeOnDelegatedTarget(event) {\n return this.constructor.getOrCreateInstance(event.delegateTarget, this._getDelegateConfig());\n }\n _isAnimated() {\n return this._config.animation || this.tip && this.tip.classList.contains(CLASS_NAME_FADE$2);\n }\n _isShown() {\n return this.tip && this.tip.classList.contains(CLASS_NAME_SHOW$2);\n }\n _createPopper(tip) {\n const placement = execute(this._config.placement, [this, tip, this._element]);\n const attachment = AttachmentMap[placement.toUpperCase()];\n return Popper.createPopper(this._element, tip, this._getPopperConfig(attachment));\n }\n _getOffset() {\n 
const {\n offset\n } = this._config;\n if (typeof offset === 'string') {\n return offset.split(',').map(value => Number.parseInt(value, 10));\n }\n if (typeof offset === 'function') {\n return popperData => offset(popperData, this._element);\n }\n return offset;\n }\n _resolvePossibleFunction(arg) {\n return execute(arg, [this._element]);\n }\n _getPopperConfig(attachment) {\n const defaultBsPopperConfig = {\n placement: attachment,\n modifiers: [{\n name: 'flip',\n options: {\n fallbackPlacements: this._config.fallbackPlacements\n }\n }, {\n name: 'offset',\n options: {\n offset: this._getOffset()\n }\n }, {\n name: 'preventOverflow',\n options: {\n boundary: this._config.boundary\n }\n }, {\n name: 'arrow',\n options: {\n element: `.${this.constructor.NAME}-arrow`\n }\n }, {\n name: 'preSetPlacement',\n enabled: true,\n phase: 'beforeMain',\n fn: data => {\n // Pre-set Popper's placement attribute in order to read the arrow sizes properly.\n // Otherwise, Popper mixes up the width and height dimensions since the initial arrow style is for top placement\n this._getTipElement().setAttribute('data-popper-placement', data.state.placement);\n }\n }]\n };\n return {\n ...defaultBsPopperConfig,\n ...execute(this._config.popperConfig, [defaultBsPopperConfig])\n };\n }\n _setListeners() {\n const triggers = this._config.trigger.split(' ');\n for (const trigger of triggers) {\n if (trigger === 'click') {\n EventHandler.on(this._element, this.constructor.eventName(EVENT_CLICK$1), this._config.selector, event => {\n const context = this._initializeOnDelegatedTarget(event);\n context.toggle();\n });\n } else if (trigger !== TRIGGER_MANUAL) {\n const eventIn = trigger === TRIGGER_HOVER ? this.constructor.eventName(EVENT_MOUSEENTER) : this.constructor.eventName(EVENT_FOCUSIN$1);\n const eventOut = trigger === TRIGGER_HOVER ? this.constructor.eventName(EVENT_MOUSELEAVE) : this.constructor.eventName(EVENT_FOCUSOUT$1);\n EventHandler.on(this._element, eventIn, this._config.selector, event => {\n const context = this._initializeOnDelegatedTarget(event);\n context._activeTrigger[event.type === 'focusin' ? TRIGGER_FOCUS : TRIGGER_HOVER] = true;\n context._enter();\n });\n EventHandler.on(this._element, eventOut, this._config.selector, event => {\n const context = this._initializeOnDelegatedTarget(event);\n context._activeTrigger[event.type === 'focusout' ? TRIGGER_FOCUS : TRIGGER_HOVER] = context._element.contains(event.relatedTarget);\n context._leave();\n });\n }\n }\n this._hideModalHandler = () => {\n if (this._element) {\n this.hide();\n }\n };\n EventHandler.on(this._element.closest(SELECTOR_MODAL), EVENT_MODAL_HIDE, this._hideModalHandler);\n }\n _fixTitle() {\n const title = this._element.getAttribute('title');\n if (!title) {\n return;\n }\n if (!this._element.getAttribute('aria-label') && !this._element.textContent.trim()) {\n this._element.setAttribute('aria-label', title);\n }\n this._element.setAttribute('data-bs-original-title', title); // DO NOT USE IT. 
Is only for backwards compatibility\n this._element.removeAttribute('title');\n }\n _enter() {\n if (this._isShown() || this._isHovered) {\n this._isHovered = true;\n return;\n }\n this._isHovered = true;\n this._setTimeout(() => {\n if (this._isHovered) {\n this.show();\n }\n }, this._config.delay.show);\n }\n _leave() {\n if (this._isWithActiveTrigger()) {\n return;\n }\n this._isHovered = false;\n this._setTimeout(() => {\n if (!this._isHovered) {\n this.hide();\n }\n }, this._config.delay.hide);\n }\n _setTimeout(handler, timeout) {\n clearTimeout(this._timeout);\n this._timeout = setTimeout(handler, timeout);\n }\n _isWithActiveTrigger() {\n return Object.values(this._activeTrigger).includes(true);\n }\n _getConfig(config) {\n const dataAttributes = Manipulator.getDataAttributes(this._element);\n for (const dataAttribute of Object.keys(dataAttributes)) {\n if (DISALLOWED_ATTRIBUTES.has(dataAttribute)) {\n delete dataAttributes[dataAttribute];\n }\n }\n config = {\n ...dataAttributes,\n ...(typeof config === 'object' && config ? config : {})\n };\n config = this._mergeConfigObj(config);\n config = this._configAfterMerge(config);\n this._typeCheckConfig(config);\n return config;\n }\n _configAfterMerge(config) {\n config.container = config.container === false ? document.body : getElement(config.container);\n if (typeof config.delay === 'number') {\n config.delay = {\n show: config.delay,\n hide: config.delay\n };\n }\n if (typeof config.title === 'number') {\n config.title = config.title.toString();\n }\n if (typeof config.content === 'number') {\n config.content = config.content.toString();\n }\n return config;\n }\n _getDelegateConfig() {\n const config = {};\n for (const [key, value] of Object.entries(this._config)) {\n if (this.constructor.Default[key] !== value) {\n config[key] = value;\n }\n }\n config.selector = false;\n config.trigger = 'manual';\n\n // In the future can be replaced with:\n // const keysWithDifferentValues = Object.entries(this._config).filter(entry => this.constructor.Default[entry[0]] !== this._config[entry[0]])\n // `Object.fromEntries(keysWithDifferentValues)`\n return config;\n }\n _disposePopper() {\n if (this._popper) {\n this._popper.destroy();\n this._popper = null;\n }\n if (this.tip) {\n this.tip.remove();\n this.tip = null;\n }\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Tooltip.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n });\n }\n}\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Tooltip);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap popover.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$3 = 'popover';\nconst SELECTOR_TITLE = '.popover-header';\nconst SELECTOR_CONTENT = '.popover-body';\nconst Default$2 = {\n ...Tooltip.Default,\n content: '',\n offset: [0, 8],\n placement: 'right',\n template: '
<div class=\"popover\" role=\"tooltip\">' + '<div class=\"popover-arrow\"></div>' + '<h3 class=\"popover-header\"></h3>' + '<div class=\"popover-body\"></div>' + '</div>
',\n trigger: 'click'\n};\nconst DefaultType$2 = {\n ...Tooltip.DefaultType,\n content: '(null|string|element|function)'\n};\n\n/**\n * Class definition\n */\n\nclass Popover extends Tooltip {\n // Getters\n static get Default() {\n return Default$2;\n }\n static get DefaultType() {\n return DefaultType$2;\n }\n static get NAME() {\n return NAME$3;\n }\n\n // Overrides\n _isWithContent() {\n return this._getTitle() || this._getContent();\n }\n\n // Private\n _getContentForTemplate() {\n return {\n [SELECTOR_TITLE]: this._getTitle(),\n [SELECTOR_CONTENT]: this._getContent()\n };\n }\n _getContent() {\n return this._resolvePossibleFunction(this._config.content);\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Popover.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n });\n }\n}\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Popover);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap scrollspy.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$2 = 'scrollspy';\nconst DATA_KEY$2 = 'bs.scrollspy';\nconst EVENT_KEY$2 = `.${DATA_KEY$2}`;\nconst DATA_API_KEY = '.data-api';\nconst EVENT_ACTIVATE = `activate${EVENT_KEY$2}`;\nconst EVENT_CLICK = `click${EVENT_KEY$2}`;\nconst EVENT_LOAD_DATA_API$1 = `load${EVENT_KEY$2}${DATA_API_KEY}`;\nconst CLASS_NAME_DROPDOWN_ITEM = 'dropdown-item';\nconst CLASS_NAME_ACTIVE$1 = 'active';\nconst SELECTOR_DATA_SPY = '[data-bs-spy=\"scroll\"]';\nconst SELECTOR_TARGET_LINKS = '[href]';\nconst SELECTOR_NAV_LIST_GROUP = '.nav, .list-group';\nconst SELECTOR_NAV_LINKS = '.nav-link';\nconst SELECTOR_NAV_ITEMS = '.nav-item';\nconst SELECTOR_LIST_ITEMS = '.list-group-item';\nconst SELECTOR_LINK_ITEMS = `${SELECTOR_NAV_LINKS}, ${SELECTOR_NAV_ITEMS} > ${SELECTOR_NAV_LINKS}, ${SELECTOR_LIST_ITEMS}`;\nconst SELECTOR_DROPDOWN = '.dropdown';\nconst SELECTOR_DROPDOWN_TOGGLE$1 = '.dropdown-toggle';\nconst Default$1 = {\n offset: null,\n // TODO: v6 @deprecated, keep it for backwards compatibility reasons\n rootMargin: '0px 0px -25%',\n smoothScroll: false,\n target: null,\n threshold: [0.1, 0.5, 1]\n};\nconst DefaultType$1 = {\n offset: '(number|null)',\n // TODO v6 @deprecated, keep it for backwards compatibility reasons\n rootMargin: 'string',\n smoothScroll: 'boolean',\n target: 'element',\n threshold: 'array'\n};\n\n/**\n * Class definition\n */\n\nclass ScrollSpy extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n\n // this._element is the observablesContainer and config.target the menu links wrapper\n this._targetLinks = new Map();\n this._observableSections = new Map();\n this._rootElement = getComputedStyle(this._element).overflowY === 'visible' ? 
null : this._element;\n this._activeTarget = null;\n this._observer = null;\n this._previousScrollData = {\n visibleEntryTop: 0,\n parentScrollTop: 0\n };\n this.refresh(); // initialize\n }\n\n // Getters\n static get Default() {\n return Default$1;\n }\n static get DefaultType() {\n return DefaultType$1;\n }\n static get NAME() {\n return NAME$2;\n }\n\n // Public\n refresh() {\n this._initializeTargetsAndObservables();\n this._maybeEnableSmoothScroll();\n if (this._observer) {\n this._observer.disconnect();\n } else {\n this._observer = this._getNewObserver();\n }\n for (const section of this._observableSections.values()) {\n this._observer.observe(section);\n }\n }\n dispose() {\n this._observer.disconnect();\n super.dispose();\n }\n\n // Private\n _configAfterMerge(config) {\n // TODO: on v6 target should be given explicitly & remove the {target: 'ss-target'} case\n config.target = getElement(config.target) || document.body;\n\n // TODO: v6 Only for backwards compatibility reasons. Use rootMargin only\n config.rootMargin = config.offset ? `${config.offset}px 0px -30%` : config.rootMargin;\n if (typeof config.threshold === 'string') {\n config.threshold = config.threshold.split(',').map(value => Number.parseFloat(value));\n }\n return config;\n }\n _maybeEnableSmoothScroll() {\n if (!this._config.smoothScroll) {\n return;\n }\n\n // unregister any previous listeners\n EventHandler.off(this._config.target, EVENT_CLICK);\n EventHandler.on(this._config.target, EVENT_CLICK, SELECTOR_TARGET_LINKS, event => {\n const observableSection = this._observableSections.get(event.target.hash);\n if (observableSection) {\n event.preventDefault();\n const root = this._rootElement || window;\n const height = observableSection.offsetTop - this._element.offsetTop;\n if (root.scrollTo) {\n root.scrollTo({\n top: height,\n behavior: 'smooth'\n });\n return;\n }\n\n // Chrome 60 doesn't support `scrollTo`\n root.scrollTop = height;\n }\n });\n }\n _getNewObserver() {\n const options = {\n root: this._rootElement,\n threshold: this._config.threshold,\n rootMargin: this._config.rootMargin\n };\n return new IntersectionObserver(entries => this._observerCallback(entries), options);\n }\n\n // The logic of selection\n _observerCallback(entries) {\n const targetElement = entry => this._targetLinks.get(`#${entry.target.id}`);\n const activate = entry => {\n this._previousScrollData.visibleEntryTop = entry.target.offsetTop;\n this._process(targetElement(entry));\n };\n const parentScrollTop = (this._rootElement || document.documentElement).scrollTop;\n const userScrollsDown = parentScrollTop >= this._previousScrollData.parentScrollTop;\n this._previousScrollData.parentScrollTop = parentScrollTop;\n for (const entry of entries) {\n if (!entry.isIntersecting) {\n this._activeTarget = null;\n this._clearActiveClass(targetElement(entry));\n continue;\n }\n const entryIsLowerThanPrevious = entry.target.offsetTop >= this._previousScrollData.visibleEntryTop;\n // if we are scrolling down, pick the bigger offsetTop\n if (userScrollsDown && entryIsLowerThanPrevious) {\n activate(entry);\n // if parent isn't scrolled, let's keep the first visible item, breaking the iteration\n if (!parentScrollTop) {\n return;\n }\n continue;\n }\n\n // if we are scrolling up, pick the smallest offsetTop\n if (!userScrollsDown && !entryIsLowerThanPrevious) {\n activate(entry);\n }\n }\n }\n _initializeTargetsAndObservables() {\n this._targetLinks = new Map();\n this._observableSections = new Map();\n const targetLinks = 
SelectorEngine.find(SELECTOR_TARGET_LINKS, this._config.target);\n for (const anchor of targetLinks) {\n // ensure that the anchor has an id and is not disabled\n if (!anchor.hash || isDisabled(anchor)) {\n continue;\n }\n const observableSection = SelectorEngine.findOne(decodeURI(anchor.hash), this._element);\n\n // ensure that the observableSection exists & is visible\n if (isVisible(observableSection)) {\n this._targetLinks.set(decodeURI(anchor.hash), anchor);\n this._observableSections.set(anchor.hash, observableSection);\n }\n }\n }\n _process(target) {\n if (this._activeTarget === target) {\n return;\n }\n this._clearActiveClass(this._config.target);\n this._activeTarget = target;\n target.classList.add(CLASS_NAME_ACTIVE$1);\n this._activateParents(target);\n EventHandler.trigger(this._element, EVENT_ACTIVATE, {\n relatedTarget: target\n });\n }\n _activateParents(target) {\n // Activate dropdown parents\n if (target.classList.contains(CLASS_NAME_DROPDOWN_ITEM)) {\n SelectorEngine.findOne(SELECTOR_DROPDOWN_TOGGLE$1, target.closest(SELECTOR_DROPDOWN)).classList.add(CLASS_NAME_ACTIVE$1);\n return;\n }\n for (const listGroup of SelectorEngine.parents(target, SELECTOR_NAV_LIST_GROUP)) {\n // Set triggered links parents as active\n // With both