diff --git a/.github/workflows/autodoc_tutorials.yml b/.github/workflows/autodoc_tutorials.yml deleted file mode 100644 index aade7b32..00000000 --- a/.github/workflows/autodoc_tutorials.yml +++ /dev/null @@ -1,63 +0,0 @@ -name: autogenerate docs from tutorials -on: - # Triggers the workflow on push but only for the main branch - push: - branches: [ auto-doc-generation ] - paths: ['**.ipynb'] - -jobs: - docs-from-tutorials: - runs-on: ubuntu-latest - steps: - - name: Maximize build space - uses: easimon/maximize-build-space@master - with: - root-reserve-mb: 512 - swap-size-mb: 1024 - remove-dotnet: 'true' - remove-codeql: 'true' - remove-android: 'true' - remove-docker-images: 'true' - - # Check out repo and set up Python - - name: Check out the repository - uses: actions/checkout@v4 - with: - lfs: true - - # Use cached python and dependencies, install poetry - - name: "Setup Python, Poetry and Dependencies" - uses: packetcoders/action-setup-cache-python-poetry@main - with: - python-version: 3.8 - poetry-version: 1.2.2 - - - name: Install jupyter - run: poetry install -E notebook - - - name: Install pandoc - run: | - sudo wget https://github.com/jgm/pandoc/releases/download/3.1.8/pandoc-3.1.8-1-amd64.deb - sudo dpkg -i pandoc-3.1.8-1-amd64.deb - - # Execute and convert notebooks - - name: execute notebooks - run: | - cd $GITHUB_WORKSPACE/tutorial - curl -L --output ./WSe2.zip https://zenodo.org/record/6369728/files/WSe2.zip - unzip -d . -o ./WSe2.zip - find $GITHUB_WORKSPACE/tutorial -type f -name "*.ipynb" -exec poetry run jupyter-nbconvert --execute --to notebook --inplace {} \; - - - name: create docs/tutorial directory - run: mkdir -p $GITHUB_WORKSPACE/docs/tutorial - - - name: convert notebooks to rst and save to docs - run: find $GITHUB_WORKSPACE/tutorial -type f -name "*.ipynb" -exec poetry run jupyter-nbconvert --to rst --output-dir $GITHUB_WORKSPACE/docs/tutorial {} \; - # find tutorial -type f -name "*.ipynb" -exec poetry run jupyter-nbconvert --to rst --output-dir docs/tutorial {} \; - - - name: Commit changes - uses: EndBug/add-and-commit@v9 - with: - default_author: github_actions - message: 'Updating tutorial rsts for docs' - add: 'docs/tutorial/' diff --git a/.github/workflows/build_deploy_docs.yml b/.github/workflows/build_deploy_docs.yml new file mode 100644 index 00000000..487e4ae0 --- /dev/null +++ b/.github/workflows/build_deploy_docs.yml @@ -0,0 +1,97 @@ +name: build and deploy docs to pages +on: + # Triggers the workflow on push but only for the main branch + push: + branches: [ auto-doc-generation ] + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + + # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages +permissions: + contents: read + pages: write + id-token: write + +# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. +# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. 
+concurrency: + group: "pages" + cancel-in-progress: false + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Maximize build space + uses: easimon/maximize-build-space@master + with: + root-reserve-mb: 512 + swap-size-mb: 1024 + remove-dotnet: 'true' + remove-codeql: 'true' + remove-android: 'true' + remove-docker-images: 'true' + + # Check out repo and set up Python + - name: Check out the repository + uses: actions/checkout@v4 + with: + lfs: true + + # Use cached python and dependencies, install poetry + - name: "Setup Python, Poetry and Dependencies" + uses: packetcoders/action-setup-cache-python-poetry@main + with: + python-version: 3.8 + poetry-version: 1.2.2 + + - name: Install docs and notebook dependencies + run: poetry install -E notebook -E docs + + - name: Install pandoc + run: | + sudo wget https://github.com/jgm/pandoc/releases/download/3.1.8/pandoc-3.1.8-1-amd64.deb + sudo dpkg -i pandoc-3.1.8-1-amd64.deb + + - name: copy tutorial files to docs + run: | + cp -r $GITHUB_WORKSPACE/tutorial $GITHUB_WORKSPACE/docs/ + cp -r $GITHUB_WORKSPACE/sed/config $GITHUB_WORKSPACE/docs/sed + + # To be included later + # - name: Cache docs build + # id: cache-docs + # uses: actions/cache@v3 + # with: + # path: $GITHUB_WORKSPACE/_build + # key: ${{ runner.os }}-docs + + - name: download WSe2 data + # if: steps.cache-primes.outputs.cache-hit != 'true' + run: | + cd $GITHUB_WORKSPACE/docs/tutorial + curl -L --output ./WSe2.zip https://zenodo.org/record/6369728/files/WSe2.zip + unzip -o ./WSe2.zip -d . + + - name: build Sphinx docs + run: poetry run sphinx-build -b html $GITHUB_WORKSPACE/docs $GITHUB_WORKSPACE/_build + + - name: Setup Pages + uses: actions/configure-pages@v3 + + - name: Upload artifact + uses: actions/upload-pages-artifact@v2 + with: + path: '_build' + + # Deployment job + deploy: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + needs: build + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v2 diff --git a/README.md b/README.md index 27a88b21..5ea5c0f6 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # sed -[![Documentation Status](https://readthedocs.org/projects/sed/badge/?version=latest)](https://sed.readthedocs.io/en/latest/?badge=latest) +[![Documentation Status](https://github.com/OpenCOMPES/sed/actions/workflows/build_deploy_docs.yml/badge.svg)](https://opencompes.github.io/sed/) ![](https://github.com/OpenCOMPES/sed/actions/workflows/linting.yml/badge.svg?branch=main) ![](https://github.com/OpenCOMPES/sed/actions/workflows/testing_multiversion.yml/badge.svg?branch=main) ![](https://img.shields.io/pypi/pyversions/sedprocessor) diff --git a/docs/conf.py b/docs/conf.py index fa9e0dc6..e1bc1bd0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -10,12 +10,14 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
# +import tomlkit import os import sys sys.path.insert(0, os.path.abspath('..')) -import tomlkit # -- Project information ----------------------------------------------------- + + def _get_project_meta(): with open('../pyproject.toml') as pyproject: file_contents = pyproject.read() @@ -25,7 +27,7 @@ def _get_project_meta(): pkg_meta = _get_project_meta() project = str(pkg_meta['name']) -copyright = '2022, OpenCOMPES team' +copyright = '2022, OpenCOMPES team' author = 'OpenCOMPES team' # The short X.Y version @@ -38,9 +40,11 @@ def _get_project_meta(): # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ['sphinx_rtd_theme','sphinx.ext.autodoc','sphinx.ext.napoleon', -'sphinx.ext.todo','sphinx.ext.coverage','sphinx.ext.autosummary', -'sphinx.ext.coverage','sphinx_autodoc_typehints'] +extensions = ['sphinx_rtd_theme', 'sphinx.ext.autodoc', 'sphinx.ext.napoleon', + 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.autosummary', + 'sphinx.ext.coverage', 'sphinx_autodoc_typehints', "bokeh.sphinxext.bokeh_autodoc", + "bokeh.sphinxext.bokeh_plot", 'nbsphinx'] + autoclass_content = 'class' autodoc_member_order = 'bysource' @@ -61,7 +65,6 @@ def _get_project_meta(): } - # Set `typing.TYPE_CHECKING` to `True`: # https://pypi.org/project/sphinx-autodoc-typehints/ napoleon_use_param = True @@ -90,4 +93,4 @@ def _get_project_meta(): # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] \ No newline at end of file +html_static_path = ['_static'] diff --git a/docs/index.rst b/docs/index.rst index 396e7c27..6d8755b6 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -21,9 +21,9 @@ Single-Event DataFrame (SED) documentation :numbered: :caption: Examples - tutorial/1_binning_fake_data - tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data - tutorial/3_metadata_collection_and_export_to_NeXus + tutorial/1_binning_fake_data.ipynb + tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data.ipynb + tutorial/3_metadata_collection_and_export_to_NeXus.ipynb .. toctree:: :maxdepth: 2 diff --git a/docs/tutorial/1_binning_fake_data.rst b/docs/tutorial/1_binning_fake_data.rst deleted file mode 100644 index 1fadd8a2..00000000 --- a/docs/tutorial/1_binning_fake_data.rst +++ /dev/null @@ -1,302 +0,0 @@ -Binning demonstration on locally generated fake data -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -In this example, we generate a table with random data simulating a -single event dataset. We showcase the binning method, first on a simple -single table using the bin_partition method and then in the distributed -mehthod bin_dataframe, using daks dataframes. The first method is never -really called directly, as it is simply the function called by the -bin_dataframe on each partition of the dask dataframe. - -.. code:: ipython3 - - import sys - - import dask - import numpy as np - import pandas as pd - import dask.dataframe - - import matplotlib.pyplot as plt - - sys.path.append("../") - from sed.binning import bin_partition, bin_dataframe - -Generate Fake Data ------------------- - -.. code:: ipython3 - - n_pts = 100000 - cols = ["posx", "posy", "energy"] - df = pd.DataFrame(np.random.randn(n_pts, len(cols)), columns=cols) - df - - - - -.. raw:: html - -
           posx      posy    energy
0     -0.892244 -1.459783  0.311661
1      0.063908 -0.355335  0.651465
2      0.597229 -0.429607 -0.370872
3     -0.481183 -0.139158 -0.314332
4      1.105455 -0.189231  0.412213
...         ...       ...       ...
99995  0.281058 -1.276818 -1.487001
99996 -0.205844 -0.600267 -0.126838
99997 -1.167711 -0.598229 -0.341410
99998  0.345514 -0.203688 -0.598167
99999  0.138462 -0.730779 -2.268035

[100000 rows × 3 columns]
- - - -Define the binning range ------------------------- - -.. code:: ipython3 - - binAxes = ["posx", "posy", "energy"] - nBins = [120, 120, 120] - binRanges = [(-2, 2), (-2, 2), (-2, 2)] - coords = {ax: np.linspace(r[0], r[1], n) for ax, r, n in zip(binAxes, binRanges, nBins)} - -Compute the binning along the pandas dataframe ----------------------------------------------- - -.. code:: ipython3 - - %%time - res = bin_partition( - part=df, - bins=nBins, - axes=binAxes, - ranges=binRanges, - hist_mode="numba", - ) - - -.. parsed-literal:: - - CPU times: user 1.32 s, sys: 31.5 ms, total: 1.35 s - Wall time: 1.39 s - - -.. code:: ipython3 - - fig, axs = plt.subplots(1, 3, figsize=(8, 2.5), constrained_layout=True) - for i in range(3): - axs[i].imshow(res.sum(i)) - - - -.. image:: 1_binning_fake_data_files/1_binning_fake_data_8_0.png - - -Transform to dask dataframe ---------------------------- - -.. code:: ipython3 - - ddf = dask.dataframe.from_pandas(df, npartitions=50) - ddf - - - - -.. raw:: html - -
Dask DataFrame Structure:

                   posx     posy   energy
npartitions=50
0               float64  float64  float64
2000                ...      ...      ...
...                 ...      ...      ...
98000               ...      ...      ...
99999               ...      ...      ...

Dask Name: from_pandas, 1 graph layer
- - - -compute distributed binning on the partitioned dask dataframe -------------------------------------------------------------- - -In this example, the small dataset does not give significant improvement -over the pandas implementation, at least using this number of -partitions. A single partition would be faster (you can try…) but we use -multiple for demonstration purpouses. - -.. code:: ipython3 - - %%time - res = bin_dataframe( - df=ddf, - bins=nBins, - axes=binAxes, - ranges=binRanges, - hist_mode="numba", - ) - - - -.. parsed-literal:: - - 0%| | 0/50 [00:00] - - - - -.. raw:: html - - -
[Figure]
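As noted in the introduction to this example, ``bin_dataframe`` essentially applies ``bin_partition`` to each partition of the dask dataframe and accumulates the per-partition histograms. The sketch below only illustrates that idea; it is not the library's internal implementation, and the helper name is made up.

.. code:: ipython3

    # Conceptual sketch (not the sed internals): bin every dask partition
    # separately with bin_partition, then sum the per-partition histograms.
    import dask

    from sed.binning import bin_partition


    def binned_over_partitions(ddf, bins, axes, ranges):
        """Hypothetical helper illustrating the map-and-sum idea."""
        delayed_hists = [
            dask.delayed(bin_partition)(part, bins=bins, axes=axes, ranges=ranges)
            for part in ddf.to_delayed()  # one pandas DataFrame per partition
        ]
        return sum(dask.compute(*delayed_hists))


    # with the objects defined above, this should agree with bin_dataframe:
    # res_manual = binned_over_partitions(ddf, nBins, binAxes, binRanges)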
- - - -.. code:: ipython3 - - # The time elapsed in the scan - sp.loader.get_elapsed_time() - - - - -.. parsed-literal:: - - 2588.4949999999994 - - - -.. code:: ipython3 - - # Apply jittering to X, Y, t, ADC columns. - # Columns are defined in the config, or can be provided as list. - sp.add_jitter() - -.. code:: ipython3 - - # Inspect data in dataframe Columns: - # axes = ['X', 'Y', 't', 'ADC'] - # bins = [100, 100, 100, 100] - # ranges = [(0, 1800), (0, 1800), (130000, 140000), (0, 9000)] - # sp.viewEventHistogram(dfpid=1, axes=axes, bins=bins, ranges=ranges) - sp.view_event_histogram(dfpid=2) - - - - - -.. raw:: html - - -
- - - - - -Distortion correction and Momentum Calibration workflow -------------------------------------------------------- - -Distortion correction -~~~~~~~~~~~~~~~~~~~~~ - -1. step: -^^^^^^^^ - -Bin and load part of the dataframe in detector coordinates, and choose -energy plane where high-symmetry points can well be identified. Either -use the interactive tool, or pre-select the range: - -.. code:: ipython3 - - #sp.bin_and_load_momentum_calibration(df_partitions=20, plane=170) - sp.bin_and_load_momentum_calibration(df_partitions=100, plane=33, width=10, apply=True) - - - -.. parsed-literal:: - - 0%| | 0/100 [00:00 -
[Figure]
- - - - - -2. Step: -^^^^^^^^ - -Next, we select a number of features corresponding to the rotational -symmetry of the material, plus the center. These can either be -auto-detected (for well-isolated points), or provided as a list (these -can be read-off the graph in the cell above). These are then symmetrized -according to the rotational symmetry, and a spline-warping correction -for the x/y coordinates is calculated, which corrects for any geometric -distortions from the perfect n-fold rotational symmetry. - -.. code:: ipython3 - - #features = np.array([[203.2, 341.96], [299.16, 345.32], [350.25, 243.70], [304.38, 149.88], [199.52, 152.48], [154.28, 242.27], [248.29, 248.62]]) - #sp.define_features(features=features, rotation_symmetry=6, include_center=True, apply=True) - # Manual selection: Use a GUI tool to select peaks: - #sp.define_features(rotation_symmetry=6, include_center=True) - #sp.generate_splinewarp(rotation_symmetry=6, include_center=True, fwhm=10, sigma=12, sigma_radius=4) - # Autodetect: Uses the DAOStarFinder routine to locate maxima. - # Parameters are: - # fwhm: Full-width at half maximum of peaks. - # sigma: Number of standard deviations above the mean value of the image peaks must have. - # sigma_radius: number of standard deviations around a peak that peaks are fitted - sp.define_features(rotation_symmetry=6, auto_detect=True, include_center=True, fwhm=10, sigma=12, sigma_radius=4, apply=True) - - - -.. parsed-literal:: - - interactive(children=(Dropdown(description='Point:', options=(0, 1, 2, 3, 4, 5, 6), value=0), Output()), _dom_… - - - -.. parsed-literal:: - - interactive(children=(FloatText(value=153.4099341149909, description='point_x'), FloatText(value=243.056397194… - - - -.. parsed-literal:: - - Button(description='apply', style=ButtonStyle()) - - - -.. raw:: html - - -
[Figure]
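The auto-detection mode mentioned above uses the DAOStarFinder routine to locate intensity maxima. Below is a hedged sketch of what such a peak search could look like with photutils; the helper name and the way the ``sigma`` threshold is derived from the image statistics are assumptions for illustration, not the exact logic used by sed.

.. code:: ipython3

    # Illustrative sketch only: find bright features in a 2D momentum image
    # with photutils' DAOStarFinder.
    import numpy as np
    from photutils.detection import DAOStarFinder


    def detect_features_sketch(image, fwhm=10.0, sigma=12.0):
        """Hypothetical helper returning (x, y) positions of detected peaks."""
        threshold = image.mean() + sigma * image.std()  # sigma above the mean
        finder = DAOStarFinder(fwhm=fwhm, threshold=threshold)
        sources = finder(image)
        return np.transpose((sources["xcentroid"], sources["ycentroid"]))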
- - - -3. Step: -^^^^^^^^ - -Generate nonlinear correction using splinewarp algorithm. If no -landmarks have been defined in previous step, default parameters from -the config are used - -.. code:: ipython3 - - # Option whether a central point shall be fixed in the determiantion fo the correction - sp.generate_splinewarp(include_center=True) - - -.. parsed-literal:: - - Calulated thin spline correction based on the following landmarks: - pouter: [[153.40993411 243.05639719] - [199.53872932 152.7795494 ] - [203.00074516 342.99054667] - [299.87814564 346.19500548] - [305.62593678 150.19077869] - [350.95039536 244.78353201]] - pcent: (249.22957768646916, 249.24792658340425) - Original slice with reference features - - - - - -.. raw:: html - - -
- - - - - -.. parsed-literal:: - - Corrected slice with target features - - - - - -.. raw:: html - - -
- - - - - -.. parsed-literal:: - - Original slice with target features - - - - - -.. raw:: html - - -
- - - - - -Optional (Step 3a): -^^^^^^^^^^^^^^^^^^^ - -Save distortion correction parameters to configuration file in current -data folder: - -.. code:: ipython3 - - # Save generated distortion correction parameters for later reuse - sp.save_splinewarp() - -4. Step: -^^^^^^^^ - -To adjust scaling, position and orientation of the corrected momentum -space image, you can apply further affine transformations to the -distortion correction field. Here, first a postential scaling is -applied, next a translation, and finally a rotation around the center of -the image (defined via the config). One can either use an interactive -tool, or provide the adjusted values and apply them directly. - -.. code:: ipython3 - - #sp.pose_adjustment(xtrans=14, ytrans=18, angle=2) - sp.pose_adjustment(xtrans=8, ytrans=7, angle=-4, apply=True) - - - -.. parsed-literal:: - - interactive(children=(FloatSlider(value=1.0, description='scale', max=1.2, min=0.8, step=0.01), FloatSlider(va… - - - -.. parsed-literal:: - - Button(description='apply', style=ButtonStyle()) - - - -.. raw:: html - - -
[Figure]
- - - - -.. parsed-literal:: - - Output() - - - -.. raw:: html - - -
[Figure]
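To make the order of operations in the pose adjustment explicit, the following sketch composes the three transformations into a single homogeneous matrix: a scaling, then a translation, then a rotation about the image center. It illustrates the geometry only, not the code applied internally, and the center value is an assumption.

.. code:: ipython3

    # Illustration: scale, then translate, then rotate about the image center.
    import numpy as np


    def pose_matrix(scale=1.0, xtrans=0.0, ytrans=0.0, angle=0.0, center=(256.0, 256.0)):
        """Hypothetical 3x3 transform: rotate(center) @ translate @ scale."""
        cx, cy = center
        a = np.deg2rad(angle)
        S = np.diag([scale, scale, 1.0])
        T = np.array([[1.0, 0.0, xtrans], [0.0, 1.0, ytrans], [0.0, 0.0, 1.0]])
        R = np.array([[np.cos(a), -np.sin(a), 0.0], [np.sin(a), np.cos(a), 0.0], [0.0, 0.0, 1.0]])
        C = np.array([[1.0, 0.0, -cx], [0.0, 1.0, -cy], [0.0, 0.0, 1.0]])
        C_inv = np.array([[1.0, 0.0, cx], [0.0, 1.0, cy], [0.0, 0.0, 1.0]])
        return C_inv @ R @ C @ T @ S  # applied right-to-left to a column vector


    # values from the cell above
    print(pose_matrix(xtrans=8, ytrans=7, angle=-4))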
- - - -5. Step: -^^^^^^^^ - -Finally, the momentum correction is applied to the dataframe, and -corresponding meta data are stored - -.. code:: ipython3 - - sp.apply_momentum_correction() - - -.. parsed-literal:: - - Adding corrected X/Y columns to dataframe: - Calculating inverse deformation field, this might take a moment... - Dask DataFrame Structure: - X Y t ADC Xm Ym - npartitions=100 - float64 float64 float64 float64 float64 float64 - ... ... ... ... ... ... - ... ... ... ... ... ... ... - ... ... ... ... ... ... - ... ... ... ... ... ... - Dask Name: apply_dfield, 206 graph layers - - -Momentum calibration workflow -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -1. Step: -^^^^^^^^ - -First, the momentum scaling needs to be calibtrated. Either, one can -provide the coordinates of one point outside the center, and provide its -distane to the Brillouin zone center (which is assumed to be located in -the center of the image), one can specify two points on the image and -their distance (where the 2nd point marks the BZ center),or one can -provide absolute k-coordinates of two distinct momentum points. - -If no points are provided, an interactive tool is created. Here, left -mouse click selectes the off-center point (brillouin_zone_cetnered=True) -or toggle-selects the off-center and center point. - -.. code:: ipython3 - - k_distance = 4/3*np.pi/3.28 - #sp.calibrate_momentum_axes(k_distance = k_distance) - point_a = [308, 345] - sp.calibrate_momentum_axes(point_a=point_a, k_distance = k_distance, apply=True) - #point_b = [247, 249] - #sp.calibrate_momentum_axes(point_a=point_a, point_b = point_b, k_coord_a = [.5, 1.1], k_coord_b = [1.3, 0], equiscale=False - - - -.. parsed-literal:: - - interactive(children=(IntText(value=308, description='point_a_x'), IntText(value=345, description='point_a_y')… - - - -.. parsed-literal:: - - Button(description='apply', style=ButtonStyle()) - - - -.. raw:: html - - -
[Figure]
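The ``k_distance`` value used above is the Γ–K distance of a hexagonal lattice, |Γ–K| = 4π/(3a), evaluated with the WSe2 in-plane lattice constant a ≈ 3.28 Å assumed in the expression ``4/3*np.pi/3.28``. A quick numerical check:

.. code:: ipython3

    # Quick check of the k_distance expression used above.
    import numpy as np

    a = 3.28  # Å, in-plane lattice constant of WSe2 (as assumed above)
    k_distance = 4 / 3 * np.pi / a
    print(f"|Gamma-K| = {k_distance:.3f} 1/Å")  # ≈ 1.277 1/Å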
- - - -Optional (Step 1a): -''''''''''''''''''' - -Save momentum calibration parameters to configuration file in current -data folder: - -.. code:: ipython3 - - # Save generated momentum calibration parameters for later reuse - sp.save_momentum_calibration() - -2. Step: -^^^^^^^^ - -Now, the distortion correction and momentum calibration needs to be -applied to the dataframe. - -.. code:: ipython3 - - sp.apply_momentum_calibration() - - -.. parsed-literal:: - - Adding kx/ky columns to dataframe: - Dask DataFrame Structure: - X Y t ADC Xm Ym kx ky - npartitions=100 - float64 float64 float64 float64 float64 float64 float64 float64 - ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... - Dask Name: assign, 216 graph layers - - -Energy Correction (optional) ----------------------------- - -The purpose of the energy correction is to correct for any -momentum-dependent distortion of the energy axis, e.g. from geometric -effects in the flight tube, or from space charge - -1st step: -^^^^^^^^^ - -Here, one can select the functional form to be used, and adjust its -parameters. The binned data used for the momentum calibration is plotted -around the Fermi energy (defined by tof_fermi), and the correction -function is plotted ontop. Possible correction functions are: “sperical” -(parameter: diameter), “Lorentzian” (parameter: gamma), “Gaussian” -(parameter: sigma), and “Lorentzian_asymmetric” (parameters: gamma, -amplitude2, gamma2). - -One can either use an interactive alignment tool, or provide parameters -directly. - -.. code:: ipython3 - - #sp.adjust_energy_correction(amplitude=2.5, center=(730, 730), gamma=920, tof_fermi = 66200) - sp.adjust_energy_correction(amplitude=2.5, center=(730, 730), gamma=920, tof_fermi = 66200, apply=True) - - - -.. parsed-literal:: - - interactive(children=(FloatSlider(value=2.5, description='amplitude', max=10.0), FloatSlider(value=730.0, desc… - - - -.. parsed-literal:: - - Button(description='apply', style=ButtonStyle()) - - - -.. raw:: html - - -
[Figure]
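To illustrate what such a momentum-dependent correction looks like, the sketch below evaluates a generic Lorentzian-shaped TOF shift over the detector plane, using the amplitude, center and gamma values from the cell above. The exact normalization implemented in sed may differ; this is only meant to visualize the idea.

.. code:: ipython3

    # Generic Lorentzian-shaped correction profile (illustration only; the
    # normalization used internally by sed may differ).
    import matplotlib.pyplot as plt
    import numpy as np

    amplitude, center, gamma = 2.5, (730, 730), 920
    x, y = np.meshgrid(np.arange(1800), np.arange(1800))
    r2 = (x - center[0]) ** 2 + (y - center[1]) ** 2
    correction = amplitude * gamma**2 / (gamma**2 + r2)  # largest at the center

    plt.figure()
    plt.imshow(correction)
    plt.colorbar(label="TOF shift (arb. units)")
    plt.show()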
- - - -Optional (Step 1a): -''''''''''''''''''' - -Save energy correction parameters to configuration file in current data -folder: - -.. code:: ipython3 - - # Save generated energy correction parameters for later reuse - sp.save_energy_correction() - -2. Step -^^^^^^^ - -After adjustment, the energy correction is directly applied to the TOF -axis. - -.. code:: ipython3 - - sp.apply_energy_correction() - - -.. parsed-literal:: - - Applying energy correction to dataframe... - Dask DataFrame Structure: - X Y t ADC Xm Ym kx ky tm - npartitions=100 - float64 float64 float64 float64 float64 float64 float64 float64 float64 - ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... - Dask Name: assign, 230 graph layers - - -3. Energy calibration ---------------------- - -For calibrating the energy axis, a set of data taken at different bias -voltages around the value where the measurement was taken is required. - -1. Step: -^^^^^^^^ - -In a first step, the data are loaded, binned along the TOF dimension, -and normalized. The used bias voltages can be either provided, or read -from attributes in the source files if present. - -.. code:: ipython3 - - # Load energy calibration EDCs - energycalfolder = data_path + "/energycal_2019_01_08/" - scans = np.arange(1,12) - voltages = np.arange(12,23,1) - files = [energycalfolder + r'Scan' + str(num).zfill(3) + '_' + str(num+11) + '.h5' for num in scans] - sp.load_bias_series(data_files=files, normalize=True, biases=voltages, ranges=[(64000, 75000)]) - - - -.. parsed-literal:: - - 0%| | 0/11 [00:00 - - - - - -2. Step: -^^^^^^^^ - -Next, the same peak or feature needs to be selected in each curve. For -this, one needs to define “ranges” for each curve, within which the peak -of interest is located. One can either provide these ranges manually, or -provide one range for a “reference” curve, and infer the ranges for the -other curves using a dynamic time warping algorithm. - -.. code:: ipython3 - - # Option 1 = specify the ranges containing a common feature (e.g an equivalent peak) for all bias scans - # rg = [(129031.03103103103, 129621.62162162163), (129541.54154154155, 130142.14214214214), (130062.06206206206, 130662.66266266267), (130612.61261261262, 131213.21321321322), (131203.20320320321, 131803.8038038038), (131793.7937937938, 132384.38438438438), (132434.43443443443, 133045.04504504506), (133105.10510510512, 133715.71571571572), (133805.8058058058, 134436.43643643643), (134546.54654654654, 135197.1971971972)] - # sp.find_bias_peaks(ranges=rg, infer_others=False) - # Option 2 = specify the range for one curve and infer the others - # This will open an interactive tool to select the correct ranges for the curves. - # IMPORTANT: Don't choose the range too narrow about a peak, and choose a refid - # somewhere in the middle or towards larger biases! - rg = (66100, 67000) - sp.find_bias_peaks(ranges=rg, ref_id=5, infer_others=True, apply=True) - - - -.. parsed-literal:: - - interactive(children=(IntSlider(value=5, description='refid', max=10), IntRangeSlider(value=(66100, 67000), de… - - - -.. parsed-literal:: - - Button(description='apply', style=ButtonStyle()) - - - -.. raw:: html - - -
[Figure]
- - - -3. Step: -^^^^^^^^ - -Next, the detected peak positions and bias voltages are used to -determine the calibration function. This can be either done by fitting -the functional form d\ :sup:`2/(t-t0)`\ 2 via lmfit (“lmfit”), or using -a polynomial approxiamtion (“lstsq” or “lsqr”). Here, one can also -define a reference id, and a reference energy. Those define the absolute -energy position of the feature used for calibration in the “reference” -trace, at the bias voltage where the final measurement has been -performed. The energy scale can be either “kientic” (decreasing energy -with increasing TOF), or “binding” (increasing energy with increasing -TOF). - -After calculating the calibration, all traces corrected with the -calibration are plotted ontop of each other, the calibration function -together with the extracted features is plotted. - -.. code:: ipython3 - - # use the refid of the bias that the measurement was taken at - # Eref can be used to set the absolute energy (kinetic energy, E-EF) of the feature used for energy calibration (if known) - refid=4 - Eref=-0.5 - # the lmfit method uses a fit of (d/(t-t0))**2 to determine the energy calibration - sp.calibrate_energy_axis(ref_energy=Eref, ref_id=refid, energy_scale="kinetic", method="lmfit") - - -.. parsed-literal:: - - [[Fit Statistics]] - # fitting method = leastsq - # function evals = 130 - # data points = 11 - # variables = 3 - chi-square = 0.00212334 - reduced chi-square = 2.6542e-04 - Akaike info crit = -88.0792774 - Bayesian info crit = -86.8855916 - [[Variables]] - d: 1.09544520 +/- 0.03592430 (3.28%) (init = 1) - t0: 7.6073e-07 +/- 7.4247e-09 (0.98%) (init = 1e-06) - E0: -46.6134126 +/- 0.64226365 (1.38%) (init = -22) - [[Correlations]] (unreported correlations are < 0.100) - C(d, t0) = -0.9997 - C(d, E0) = -0.9988 - C(t0, E0) = +0.9974 - Quality of Calibration: - - - - - -.. raw:: html - - -
- - - - - -.. parsed-literal:: - - E/TOF relationship: - - - -.. raw:: html - - -
[Figure]
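The fitted functional form corresponds to the usual time-of-flight relation: an electron traversing an effective drift length d in a time (t - t0) has kinetic energy proportional to (d/(t - t0))**2, with E0 absorbing constant offsets. The sketch below evaluates this relation in SI units for illustration; the fitted d, t0 and E0 printed above are in the loader's internal TOF units and are not inserted directly.

.. code:: ipython3

    # Sketch of the TOF-to-energy relation behind the "lmfit" calibration:
    # E_kin = 1/2 * m_e * (d / (t - t0))**2 (plus an offset).
    import scipy.constants as const


    def tof_to_energy(t, d=1.1, t0=0.0):
        """Kinetic energy in eV for a flight length d [m] and times t, t0 [s]."""
        velocity = d / (t - t0)
        return 0.5 * const.m_e * velocity**2 / const.e


    print(f"{tof_to_energy(760e-9):.2f} eV")  # roughly 6 eV for a 760 ns flight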
- - - -Optional (Step 3a): -''''''''''''''''''' - -Save energy calibration parameters to configuration file in current data -folder: - -.. code:: ipython3 - - # Save generated energy calibration parameters for later reuse - sp.save_energy_calibration() - -4. Step: -^^^^^^^^ - -Finally, the the energy axis is added to the dataframe. - -.. code:: ipython3 - - sp.append_energy_axis() - - -.. parsed-literal:: - - Adding energy column to dataframe: - Dask DataFrame Structure: - X Y t ADC Xm Ym kx ky tm energy - npartitions=100 - float64 float64 float64 float64 float64 float64 float64 float64 float64 float64 - ... ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... ... - Dask Name: assign, 240 graph layers - - -4. Delay calibration: ---------------------- - -The delay axis is calculated from the ADC input column based on the -provided delay range. ALternatively, the delay scan range can also be -extracted from attributes inside a source file, if present. - -.. code:: ipython3 - - #from pathlib import Path - #datafile = "file.h5" - #print(datafile) - #sp.calibrate_delay_axis(datafile=datafile) - delay_range = (-500, 1500) - sp.calibrate_delay_axis(delay_range=delay_range, preview=True) - - -.. parsed-literal:: - - Adding delay column to dataframe: - X Y t ADC Xm \ - 0 0.212945 0.212945 0.212945 0.212945 -12.966236 - 1 364.881143 1001.881143 70100.881143 6316.881143 354.714506 - 2 761.446814 818.446814 75615.446814 6316.446814 790.350068 - 3 692.455863 971.455863 66455.455863 6317.455863 712.673503 - 4 670.565557 711.565557 73025.565557 6316.565557 695.169961 - 5 299.434403 1164.434403 68459.434403 6316.434403 282.149004 - 6 570.680891 664.680891 73902.680891 6315.680891 588.507368 - 7 822.115300 545.115300 72632.115300 6318.115300 846.594377 - 8 817.568558 415.568558 72421.568558 6316.568558 836.477911 - 9 1005.829423 666.829423 72801.829423 6316.829423 1037.903575 - - Ym kx ky tm energy delay - 0 87.166832 -2.417453 -2.106699 -47.985445 -25.209509 -660.265673 - 1 1030.009193 -1.276388 0.819329 70083.868621 -9.300472 1471.943002 - 2 838.353388 0.075568 0.224542 75614.555583 -16.703309 1471.796393 - 3 983.456676 -0.165494 0.674858 66449.755902 -0.820206 1472.137000 - 4 739.476304 -0.219815 -0.082314 73025.171634 -13.802182 1471.836475 - 5 1183.226206 -1.501589 1.294825 68432.920826 -5.958803 1471.792204 - 6 699.539735 -0.550833 -0.206254 73899.753463 -14.873129 1471.537854 - 7 585.834730 0.250118 -0.559127 72627.964178 -13.280445 1472.359595 - 8 465.231161 0.218722 -0.933409 72411.898419 -12.986664 1471.837488 - 9 707.647623 0.843829 -0.181091 72794.353709 -13.501836 1471.925543 - - -5. Visualization of calibrated histograms ------------------------------------------ - -With all calibrated axes present in the dataframe, we can visualize the -corresponding histograms, and determine the respective binning ranges - -.. code:: ipython3 - - axes = ['kx', 'ky', 'energy', 'delay'] - ranges = [[-3, 3], [-3, 3], [-6, 2], [-600, 1600]] - sp.view_event_histogram(dfpid=1, axes=axes, ranges=ranges) - - - - - -.. raw:: html - - -
- - - - - -Define the binning ranges and compute calibrated data volume ------------------------------------------------------------- - -.. code:: ipython3 - - axes = ['kx', 'ky', 'energy', 'delay'] - bins = [100, 100, 200, 50] - ranges = [[-2, 2], [-2, 2], [-4, 2], [-600, 1600]] - res = sp.compute(bins=bins, axes=axes, ranges=ranges) - - - -.. parsed-literal:: - - 0%| | 0/100 [00:00 - - - - -.. raw:: html - - -
[Figure]
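Assuming the binned result ``res`` behaves like an ``xarray.DataArray`` with the dimensions named above (kx, ky, energy, delay), cuts through the four-dimensional volume can be taken directly with labeled indexing. A hypothetical example:

.. code:: ipython3

    # Hypothetical follow-up (assumes res is an xarray.DataArray with named
    # dims kx, ky, energy, delay): energy-momentum cut integrated over ky
    # and over the delays after time zero.
    res.sel(delay=slice(0, 500)).sum(dim=["ky", "delay"]).plot(x="kx", y="energy")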
- - - diff --git a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_10_3.png b/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_10_3.png deleted file mode 100644 index 09f7282a..00000000 Binary files a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_10_3.png and /dev/null differ diff --git a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_12_3.png b/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_12_3.png deleted file mode 100644 index c42699fd..00000000 Binary files a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_12_3.png and /dev/null differ diff --git a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_18_2.png b/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_18_2.png deleted file mode 100644 index 75e2f25e..00000000 Binary files a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_18_2.png and /dev/null differ diff --git a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_18_4.png b/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_18_4.png deleted file mode 100644 index 8626e5e7..00000000 Binary files a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_18_4.png and /dev/null differ diff --git a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_22_2.png b/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_22_2.png deleted file mode 100644 index ff9d8a61..00000000 Binary files a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_22_2.png and /dev/null differ diff --git a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_29_2.png b/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_29_2.png deleted file mode 100644 index 92ce557f..00000000 Binary files a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_29_2.png and /dev/null differ diff --git a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_38_2.png 
b/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_38_2.png deleted file mode 100644 index fd128cef..00000000 Binary files a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_38_2.png and /dev/null differ diff --git a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_40_5.png b/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_40_5.png deleted file mode 100644 index 60a6210c..00000000 Binary files a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_40_5.png and /dev/null differ diff --git a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_52_1.png b/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_52_1.png deleted file mode 100644 index d3ac5355..00000000 Binary files a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_52_1.png and /dev/null differ diff --git a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_5_1.png b/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_5_1.png deleted file mode 100644 index 9fe24cac..00000000 Binary files a/docs/tutorial/2_conversion_pipeline_for_example_time-resolved_ARPES_data_files/2_conversion_pipeline_for_example_time-resolved_ARPES_data_5_1.png and /dev/null differ diff --git a/docs/tutorial/3_metadata_collection_and_export_to_NeXus.rst b/docs/tutorial/3_metadata_collection_and_export_to_NeXus.rst deleted file mode 100644 index 5ae0720e..00000000 --- a/docs/tutorial/3_metadata_collection_and_export_to_NeXus.rst +++ /dev/null @@ -1,399 +0,0 @@ -Binning with metadata generation, and storing into a NeXus file -=============================================================== - -In this example, we show how to bin the same data used for example 3, -but using the values for correction/calibration parameters generated in -the example notebook 3, which are locally saved in the file -sed_config.yaml. These data and the corresponding (machine and -processing) metadata are then stored to a NeXus file following the -NXmpes NeXus standard -(https://fairmat-experimental.github.io/nexus-fairmat-proposal/9636feecb79bb32b828b1a9804269573256d7696/classes/contributed_definitions/NXmpes.html#nxmpes) -using the ‘dataconverter’ of the pynxtools package -(https://github.com/FAIRmat-NFDI/pynxtools). - -.. code:: ipython3 - - %load_ext autoreload - %autoreload 2 - import numpy as np - import matplotlib.pyplot as plt - import os - - import sed - - %matplotlib widget - -Load Data ---------- - -.. code:: ipython3 - - data_path = '.' # Put in Path to a storage of at least 20 Gbyte free space. 
- if not os.path.exists(data_path + "/WSe2.zip"): - os.system(f"curl -L --output {data_path}/WSe2.zip https://zenodo.org/record/6369728/files/WSe2.zip") - if not os.path.isdir(data_path + "/Scan049_1") or not os.path.isdir(data_path + "/energycal_2019_01_08/"): - os.system(f"unzip -d {data_path} -o {data_path}/WSe2.zip") - -.. code:: ipython3 - - metadata = {} - # manual Meta data. These should ideally come from an Electronic Lab Notebook. - #General - metadata['experiment_summary'] = 'WSe2 XUV NIR pump probe data.' - metadata['entry_title'] = 'Valence Band Dynamics - 800 nm linear s-polarized pump, 0.6 mJ/cm2 absorbed fluence' - metadata['experiment_title'] = 'Valence band dynamics of 2H-WSe2' - - #User - # Fill general parameters of NXuser - # TODO: discuss how to deal with multiple users? - metadata['user0'] = {} - metadata['user0']['name'] = 'Julian Maklar' - metadata['user0']['role'] = 'Principal Investigator' - metadata['user0']['affiliation'] = 'Fritz Haber Institute of the Max Planck Society' - metadata['user0']['address'] = 'Faradayweg 4-6, 14195 Berlin' - metadata['user0']['email'] = 'maklar@fhi-berlin.mpg.de' - - #NXinstrument - metadata['instrument'] = {} - #analyzer - metadata['instrument']['analyzer']={} - metadata['instrument']['analyzer']['slow_axes'] = "delay" # the scanned axes - metadata['instrument']['analyzer']['spatial_resolution'] = 10. - metadata['instrument']['analyzer']['energy_resolution'] = 110. - metadata['instrument']['analyzer']['momentum_resolution'] = 0.08 - metadata['instrument']['analyzer']['working_distance'] = 4. - metadata['instrument']['analyzer']['lens_mode'] = "6kV_kmodem4.0_30VTOF.sav" - - #probe beam - metadata['instrument']['beam']={} - metadata['instrument']['beam']['probe']={} - metadata['instrument']['beam']['probe']['incident_energy'] = 21.7 - metadata['instrument']['beam']['probe']['incident_energy_spread'] = 0.11 - metadata['instrument']['beam']['probe']['pulse_duration'] = 20. - metadata['instrument']['beam']['probe']['frequency'] = 500. - metadata['instrument']['beam']['probe']['incident_polarization'] = [1, 1, 0, 0] # p pol Stokes vector - metadata['instrument']['beam']['probe']['extent'] = [80., 80.] - #pump beam - metadata['instrument']['beam']['pump']={} - metadata['instrument']['beam']['pump']['incident_energy'] = 1.55 - metadata['instrument']['beam']['pump']['incident_energy_spread'] = 0.08 - metadata['instrument']['beam']['pump']['pulse_duration'] = 35. - metadata['instrument']['beam']['pump']['frequency'] = 500. - metadata['instrument']['beam']['pump']['incident_polarization'] = [1, -1, 0, 0] # s pol Stokes vector - metadata['instrument']['beam']['pump']['incident_wavelength'] = 800. - metadata['instrument']['beam']['pump']['average_power'] = 300. - metadata['instrument']['beam']['pump']['pulse_energy'] = metadata['instrument']['beam']['pump']['average_power']/metadata['instrument']['beam']['pump']['frequency']#µJ - metadata['instrument']['beam']['pump']['extent'] = [230., 265.] - metadata['instrument']['beam']['pump']['fluence'] = 0.15 - - #sample - metadata['sample']={} - metadata['sample']['preparation_date'] = '2019-01-13T10:00:00+00:00' - metadata['sample']['preparation_description'] = 'Cleaved' - metadata['sample']['sample_history'] = 'Cleaved' - metadata['sample']['chemical_formula'] = 'WSe2' - metadata['sample']['description'] = 'Sample' - metadata['sample']['name'] = 'WSe2 Single Crystal' - - metadata['file'] = {} - metadata['file']["trARPES:Carving:TEMP_RBV"] = 300. 
- metadata['file']["trARPES:XGS600:PressureAC:P_RD"] = 5.e-11 - metadata['file']["KTOF:Lens:Extr:I"] = -0.12877 - metadata['file']["KTOF:Lens:UDLD:V"] = 399.99905 - metadata['file']["KTOF:Lens:Sample:V"] = 17.19976 - metadata['file']["KTOF:Apertures:m1.RBV"] = 3.729931 - metadata['file']["KTOF:Apertures:m2.RBV"] = -5.200078 - metadata['file']["KTOF:Apertures:m3.RBV"] = -11.000425 - - # Sample motor positions - metadata['file']['trARPES:Carving:TRX.RBV'] = 7.1900000000000004 - metadata['file']['trARPES:Carving:TRY.RBV'] = -6.1700200225439552 - metadata['file']['trARPES:Carving:TRZ.RBV'] = 33.4501953125 - metadata['file']['trARPES:Carving:THT.RBV'] = 423.30500940561586 - metadata['file']['trARPES:Carving:PHI.RBV'] = 0.99931647456264949 - metadata['file']['trARPES:Carving:OMG.RBV'] = 11.002500171914066 - -.. code:: ipython3 - - # The Scan directory - fdir = data_path + '/Scan049_1' - # create sed processor using the config file, and collect the meta data from the files: - sp = sed.SedProcessor(folder=fdir, config="../sed/config/mpes_example_config.yaml", metadata=metadata, collect_metadata=True) - - -.. parsed-literal:: - - Configuration loaded from: [/home/runner/work/sed/sed/sed/config/mpes_example_config.yaml] - Folder config loaded from: [/home/runner/work/sed/sed/tutorial/sed_config.yaml] - Default config loaded from: [/home/runner/work/sed/sed/sed/config/default.yaml] - Gathering metadata from different locations - Collecting time stamps... - Collecting file metadata... - Collecting data from the EPICS archive... - - -.. code:: ipython3 - - # Apply jittering to X, Y, t, ADC columns. - sp.add_jitter() - -.. code:: ipython3 - - # Calculate machine-coordinate data for pose adjustment - sp.bin_and_load_momentum_calibration(df_partitions=10, plane=33, width=10, apply=True) - - - -.. parsed-literal:: - - 0%| | 0/10 [00:00 -
[Figure]
- - - - - -.. code:: ipython3 - - # Adjust pose alignment, using stored distortion correction - sp.pose_adjustment(xtrans=8, ytrans=7, angle=-4, apply=True, use_correction=True) - - -.. parsed-literal:: - - Calulated thin spline correction based on the following landmarks: - pouter: [[203.2 341.96] - [299.16 345.32] - [350.25 243.7 ] - [304.38 149.88] - [199.52 152.48] - [154.28 242.27]] - pcent: (248.29, 248.62) - - - -.. parsed-literal:: - - interactive(children=(FloatSlider(value=1.0, description='scale', max=1.2, min=0.8, step=0.01), FloatSlider(va… - - - -.. parsed-literal:: - - Button(description='apply', style=ButtonStyle()) - - - -.. raw:: html - - -
[Figure]
- - - - -.. parsed-literal:: - - Output() - - - -.. raw:: html - - -
[Figure]
- - - -.. code:: ipython3 - - # Apply stored momentum correction - sp.apply_momentum_correction() - - -.. parsed-literal:: - - Adding corrected X/Y columns to dataframe: - Calculating inverse deformation field, this might take a moment... - Dask DataFrame Structure: - X Y t ADC Xm Ym - npartitions=100 - float64 float64 float64 float64 float64 float64 - ... ... ... ... ... ... - ... ... ... ... ... ... ... - ... ... ... ... ... ... - ... ... ... ... ... ... - Dask Name: apply_dfield, 206 graph layers - - -.. code:: ipython3 - - # Apply stored config momentum calibration - sp.apply_momentum_calibration() - - -.. parsed-literal:: - - Adding kx/ky columns to dataframe: - Dask DataFrame Structure: - X Y t ADC Xm Ym kx ky - npartitions=100 - float64 float64 float64 float64 float64 float64 float64 float64 - ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... - Dask Name: assign, 216 graph layers - - -.. code:: ipython3 - - # Apply stored config energy correction - sp.apply_energy_correction() - - -.. parsed-literal:: - - Applying energy correction to dataframe... - Dask DataFrame Structure: - X Y t ADC Xm Ym kx ky tm - npartitions=100 - float64 float64 float64 float64 float64 float64 float64 float64 float64 - ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... - Dask Name: assign, 230 graph layers - - -.. code:: ipython3 - - # Apply stored config energy calibration - sp.append_energy_axis() - - -.. parsed-literal:: - - Adding energy column to dataframe: - Dask DataFrame Structure: - X Y t ADC Xm Ym kx ky tm energy - npartitions=100 - float64 float64 float64 float64 float64 float64 float64 float64 float64 float64 - ... ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... ... - ... ... ... ... ... ... ... ... ... ... - Dask Name: assign, 240 graph layers - - -.. code:: ipython3 - - # Apply delay calibration - delay_range = (-500, 1500) - sp.calibrate_delay_axis(delay_range=delay_range, preview=True) - - -.. 
parsed-literal:: - - Adding delay column to dataframe: - X Y t ADC Xm \ - 0 0.304069 0.304069 0.304069 0.304069 -23.287495 - 1 364.575699 1001.575699 70100.575699 6316.575699 352.397573 - 2 761.340241 818.340241 75615.340241 6316.340241 790.823530 - 3 692.158208 971.158208 66455.158208 6317.158208 713.107004 - 4 671.361639 712.361639 73026.361639 6317.361639 695.964200 - 5 298.818915 1163.818915 68458.818915 6315.818915 279.973019 - 6 571.053798 665.053798 73903.053798 6316.053798 587.429179 - 7 822.155640 545.155640 72632.155640 6318.155640 845.225397 - 8 818.261624 416.261624 72422.261624 6317.261624 834.627119 - 9 1006.253903 667.253903 72802.253903 6317.253903 1036.080260 - - Ym kx ky tm energy delay - 0 96.990568 -2.450894 -2.078351 -47.888991 -25.060130 -660.234913 - 1 1032.180586 -1.287266 0.818260 70083.559408 -9.287393 1471.839898 - 2 837.298641 0.070692 0.214642 75614.451566 -16.576959 1471.760419 - 3 982.064607 -0.170023 0.663033 66449.469058 -0.845837 1472.036526 - 4 740.392674 -0.223120 -0.085510 73025.980144 -13.731258 1472.105195 - 5 1184.982772 -1.511590 1.291542 68432.305535 -5.971073 1471.584444 - 6 701.331526 -0.559291 -0.206496 73900.142328 -14.783116 1471.663729 - 7 585.869445 0.239194 -0.564122 72628.005210 -13.216942 1472.373212 - 8 466.420982 0.206368 -0.934096 72412.616768 -12.928625 1472.071434 - 9 706.827017 0.830338 -0.189474 72794.762764 -13.435300 1472.068828 - - -Compute final data volume -------------------------- - -.. code:: ipython3 - - axes = ['kx', 'ky', 'energy', 'delay'] - bins = [100, 100, 200, 50] - ranges = [[-2, 2], [-2, 2], [-4, 2], [-600, 1600]] - res = sp.compute(bins=bins, axes=axes, ranges=ranges) - - - -.. parsed-literal:: - - 0%| | 0/100 [00:00 - - - diff --git a/docs/tutorial/3_metadata_collection_and_export_to_NeXus_files/3_metadata_collection_and_export_to_NeXus_7_3.png b/docs/tutorial/3_metadata_collection_and_export_to_NeXus_files/3_metadata_collection_and_export_to_NeXus_7_3.png deleted file mode 100644 index 0261de63..00000000 Binary files a/docs/tutorial/3_metadata_collection_and_export_to_NeXus_files/3_metadata_collection_and_export_to_NeXus_7_3.png and /dev/null differ diff --git a/docs/tutorial/3_metadata_collection_and_export_to_NeXus_files/3_metadata_collection_and_export_to_NeXus_8_3.png b/docs/tutorial/3_metadata_collection_and_export_to_NeXus_files/3_metadata_collection_and_export_to_NeXus_8_3.png deleted file mode 100644 index c1c8c7b8..00000000 Binary files a/docs/tutorial/3_metadata_collection_and_export_to_NeXus_files/3_metadata_collection_and_export_to_NeXus_8_3.png and /dev/null differ diff --git a/docs/tutorial/3_metadata_collection_and_export_to_NeXus_files/3_metadata_collection_and_export_to_NeXus_8_5.png b/docs/tutorial/3_metadata_collection_and_export_to_NeXus_files/3_metadata_collection_and_export_to_NeXus_8_5.png deleted file mode 100644 index 0cddfaaa..00000000 Binary files a/docs/tutorial/3_metadata_collection_and_export_to_NeXus_files/3_metadata_collection_and_export_to_NeXus_8_5.png and /dev/null differ diff --git a/poetry.lock b/poetry.lock index eb0283c9..90509ae3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -497,101 +497,101 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.3.1" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.1.tar.gz", hash = "sha256:d9137a876020661972ca6eec0766d81aef8a5627df628b664b234b73396e727e"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8aee051c89e13565c6bd366813c386939f8e928af93c29fda4af86d25b73d8f8"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:352a88c3df0d1fa886562384b86f9a9e27563d4704ee0e9d56ec6fcd270ea690"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:223b4d54561c01048f657fa6ce41461d5ad8ff128b9678cfe8b2ecd951e3f8a2"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f861d94c2a450b974b86093c6c027888627b8082f1299dfd5a4bae8e2292821"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1171ef1fc5ab4693c5d151ae0fdad7f7349920eabbaca6271f95969fa0756c2d"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28f512b9a33235545fbbdac6a330a510b63be278a50071a336afc1b78781b147"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0e842112fe3f1a4ffcf64b06dc4c61a88441c2f02f373367f7b4c1aa9be2ad5"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f9bc2ce123637a60ebe819f9fccc614da1bcc05798bbbaf2dd4ec91f3e08846"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f194cce575e59ffe442c10a360182a986535fd90b57f7debfaa5c845c409ecc3"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9a74041ba0bfa9bc9b9bb2cd3238a6ab3b7618e759b41bd15b5f6ad958d17605"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b578cbe580e3b41ad17b1c428f382c814b32a6ce90f2d8e39e2e635d49e498d1"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6db3cfb9b4fcecb4390db154e75b49578c87a3b9979b40cdf90d7e4b945656e1"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:debb633f3f7856f95ad957d9b9c781f8e2c6303ef21724ec94bea2ce2fcbd056"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-win32.whl", hash = "sha256:87071618d3d8ec8b186d53cb6e66955ef2a0e4fa63ccd3709c0c90ac5a43520f"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:e372d7dfd154009142631de2d316adad3cc1c36c32a38b16a4751ba78da2a397"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae4070f741f8d809075ef697877fd350ecf0b7c5837ed68738607ee0a2c572cf"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58e875eb7016fd014c0eea46c6fa92b87b62c0cb31b9feae25cbbe62c919f54d"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dbd95e300367aa0827496fe75a1766d198d34385a58f97683fe6e07f89ca3e3c"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de0b4caa1c8a21394e8ce971997614a17648f94e1cd0640fbd6b4d14cab13a72"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:985c7965f62f6f32bf432e2681173db41336a9c2611693247069288bcb0c7f8b"}, - {file = 
"charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a15c1fe6d26e83fd2e5972425a772cca158eae58b05d4a25a4e474c221053e2d"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae55d592b02c4349525b6ed8f74c692509e5adffa842e582c0f861751701a673"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be4d9c2770044a59715eb57c1144dedea7c5d5ae80c68fb9959515037cde2008"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:851cf693fb3aaef71031237cd68699dded198657ec1e76a76eb8be58c03a5d1f"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:31bbaba7218904d2eabecf4feec0d07469284e952a27400f23b6628439439fa7"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:871d045d6ccc181fd863a3cd66ee8e395523ebfbc57f85f91f035f50cee8e3d4"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:501adc5eb6cd5f40a6f77fbd90e5ab915c8fd6e8c614af2db5561e16c600d6f3"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f5fb672c396d826ca16a022ac04c9dce74e00a1c344f6ad1a0fdc1ba1f332213"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-win32.whl", hash = "sha256:bb06098d019766ca16fc915ecaa455c1f1cd594204e7f840cd6258237b5079a8"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:8af5a8917b8af42295e86b64903156b4f110a30dca5f3b5aedea123fbd638bff"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae8e5142dcc7a49168f4055255dbcced01dc1714a90a21f87448dc8d90617d1"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5b70bab78accbc672f50e878a5b73ca692f45f5b5e25c8066d748c09405e6a55"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ceca5876032362ae73b83347be8b5dbd2d1faf3358deb38c9c88776779b2e2f"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34d95638ff3613849f473afc33f65c401a89f3b9528d0d213c7037c398a51296"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9edbe6a5bf8b56a4a84533ba2b2f489d0046e755c29616ef8830f9e7d9cf5728"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6a02a3c7950cafaadcd46a226ad9e12fc9744652cc69f9e5534f98b47f3bbcf"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10b8dd31e10f32410751b3430996f9807fc4d1587ca69772e2aa940a82ab571a"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edc0202099ea1d82844316604e17d2b175044f9bcb6b398aab781eba957224bd"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b891a2f68e09c5ef989007fac11476ed33c5c9994449a4e2c3386529d703dc8b"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:71ef3b9be10070360f289aea4838c784f8b851be3ba58cf796262b57775c2f14"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:55602981b2dbf8184c098bc10287e8c245e351cd4fdcad050bd7199d5a8bf514"}, - {file = 
"charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:46fb9970aa5eeca547d7aa0de5d4b124a288b42eaefac677bde805013c95725c"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:520b7a142d2524f999447b3a0cf95115df81c4f33003c51a6ab637cbda9d0bf4"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-win32.whl", hash = "sha256:8ec8ef42c6cd5856a7613dcd1eaf21e5573b2185263d87d27c8edcae33b62a61"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:baec8148d6b8bd5cee1ae138ba658c71f5b03e0d69d5907703e3e1df96db5e41"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63a6f59e2d01310f754c270e4a257426fe5a591dc487f1983b3bbe793cf6bac6"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d6bfc32a68bc0933819cfdfe45f9abc3cae3877e1d90aac7259d57e6e0f85b1"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f3100d86dcd03c03f7e9c3fdb23d92e32abbca07e7c13ebd7ddfbcb06f5991f"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39b70a6f88eebe239fa775190796d55a33cfb6d36b9ffdd37843f7c4c1b5dc67"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e12f8ee80aa35e746230a2af83e81bd6b52daa92a8afaef4fea4a2ce9b9f4fa"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b6cefa579e1237ce198619b76eaa148b71894fb0d6bcf9024460f9bf30fd228"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:61f1e3fb621f5420523abb71f5771a204b33c21d31e7d9d86881b2cffe92c47c"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4f6e2a839f83a6a76854d12dbebde50e4b1afa63e27761549d006fa53e9aa80e"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:1ec937546cad86d0dce5396748bf392bb7b62a9eeb8c66efac60e947697f0e58"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:82ca51ff0fc5b641a2d4e1cc8c5ff108699b7a56d7f3ad6f6da9dbb6f0145b48"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:633968254f8d421e70f91c6ebe71ed0ab140220469cf87a9857e21c16687c034"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-win32.whl", hash = "sha256:c0c72d34e7de5604df0fde3644cc079feee5e55464967d10b24b1de268deceb9"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:63accd11149c0f9a99e3bc095bbdb5a464862d77a7e309ad5938fbc8721235ae"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5a3580a4fdc4ac05f9e53c57f965e3594b2f99796231380adb2baaab96e22761"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2465aa50c9299d615d757c1c888bc6fef384b7c4aec81c05a0172b4400f98557"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb7cd68814308aade9d0c93c5bd2ade9f9441666f8ba5aa9c2d4b389cb5e2a45"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e43805ccafa0a91831f9cd5443aa34528c0c3f2cc48c4cb3d9a7721053874b"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:854cc74367180beb327ab9d00f964f6d91da06450b0855cbbb09187bcdb02de5"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c15070ebf11b8b7fd1bfff7217e9324963c82dbdf6182ff7050519e350e7ad9f"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4c99f98fc3a1835af8179dcc9013f93594d0670e2fa80c83aa36346ee763d2"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fb765362688821404ad6cf86772fc54993ec11577cd5a92ac44b4c2ba52155b"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dced27917823df984fe0c80a5c4ad75cf58df0fbfae890bc08004cd3888922a2"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a66bcdf19c1a523e41b8e9d53d0cedbfbac2e93c649a2e9502cb26c014d0980c"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ecd26be9f112c4f96718290c10f4caea6cc798459a3a76636b817a0ed7874e42"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f70fd716855cd3b855316b226a1ac8bdb3caf4f7ea96edcccc6f484217c9597"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:17a866d61259c7de1bdadef418a37755050ddb4b922df8b356503234fff7932c"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-win32.whl", hash = "sha256:548eefad783ed787b38cb6f9a574bd8664468cc76d1538215d510a3cd41406cb"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:45f053a0ece92c734d874861ffe6e3cc92150e32136dd59ab1fb070575189c97"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bc791ec3fd0c4309a753f95bb6c749ef0d8ea3aea91f07ee1cf06b7b02118f2f"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8c61fb505c7dad1d251c284e712d4e0372cef3b067f7ddf82a7fa82e1e9a93"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2c092be3885a1b7899cd85ce24acedc1034199d6fca1483fa2c3a35c86e43041"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2000c54c395d9e5e44c99dc7c20a64dc371f777faf8bae4919ad3e99ce5253e"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cb50a0335382aac15c31b61d8531bc9bb657cfd848b1d7158009472189f3d62"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c30187840d36d0ba2893bc3271a36a517a717f9fd383a98e2697ee890a37c273"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe81b35c33772e56f4b6cf62cf4aedc1762ef7162a31e6ac7fe5e40d0149eb67"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0bf89afcbcf4d1bb2652f6580e5e55a840fdf87384f6063c4a4f0c95e378656"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:06cf46bdff72f58645434d467bf5228080801298fbba19fe268a01b4534467f5"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3c66df3f41abee950d6638adc7eac4730a306b022570f71dd0bd6ba53503ab57"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:cd805513198304026bd379d1d516afbf6c3c13f4382134a2c526b8b854da1c2e"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:9505dc359edb6a330efcd2be825fdb73ee3e628d9010597aa1aee5aa63442e97"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31445f38053476a0c4e6d12b047b08ced81e2c7c712e5a1ad97bc913256f91b2"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-win32.whl", hash = "sha256:bd28b31730f0e982ace8663d108e01199098432a30a4c410d06fe08fdb9e93f4"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:555fe186da0068d3354cdf4bbcbc609b0ecae4d04c921cc13e209eece7720727"}, - {file = "charset_normalizer-3.3.1-py3-none-any.whl", hash = "sha256:800561453acdecedaac137bf09cd719c7a440b6800ec182f077bb8e7025fb708"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] @@ -1232,57 +1232,57 @@ files = [ [[package]] name = "fonttools" -version = "4.43.1" +version = "4.44.0" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.43.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bf11e2cca121df35e295bd34b309046c29476ee739753bc6bc9d5050de319273"}, - {file = "fonttools-4.43.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:10b3922875ffcba636674f406f9ab9a559564fdbaa253d66222019d569db869c"}, - {file = "fonttools-4.43.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f727c3e3d08fd25352ed76cc3cb61486f8ed3f46109edf39e5a60fc9fecf6ca"}, - {file = "fonttools-4.43.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad0b3f6342cfa14be996971ea2b28b125ad681c6277c4cd0fbdb50340220dfb6"}, - {file = "fonttools-4.43.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b7ad05b2beeebafb86aa01982e9768d61c2232f16470f9d0d8e385798e37184"}, - {file = "fonttools-4.43.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c54466f642d2116686268c3e5f35ebb10e49b0d48d41a847f0e171c785f7ac7"}, - {file = "fonttools-4.43.1-cp310-cp310-win32.whl", hash = "sha256:1e09da7e8519e336239fbd375156488a4c4945f11c4c5792ee086dd84f784d02"}, - {file = "fonttools-4.43.1-cp310-cp310-win_amd64.whl", hash = "sha256:1cf9e974f63b1080b1d2686180fc1fbfd3bfcfa3e1128695b5de337eb9075cef"}, - {file = "fonttools-4.43.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5db46659cfe4e321158de74c6f71617e65dc92e54980086823a207f1c1c0e24b"}, - {file = "fonttools-4.43.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1952c89a45caceedf2ab2506d9a95756e12b235c7182a7a0fff4f5e52227204f"}, - {file = "fonttools-4.43.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c36da88422e0270fbc7fd959dc9749d31a958506c1d000e16703c2fce43e3d0"}, - {file = "fonttools-4.43.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7bbbf8174501285049e64d174e29f9578495e1b3b16c07c31910d55ad57683d8"}, - {file = "fonttools-4.43.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d4071bd1c183b8d0b368cc9ed3c07a0f6eb1bdfc4941c4c024c49a35429ac7cd"}, - {file = "fonttools-4.43.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d21099b411e2006d3c3e1f9aaf339e12037dbf7bf9337faf0e93ec915991f43b"}, - {file = "fonttools-4.43.1-cp311-cp311-win32.whl", hash = "sha256:b84a1c00f832feb9d0585ca8432fba104c819e42ff685fcce83537e2e7e91204"}, - {file = "fonttools-4.43.1-cp311-cp311-win_amd64.whl", hash = "sha256:9a2f0aa6ca7c9bc1058a9d0b35483d4216e0c1bbe3962bc62ce112749954c7b8"}, - {file = "fonttools-4.43.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4d9740e3783c748521e77d3c397dc0662062c88fd93600a3c2087d3d627cd5e5"}, - {file = "fonttools-4.43.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:884ef38a5a2fd47b0c1291647b15f4e88b9de5338ffa24ee52c77d52b4dfd09c"}, - {file = "fonttools-4.43.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9648518ef687ba818db3fcc5d9aae27a369253ac09a81ed25c3867e8657a0680"}, - {file = "fonttools-4.43.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95e974d70238fc2be5f444fa91f6347191d0e914d5d8ae002c9aa189572cc215"}, - {file = "fonttools-4.43.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:34f713dad41aa21c637b4e04fe507c36b986a40f7179dcc86402237e2d39dcd3"}, - {file = "fonttools-4.43.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:360201d46165fc0753229afe785900bc9596ee6974833124f4e5e9f98d0f592b"}, - {file = "fonttools-4.43.1-cp312-cp312-win32.whl", hash = "sha256:bb6d2f8ef81ea076877d76acfb6f9534a9c5f31dc94ba70ad001267ac3a8e56f"}, - {file = "fonttools-4.43.1-cp312-cp312-win_amd64.whl", hash = "sha256:25d3da8a01442cbc1106490eddb6d31d7dffb38c1edbfabbcc8db371b3386d72"}, - {file = "fonttools-4.43.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8da417431bfc9885a505e86ba706f03f598c85f5a9c54f67d63e84b9948ce590"}, - {file = "fonttools-4.43.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:51669b60ee2a4ad6c7fc17539a43ffffc8ef69fd5dbed186a38a79c0ac1f5db7"}, - {file = "fonttools-4.43.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:748015d6f28f704e7d95cd3c808b483c5fb87fd3eefe172a9da54746ad56bfb6"}, - {file = "fonttools-4.43.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7a58eb5e736d7cf198eee94844b81c9573102ae5989ebcaa1d1a37acd04b33d"}, - {file = "fonttools-4.43.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6bb5ea9076e0e39defa2c325fc086593ae582088e91c0746bee7a5a197be3da0"}, - {file = "fonttools-4.43.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5f37e31291bf99a63328668bb83b0669f2688f329c4c0d80643acee6e63cd933"}, - {file = "fonttools-4.43.1-cp38-cp38-win32.whl", hash = "sha256:9c60ecfa62839f7184f741d0509b5c039d391c3aff71dc5bc57b87cc305cff3b"}, - {file = "fonttools-4.43.1-cp38-cp38-win_amd64.whl", hash = "sha256:fe9b1ec799b6086460a7480e0f55c447b1aca0a4eecc53e444f639e967348896"}, - {file = "fonttools-4.43.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13a9a185259ed144def3682f74fdcf6596f2294e56fe62dfd2be736674500dba"}, - {file = "fonttools-4.43.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2adca1b46d69dce4a37eecc096fe01a65d81a2f5c13b25ad54d5430ae430b13"}, - {file = "fonttools-4.43.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:18eefac1b247049a3a44bcd6e8c8fd8b97f3cad6f728173b5d81dced12d6c477"}, - {file = "fonttools-4.43.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2062542a7565091cea4cc14dd99feff473268b5b8afdee564f7067dd9fff5860"}, - {file = "fonttools-4.43.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18a2477c62a728f4d6e88c45ee9ee0229405e7267d7d79ce1f5ce0f3e9f8ab86"}, - {file = "fonttools-4.43.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a7a06f8d95b7496e53af80d974d63516ffb263a468e614978f3899a6df52d4b3"}, - {file = "fonttools-4.43.1-cp39-cp39-win32.whl", hash = "sha256:10003ebd81fec0192c889e63a9c8c63f88c7d72ae0460b7ba0cd2a1db246e5ad"}, - {file = "fonttools-4.43.1-cp39-cp39-win_amd64.whl", hash = "sha256:e117a92b07407a061cde48158c03587ab97e74e7d73cb65e6aadb17af191162a"}, - {file = "fonttools-4.43.1-py3-none-any.whl", hash = "sha256:4f88cae635bfe4bbbdc29d479a297bb525a94889184bb69fa9560c2d4834ddb9"}, - {file = "fonttools-4.43.1.tar.gz", hash = "sha256:17dbc2eeafb38d5d0e865dcce16e313c58265a6d2d20081c435f84dc5a9d8212"}, -] - -[package.extras] -all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.0.0)", "xattr", "zopfli (>=0.1.4)"] + {file = "fonttools-4.44.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1cd1c6bb097e774d68402499ff66185190baaa2629ae2f18515a2c50b93db0c"}, + {file = "fonttools-4.44.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b9eab7f9837fdaa2a10a524fbcc2ec24bf60637c044b6e4a59c3f835b90f0fae"}, + {file = "fonttools-4.44.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f412954275e594f7a51c16f3b3edd850acb0d842fefc33856b63a17e18499a5"}, + {file = "fonttools-4.44.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50d25893885e80a5955186791eed5579f1e75921751539cc1dc3ffd1160b48cf"}, + {file = "fonttools-4.44.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:22ea8aa7b3712450b42b044702bd3a64fd118006bad09a6f94bd1b227088492e"}, + {file = "fonttools-4.44.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df40daa6c03b98652ffe8110ae014fe695437f6e1cb5a07e16ea37f40e73ac86"}, + {file = "fonttools-4.44.0-cp310-cp310-win32.whl", hash = "sha256:bca49da868e8bde569ef36f0cc1b6de21d56bf9c3be185c503b629c19a185287"}, + {file = "fonttools-4.44.0-cp310-cp310-win_amd64.whl", hash = "sha256:dbac86d83d96099890e731cc2af97976ff2c98f4ba432fccde657c5653a32f1c"}, + {file = "fonttools-4.44.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e8ff7d19a6804bfd561cfcec9b4200dd1788e28f7de4be70189801530c47c1b3"}, + {file = "fonttools-4.44.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8a1fa9a718de0bc026979c93e1e9b55c5efde60d76f91561fd713387573817d"}, + {file = "fonttools-4.44.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05064f95aacdfc06f21e55096c964b2228d942b8675fa26995a2551f6329d2d"}, + {file = "fonttools-4.44.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31b38528f25bc662401e6ffae14b3eb7f1e820892fd80369a37155e3b636a2f4"}, + {file = "fonttools-4.44.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:05d7c4d2c95b9490e669f3cb83918799bf1c838619ac6d3bad9ea017cfc63f2e"}, + {file = "fonttools-4.44.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6999e80a125b0cd8e068d0210b63323f17338038c2ecd2e11b9209ec430fe7f2"}, + {file = 
"fonttools-4.44.0-cp311-cp311-win32.whl", hash = "sha256:a7aec7f5d14dfcd71fb3ebc299b3f000c21fdc4043079101777ed2042ba5b7c5"}, + {file = "fonttools-4.44.0-cp311-cp311-win_amd64.whl", hash = "sha256:518a945dbfe337744bfff31423c1430303b8813c5275dffb0f2577f0734a1189"}, + {file = "fonttools-4.44.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:59b6ad83cce067d10f4790c037a5904424f45bebb5e7be2eb2db90402f288267"}, + {file = "fonttools-4.44.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c2de1fb18198acd400c45ffe2aef5420c8d55fde903e91cba705596099550f3b"}, + {file = "fonttools-4.44.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84f308b7a8d28208d54315d11d35f9888d6d607673dd4d42d60b463682ee0400"}, + {file = "fonttools-4.44.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66bc6efd829382f7a7e6cf33c2fb32b13edc8a239eb15f32acbf197dce7a0165"}, + {file = "fonttools-4.44.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a8b99713d3a0d0e876b6aecfaada5e7dc9fe979fcd90ef9fa0ba1d9b9aed03f2"}, + {file = "fonttools-4.44.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b63da598d9cbc52e2381f922da0e94d60c0429f92207bd3fb04d112fc82ea7cb"}, + {file = "fonttools-4.44.0-cp312-cp312-win32.whl", hash = "sha256:f611c97678604e302b725f71626edea113a5745a7fb557c958b39edb6add87d5"}, + {file = "fonttools-4.44.0-cp312-cp312-win_amd64.whl", hash = "sha256:58af428746fa73a2edcbf26aff33ac4ef3c11c8d75bb200eaea2f7e888d2de4e"}, + {file = "fonttools-4.44.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9ee8692e23028564c13d924004495f284df8ac016a19f17a87251210e1f1f928"}, + {file = "fonttools-4.44.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dab3d00d27b1a79ae4d4a240e8ceea8af0ff049fd45f05adb4f860d93744110d"}, + {file = "fonttools-4.44.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f53526668beccdb3409c6055a4ffe50987a7f05af6436fa55d61f5e7bd450219"}, + {file = "fonttools-4.44.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3da036b016c975c2d8c69005bdc4d5d16266f948a7fab950244e0f58301996a"}, + {file = "fonttools-4.44.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b99fe8ef4093f672d00841569d2d05691e50334d79f4d9c15c1265d76d5580d2"}, + {file = "fonttools-4.44.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d16d9634ff1e5cea2cf4a8cbda9026f766e4b5f30b48f8180f0e99133d3abfc"}, + {file = "fonttools-4.44.0-cp38-cp38-win32.whl", hash = "sha256:3d29509f6e05e8d725db59c2d8c076223d793e4e35773040be6632a0349f2f97"}, + {file = "fonttools-4.44.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4fa4f4bc8fd86579b8cdbe5e948f35d82c0eda0091c399d009b2a5a6b61c040"}, + {file = "fonttools-4.44.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c794de4086f06ae609b71ac944ec7deb09f34ecf73316fddc041087dd24bba39"}, + {file = "fonttools-4.44.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2db63941fee3122e31a21dd0f5b2138ce9906b661a85b63622421d3654a74ae2"}, + {file = "fonttools-4.44.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb01c49c8aa035d5346f46630209923d4927ed15c2493db38d31da9f811eb70d"}, + {file = "fonttools-4.44.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c79af80a835410874683b5779b6c1ec1d5a285e11c45b5193e79dd691eb111"}, + {file = "fonttools-4.44.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b6e6aa2d066f8dafd06d8d0799b4944b5d5a1f015dd52ac01bdf2895ebe169a0"}, + {file = 
"fonttools-4.44.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:63a3112f753baef8c6ac2f5f574bb9ac8001b86c8c0c0380039db47a7f512d20"}, + {file = "fonttools-4.44.0-cp39-cp39-win32.whl", hash = "sha256:54efed22b2799a85475e6840e907c402ba49892c614565dc770aa97a53621b2b"}, + {file = "fonttools-4.44.0-cp39-cp39-win_amd64.whl", hash = "sha256:2e91e19b583961979e2e5a701269d3cfc07418963bee717f8160b0a24332826b"}, + {file = "fonttools-4.44.0-py3-none-any.whl", hash = "sha256:b9beb0fa6ff3ea808ad4a6962d68ac0f140ddab080957b20d9e268e4d67fb335"}, + {file = "fonttools-4.44.0.tar.gz", hash = "sha256:4e90dd81b6e0d97ebfe52c0d12a17a9ef7f305d6bfbb93081265057d6092f252"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] graphite = ["lz4 (>=1.7.4.2)"] interpolatable = ["munkres", "scipy"] lxml = ["lxml (>=4.0,<5)"] @@ -1292,7 +1292,7 @@ repacker = ["uharfbuzz (>=0.23.0)"] symfont = ["sympy"] type1 = ["xattr"] ufo = ["fs (>=2.2.0,<3)"] -unicode = ["unicodedata2 (>=15.0.0)"] +unicode = ["unicodedata2 (>=15.1.0)"] woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] [[package]] @@ -1933,13 +1933,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.19.1" +version = "4.19.2" description = "An implementation of JSON Schema validation for Python" optional = true python-versions = ">=3.8" files = [ - {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, - {file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, + {file = "jsonschema-4.19.2-py3-none-any.whl", hash = "sha256:eee9e502c788e89cb166d4d37f43084e3b64ab405c795c03d343a4dbc2c810fc"}, + {file = "jsonschema-4.19.2.tar.gz", hash = "sha256:c9ff4d7447eed9592c23a12ccee508baf0dd0d59650615e847feb6cdca74f392"}, ] [package.dependencies] @@ -2161,13 +2161,13 @@ test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", [[package]] name = "jupyterlab" -version = "4.0.7" +version = "4.0.8" description = "JupyterLab computational environment" optional = true python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.0.7-py3-none-any.whl", hash = "sha256:08683045117cc495531fdb39c22ababb9aaac6977a45e67cfad20046564c9c7c"}, - {file = "jupyterlab-4.0.7.tar.gz", hash = "sha256:48792efd9f962b2bcda1f87d72168ff122c288b1d97d32109e4a11b33dc862be"}, + {file = "jupyterlab-4.0.8-py3-none-any.whl", hash = "sha256:2ff5aa2a51eb21df241d6011c236e88bd1ff9a5dbb75bebc54472f9c18bfffa4"}, + {file = "jupyterlab-4.0.8.tar.gz", hash = "sha256:c4fe93f977bcc987bd395d7fae5ab02e0c042bf4e0f7c95196f3e2e578c2fb3a"}, ] [package.dependencies] @@ -2187,7 +2187,7 @@ tornado = ">=6.2.0" traitlets = "*" [package.extras] -dev = ["black[jupyter] (==23.7.0)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.0.286)"] +dev = ["black[jupyter] (==23.10.1)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.0.292)"] docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8,<7.2.0)", "sphinx-copybutton"] docs-screenshots = ["altair (==5.0.1)", "ipython (==8.14.0)", "ipywidgets (==8.0.6)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post0)", 
"matplotlib (==3.7.1)", "nbconvert (>=7.0.0)", "pandas (==2.0.2)", "scipy (==1.10.1)", "vega-datasets (==0.9.0)"] test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] @@ -2261,27 +2261,27 @@ files = [ [[package]] name = "kikuchipy" -version = "0.8.7" +version = "0.9.0" description = "Processing, simulating and indexing of electron backscatter diffraction (EBSD) patterns." optional = false python-versions = ">=3.7" files = [ - {file = "kikuchipy-0.8.7-py3-none-any.whl", hash = "sha256:522c80355480d31b487dc8e78879a071e0dc72f36b595974ab145dfb52563d66"}, - {file = "kikuchipy-0.8.7.tar.gz", hash = "sha256:4108baba07cd73c5133fbff44053b0621f5cbdfdf2d1c48ccbc970fcfb1f3ce3"}, + {file = "kikuchipy-0.9.0-py3-none-any.whl", hash = "sha256:7f9e2c8c6d71f8382a3724d14ec2a5dd41a47a1f3e6aeee532498bf98c915a87"}, + {file = "kikuchipy-0.9.0.tar.gz", hash = "sha256:f0523ac0245d59c598fbaa0c62da46e5b08de306a62a7a99d4c140cc422772be"}, ] [package.dependencies] dask = {version = ">=2021.8.1", extras = ["array"]} "diffpy.structure" = ">=3" -diffsims = ">=0.5" +diffsims = ">=0.5.1" h5py = ">=2.10" -hyperspy = ">=1.7.3" +hyperspy = ">=1.7.3,<2" imageio = "*" -matplotlib = ">=3.3" +matplotlib = ">=3.5" numba = ">=0.55" -numpy = ">=1.19" +numpy = ">=1.21.6" orix = ">=0.11.1" -pooch = ">=0.13" +pooch = ">=1.3.0" pyyaml = "*" scikit-image = ">=0.16.2" scikit-learn = "*" @@ -2289,11 +2289,10 @@ scipy = ">=1.7" tqdm = ">=0.5.2" [package.extras] -all = ["matplotlib (>=3.5)", "nlopt", "pyebsdindex (>=0.1,<1.0)", "pyvista"] -dev = ["black[jupyter] (>=23.1)", "coverage (>=5.0)", "manifix", "matplotlib (>=3.5)", "memory-profiler", "nbsphinx (>=0.7)", "nlopt", "numpydoc", "outdated", "panel", "pre-commit (>=1.16)", "pydata-sphinx-theme", "pyebsdindex (>=0.1,<1.0)", "pyebsdindex (>=0.1.1)", "pytest (>=5.4)", "pytest-benchmark", "pytest-cov (>=2.8.1)", "pytest-xdist", "pyvista", "sphinx (>=3.0.2)", "sphinx-codeautolink[ipython] (<0.14)", "sphinx-copybutton (>=0.2.5)", "sphinx-design", "sphinx-gallery", "sphinxcontrib-bibtex (>=1.0)"] -doc = ["memory-profiler", "nbsphinx (>=0.7)", "nlopt", "numpydoc", "panel", "pydata-sphinx-theme", "pyebsdindex (>=0.1.1)", "pyvista", "sphinx (>=3.0.2)", "sphinx-codeautolink[ipython] (<0.14)", "sphinx-copybutton (>=0.2.5)", "sphinx-design", "sphinx-gallery", "sphinxcontrib-bibtex (>=1.0)"] -tests = ["coverage (>=5.0)", "numpydoc", "pytest (>=5.4)", "pytest-benchmark", "pytest-cov (>=2.8.1)", "pytest-xdist"] -viz = ["matplotlib (>=3.5)", "pyvista"] +all = ["matplotlib (>=3.5)", "nlopt", "pyebsdindex (>=0.2,<1.0)", "pyvista"] +dev = ["black[jupyter] (>=23.1)", "coverage (>=5.0)", "manifix", "matplotlib (>=3.5)", "memory-profiler", "nbsphinx (>=0.7)", "nlopt", "numpydoc", "outdated", "pre-commit (>=1.16)", "pydata-sphinx-theme", "pyebsdindex (>=0.2,<1.0)", "pytest (>=5.4)", "pytest-benchmark", "pytest-cov (>=2.8.1)", "pytest-rerunfailures", "pytest-xdist", "pyvista", "sphinx (>=3.0.2)", "sphinx-codeautolink[ipython] (<0.14)", "sphinx-copybutton (>=0.2.5)", "sphinx-design", "sphinx-gallery", "sphinxcontrib-bibtex (>=1.0)"] +doc = ["memory-profiler", "nbsphinx (>=0.7)", "nlopt", "numpydoc", "pydata-sphinx-theme", "pyebsdindex (>=0.2,<1.0)", "pyvista", "sphinx (>=3.0.2)", "sphinx-codeautolink[ipython] (<0.14)", "sphinx-copybutton (>=0.2.5)", "sphinx-design", "sphinx-gallery", "sphinxcontrib-bibtex (>=1.0)"] +tests = ["coverage (>=5.0)", 
"numpydoc", "pytest (>=5.4)", "pytest-benchmark", "pytest-cov (>=2.8.1)", "pytest-rerunfailures", "pytest-xdist"] [[package]] name = "kiwisolver" @@ -3011,6 +3010,25 @@ traitlets = ">=5.1" docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] test = ["pep440", "pre-commit", "pytest", "testpath"] +[[package]] +name = "nbsphinx" +version = "0.9.3" +description = "Jupyter Notebook Tools for Sphinx" +optional = true +python-versions = ">=3.6" +files = [ + {file = "nbsphinx-0.9.3-py3-none-any.whl", hash = "sha256:6e805e9627f4a358bd5720d5cbf8bf48853989c79af557afd91a5f22e163029f"}, + {file = "nbsphinx-0.9.3.tar.gz", hash = "sha256:ec339c8691b688f8676104a367a4b8cf3ea01fd089dc28d24dec22d563b11562"}, +] + +[package.dependencies] +docutils = "*" +jinja2 = "*" +nbconvert = "!=5.4" +nbformat = "*" +sphinx = ">=1.8" +traitlets = ">=5" + [[package]] name = "nest-asyncio" version = "1.5.8" @@ -3797,6 +3815,17 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "ply" +version = "3.11" +description = "Python Lex & Yacc" +optional = false +python-versions = "*" +files = [ + {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, + {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, +] + [[package]] name = "pooch" version = "1.8.0" @@ -3848,13 +3877,13 @@ tests = ["pytest", "pytest-cov", "pytest-lazy-fixture"] [[package]] name = "prometheus-client" -version = "0.17.1" +version = "0.18.0" description = "Python client for the Prometheus monitoring system." optional = true -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, - {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, + {file = "prometheus_client-0.18.0-py3-none-any.whl", hash = "sha256:8de3ae2755f890826f4b6479e5571d4f74ac17a81345fe69a6778fdb92579184"}, + {file = "prometheus_client-0.18.0.tar.gz", hash = "sha256:35f7a8c22139e2bb7ca5a698e92d38145bc8dc74c1c0bf56f25cca886a764e17"}, ] [package.extras] @@ -3981,17 +4010,19 @@ numpy = ">=1.16.6" [[package]] name = "pycifrw" -version = "4.4.5" +version = "4.4.6" description = "CIF/STAR file support for Python" optional = false python-versions = "*" files = [ - {file = "PyCifRW-4.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:0be684f65902dd9eead0300c92e4949af1c1d6eb9139dc5e5209b1b64cfe5436"}, - {file = "PyCifRW-4.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:b3768955cd235dfc936565d3425d7ee60c0e23d6154d73ca3960af4c10bd67da"}, - {file = "PyCifRW-4.4.5-cp39-cp39-manylinux_2_5_x86_64.whl", hash = "sha256:4b6f793603dfa731b9aba4bf9e379d42e843b57e77829a3fce3fb8bc9a563c1a"}, - {file = "PyCifRW-4.4.5.tar.gz", hash = "sha256:a8fd092cbefe6ddd4b7e667d05682e5c8c2e64ba63e7ef8b3ce4c07f2ff62827"}, + {file = "PyCifRW-4.4.6-cp311-cp311-manylinux_2_5_x86_64.whl", hash = "sha256:a89844ed5811700f995d1913a248d29d5745078ffd0f957b7e0574d74a48d0df"}, + {file = "PyCifRW-4.4.6.tar.gz", hash = "sha256:02bf5975e70ab71540bff62fbef3e8354ac707a0f0ab914a152047962891ef15"}, ] +[package.dependencies] +numpy = "*" +ply = "*" + [[package]] name = "pycodestyle" version = "2.11.1" @@ -5274,23 +5305,23 @@ test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] [[package]] name = 
"sphinx-autodoc-typehints" -version = "1.24.0" +version = "1.24.1" description = "Type hints (PEP 484) support for the Sphinx autodoc extension" optional = true python-versions = ">=3.8" files = [ - {file = "sphinx_autodoc_typehints-1.24.0-py3-none-any.whl", hash = "sha256:6a73c0c61a9144ce2ed5ef2bed99d615254e5005c1cc32002017d72d69fb70e6"}, - {file = "sphinx_autodoc_typehints-1.24.0.tar.gz", hash = "sha256:94e440066941bb237704bb880785e2d05e8ae5406c88674feefbb938ad0dc6af"}, + {file = "sphinx_autodoc_typehints-1.24.1-py3-none-any.whl", hash = "sha256:4cc16c5545f2bf896ca52a854babefe3d8baeaaa033d13a7f179ac1d9feb02d5"}, + {file = "sphinx_autodoc_typehints-1.24.1.tar.gz", hash = "sha256:06683a2b76c3c7b1931b75e40e0211866fbb50ba4c4e802d0901d9b4e849add2"}, ] [package.dependencies] -furo = {version = ">=2023.5.20", optional = true, markers = "extra == \"docs\""} -sphinx = ">=7.0.1" +furo = {version = ">=2023.7.26", optional = true, markers = "extra == \"docs\""} +sphinx = ">=7.1.2" [package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)"] +docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)"] numpy = ["nptyping (>=2.5)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.6.3)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.7.1)"] [[package]] name = "sphinx-basic-ng" @@ -5584,13 +5615,13 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.1" +version = "0.12.2" description = "Style preserving TOML library" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, - {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, + {file = "tomlkit-0.12.2-py3-none-any.whl", hash = "sha256:eeea7ac7563faeab0a1ed8fe12c2e5a51c61f933f2502f7e9db0241a65163ad0"}, + {file = "tomlkit-0.12.2.tar.gz", hash = "sha256:df32fab589a81f0d7dc525a4267b6d7a64ee99619cbd1eeb0fae32c1dd426977"}, ] [[package]] @@ -5888,13 +5919,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "wcwidth" -version = "0.2.8" +version = "0.2.9" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, - {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, + {file = "wcwidth-0.2.9-py2.py3-none-any.whl", hash = "sha256:9a929bd8380f6cd9571a968a9c8f4353ca58d7cd812a4822bba831f8d685b223"}, + {file = "wcwidth-0.2.9.tar.gz", hash = "sha256:a675d1a4a2d24ef67096a04b85b02deeecd8e226f57b5e3a72dbb9ed99d27da8"}, ] [[package]] @@ -6020,10 +6051,10 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.link testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [extras] -docs = ["sphinx", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "tomlkit"] +docs = ["nbsphinx", "sphinx", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "tomlkit"] notebook = ["ipykernel", "jupyter", 
"jupyterlab-h5web"] [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.11" -content-hash = "1269eba7417fa4ce82173aa10912f814e367a6d3c452edb00cc3058c40a42fa4" +content-hash = "10a18aa12d733655eb7581612b98c5c0ed22191ddb3bfe7f0a4f8e66b0e5e6e4" diff --git a/pyproject.toml b/pyproject.toml index 98a1b0f0..d9bda125 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,11 +38,12 @@ sphinx = {version = ">4.4.0", extras = ["docs"], optional = true} sphinx-rtd-theme = {version = ">1.0.0", extras = ["docs"], optional = true} tomlkit = {version = ">0.10.0", extras = ["docs"], optional = true} sphinx-autodoc-typehints = {version = ">1.17.0", extras = ["docs"], optional = true} +nbsphinx = {version = "^0.9.3", extras = ["docs"], optional = true} [tool.poetry.extras] notebook = ["jupyter", "ipykernel", "jupyterlab-h5web"] -docs = ["Sphinx", "sphinx-rtd-theme", "tomlkit", "sphinx-autodoc-typehints"] +docs = ["Sphinx", "sphinx-rtd-theme", "tomlkit", "sphinx-autodoc-typehints", "nbsphinx"] [tool.poetry.group.dev.dependencies] pytest = "^7.0.1"