From 6357ea4492b86aa94d1b8e8fb4ebb6cce2d4013e Mon Sep 17 00:00:00 2001 From: rettigl Date: Thu, 27 Jun 2024 22:57:58 +0200 Subject: [PATCH 1/3] spell-check all files --- .cspell/custom-dictionary.txt | 150 ++++++++++++++++++ .gitignore | 4 + README.md | 6 +- cspell.json | 23 +++ docs/conf.py | 2 - docs/requirements.txt | 10 -- docs/specsanalyzer/config.rst | 2 +- specsanalyzer/config.py | 4 +- specsanalyzer/config/default.yaml | 2 +- specsanalyzer/convert.py | 36 ++--- specsanalyzer/core.py | 14 +- specsanalyzer/img_tools.py | 2 +- specsanalyzer/io.py | 8 +- specsscan/core.py | 16 +- specsscan/helpers.py | 2 +- tests/helpers.py | 4 +- tests/test_convert.py | 10 +- tests/test_helpers.py | 2 +- tests/test_specsscan.py | 2 +- .../1_specsanalyzer_conversion_examples.ipynb | 6 +- tutorial/2_specsscan_example.ipynb | 4 +- .../3_specsscan_conversion_to_NeXus.ipynb | 2 +- tutorial/4_specsscan_load_sweep_scan.ipynb | 2 +- 23 files changed, 239 insertions(+), 74 deletions(-) create mode 100644 .cspell/custom-dictionary.txt create mode 100644 cspell.json delete mode 100755 docs/requirements.txt diff --git a/.cspell/custom-dictionary.txt b/.cspell/custom-dictionary.txt new file mode 100644 index 0000000..cc73d73 --- /dev/null +++ b/.cspell/custom-dictionary.txt @@ -0,0 +1,150 @@ +# Custom Dictionary Words +allclose +ALLUSERSPROFILE +amperemeter +analyser +arange +archiver +argwhere +ARPES +astype +autoclass +automodule +autoreload +autosummary +axhline +AXISNAME +axvline +basepath +bitshift +bysource +calib +checkscan +clim +codemirror +coeff +COLLECTIONCOLUMN +colorbar +COMPES +configpath +Croping +cropit +damatrix +dapolymatrix +dataconverter +dataframe +delaystage +dtype +dxda +dxde +dyda +dyde +Ekin +electronanalyser +elems +endstation +energydispersion +eshift +faddr +Faradayweg +fftshift +filt +fluence +fspath +FWHM +genindex +hline +ifftshift +IMAGEJ +imgj +imread +imshow +imutils +imwrite +Interp +ipykernel +ipympl +ipython +ipywidgets +irfft +kernelspec +kwds +labview +Laurenz +lensmodes +lineh +linev +listf +literalinclude +loadtxt +matplotlib +maxdepth +mbar +meshgrid +modindex +mpes +nanos +nbagg +nbconvert +nbformat +nbsphinx +ndarray +ndarrays +ndim +ndimage +ndims +ndmin +nesteddict +nestedentry +numpy +nxdl +Nxpix +Nxpixels +Nypixels +OPCPA +pcolormesh +Phoibos +polyfit +polyval +pyenv +pygments +pynxtools +pyplot +pyproject +quickstart +RAWDATA +Rettig +rfft +rrvec +rtol +rtype +scanvector +specsanalyzer +Specslab +specsscan +stype +tempa +tifffile +tmpdirname +toctree +tomlkit +topfloor +tqdm +typehints +TZCYXS +undoc +venv +vline +vmax +vmin +xarray +xarrays +xdata +xgrid +Xmpes +xpos +Xuser +ydata +ygrid +ypos +zinner diff --git a/.gitignore b/.gitignore index 14ceb11..4a0e404 100755 --- a/.gitignore +++ b/.gitignore @@ -138,3 +138,7 @@ dmypy.json *.h5 *.tiff *.nxs +*.pxp + +# poetry +poetry.toml diff --git a/README.md b/README.md index 6b1e6bd..d858914 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ This is the package `specsanalyzer` for conversion and handling of SPECS Phoibos This package contains two modules: `specsanalyzer` is a package to import and convert MCP analyzer images from SPECS Phoibos analyzers into energy and emission angle/physical coordinates. 
-`specsscan` is a Python package for loading Specs Phoibos scans accquired with the labview software developed at FHI/EPFL +`specsscan` is a Python package for loading Specs Phoibos scans acquired with the labview software developed at FHI/EPFL Tutorials for usage and the API documentation can be found in the [Documentation](https://opencompes.github.io/specsanalyzer/) @@ -47,9 +47,9 @@ python -m ipykernel install --user --name=specs_kernel ``` #### Configuration and calib2d file -The conversion procedures require to set up several configuration parameters in a config file. An example config file is provided as part of the package (see documentation). Configuration files can either be passed to the class constructures, or are read from system-wide or user-defined locations (see documentation). +The conversion procedures require to set up several configuration parameters in a config file. An example config file is provided as part of the package (see documentation). Configuration files can either be passed to the class constructors, or are read from system-wide or user-defined locations (see documentation). -Most importantly, conversion of analyzer data to energy/angular coordinates requires detector calibration data provided by the manufacturer. The corresponding *.calib2d file (e.g. phoibos150.calbid2d) are provided together with the spectrometer software, and need to be set in the config file. +Most importantly, conversion of analyzer data to energy/angular coordinates requires detector calibration data provided by the manufacturer. The corresponding *.calib2d file (e.g. phoibos150.calib2d) are provided together with the spectrometer software, and need to be set in the config file. ### For Contributors diff --git a/cspell.json b/cspell.json new file mode 100644 index 0000000..bf7a020 --- /dev/null +++ b/cspell.json @@ -0,0 +1,23 @@ +{ + "version": "0.2", + "ignorePaths": [ + "./tests/data/*", + "*.toml", + "Makefile", + "*.bat", + "*.pxp" + ], + "dictionaryDefinitions": [ + { + "name": "custom-dictionary", + "path": "./.cspell/custom-dictionary.txt", + "addWords": true + } + ], + "dictionaries": [ + "custom-dictionary" + ], + "words": [], + "ignoreWords": [], + "import": [] +} diff --git a/docs/conf.py b/docs/conf.py index 8a102c8..7352eb2 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -50,8 +50,6 @@ def _get_project_meta(): "sphinx.ext.autosummary", "sphinx.ext.coverage", "sphinx_autodoc_typehints", - # "bokeh.sphinxext.bokeh_autodoc", - # "bokeh.sphinxext.bokeh_plot", "nbsphinx", "myst_parser", ] diff --git a/docs/requirements.txt b/docs/requirements.txt deleted file mode 100755 index 6a76295..0000000 --- a/docs/requirements.txt +++ /dev/null @@ -1,10 +0,0 @@ -sphinx-autodoc-typehints -sphinx-rtd-theme -sphinx -sphinxcontrib-applehelp -sphinxcontrib-devhelp -sphinxcontrib-htmlhelp -sphinxcontrib-jsmath -sphinxcontrib-qthelp -sphinxcontrib-serializinghtml -sphinxcontrib-websupport diff --git a/docs/specsanalyzer/config.rst b/docs/specsanalyzer/config.rst index d74dd70..ab95c68 100644 --- a/docs/specsanalyzer/config.rst +++ b/docs/specsanalyzer/config.rst @@ -1,6 +1,6 @@ Config =================================================== -The config module contains a mechanis to collect configuration parameters from various sources and configuration files, and to combine them in a hierachical manner into a single, consistent configuration dictionary. 
+The config module contains a mechanism to collect configuration parameters from various sources and configuration files, and to combine them in a hierarchical manner into a single, consistent configuration dictionary.
 It will load an (optional) provided config file, or alternatively use a passed python dictionary as initial config dictionary, and subsequently look for the following additional config files to load:

 * ``folder_config``: A config file of name :file:`specs_config.yaml` in the current working directory. This is mostly intended to pass calibration parameters of the workflow between different notebook instances.
diff --git a/specsanalyzer/config.py b/specsanalyzer/config.py
index a88f243..f946744 100755
--- a/specsanalyzer/config.py
+++ b/specsanalyzer/config.py
@@ -171,10 +171,10 @@ def load_config(config_path: str) -> dict:
 def save_config(config_dict: dict, config_path: str, overwrite: bool = False):
     """Function to save a given config dictionary to a json or yaml file. Normally, it loads any
     existing file of the given name, and keeps any existing dictionary keys not present in the
-    provided dictionary. The overwrite option creates a fully empty dictionry first.
+    provided dictionary. The overwrite option creates a fully empty dictionary first.

     Args:
-        config_dict (dict): The dictionry to save.
+        config_dict (dict): The dictionary to save.
         config_path (str): A string containing the path to the file where to save the dictionary
             to.
         overwrite (bool, optional): Option to overwrite an existing file with the given dictionary.
diff --git a/specsanalyzer/config/default.yaml b/specsanalyzer/config/default.yaml
index 5a81996..f547bc2 100644
--- a/specsanalyzer/config/default.yaml
+++ b/specsanalyzer/config/default.yaml
@@ -14,5 +14,5 @@ magnification: 4.54
 angle_offset_px: 0
 # offset in pixels along the energy dispersing axis
 energy_offset_px: 0
-# flag controlling the application of a Fourier filter to remove grid artefacts
+# flag controlling the application of a Fourier filter to remove grid artifacts
 apply_fft_filter: false
diff --git a/specsanalyzer/convert.py b/specsanalyzer/convert.py
index 0fba330..871bc85 100755
--- a/specsanalyzer/convert.py
+++ b/specsanalyzer/convert.py
@@ -5,7 +5,7 @@
 from scipy.ndimage import map_coordinates


-def get_damatrix_fromcalib2d(
+def get_damatrix_from_calib2d(
     lens_mode: str,
     kinetic_energy: float,
     pass_energy: float,
@@ -14,7 +14,7 @@
 ) -> tuple[float, np.ndarray, float, str, list[str]]:
     """This function estimates the best angular conversion coefficients for the current analyser
     mode, starting from a dictionary containing the specs .calib2d database. A linear interpolation
-    is performed from the tabulated coefficients based on the retardatio ratio value.
+    is performed from the tabulated coefficients based on the retardation ratio value.

     Args:
         lens_mode (str): the lens mode string description
@@ -104,7 +104,7 @@ def bisection(array: np.ndarray, value: float) -> int:
     Given an ``array`` , and given a ``value`` , returns an index j such that ``value`` is between
     array[j] and array[j+1]. ``array`` must be monotonic increasing. j=-1 or j=len(array) is
     returned to indicate that ``value`` is out of range below and above respectively.
- This should mimick the function BinarySearch in igor pro 6 + This should mimic the function BinarySearch in igor pro 6 Args: array (np.ndarray): ordered array @@ -148,7 +148,7 @@ def second_closest_rr(rrvec: np.ndarray, closest_rr_index: int) -> int: int: nearest rr index to calculate the best da coefficients """ if closest_rr_index == (rrvec.size - 1): - # we are the edge: the behaviour is to not change the index + # we are the edge: the behavior is to not change the index second_closest_rr_index = closest_rr_index else: second_closest_rr_index = closest_rr_index + 1 @@ -160,7 +160,7 @@ def get_rr_da( lens_mode: str, calib2d_dict: dict, ) -> tuple[np.ndarray, np.ndarray]: - """Get the retardatio ratios and the da for a certain lens mode from the confugaration + """Get the retardation ratios and the da for a certain lens mode from the configuration dictionary Args: @@ -173,7 +173,7 @@ def get_rr_da( ValueError: Raised if no da values are found for the given mode Returns: - tuple[np.ndarray, np.ndarray]: rr vector, matrix of da coeffients + tuple[np.ndarray, np.ndarray]: rr vector, matrix of da coefficients per row row0 : da1, row1: da3, .. up to da7. Non angle resolved lens modes do only posses da1. """ @@ -236,26 +236,26 @@ def calculate_polynomial_coef_da( pass_energy: float, e_shift: np.ndarray, ) -> np.ndarray: - """Given the da coeffiecients contained in the scanpareters, the program calculates the energy + """Given the da coefficients contained in the scan parameters, the program calculates the energy range based on the eshift parameter and fits a second order polynomial to the tabulated values. The polynomial coefficients are packed in the dapolymatrix array (row0 da1, row1 da3, ..) - The function returns a matrix of the fit coeffiecients, given the physical energy scale + The function returns a matrix of the fit coefficients, given the physical energy scale Each line of the matrix is a set of coefficients for each of the da[i] corrections Args: da_matrix (np.ndarray): the matrix of interpolated da coefficients - kinetic_energy (float): photoelectorn kinetic energy + kinetic_energy (float): photoelectron kinetic energy pass_energy (float): analyser pass energy e_shift (np.ndarray): e shift parameter, defining the energy range around the center for the polynomial fit of the da coefficients Returns: - np.ndarray: dapolymatrix containg the fit results (row0 da1, row1 da3, ..) + np.ndarray: dapolymatrix containing the fit results (row0 da1, row1 da3, ..) 
""" - # calcualte the energy values for each da, given the eshift + # calculate the energy values for each da, given the eshift da_energy = e_shift * pass_energy + kinetic_energy * np.ones(e_shift.shape) - # create the polynomial coeffiecient matrix, each is a second order polynomial + # create the polynomial coefficient matrix, each is a second order polynomial da_poly_matrix = np.zeros(da_matrix.shape) for i in range(0, da_matrix.shape[0]): @@ -279,12 +279,12 @@ def zinner( mcp withing the a_inner boundaries Args: - kinetic_energy (flonp.ndarrayat): kinetic energies + kinetic_energy (np.ndarray): kinetic energies angle (np.ndarray): angles da_poly_matrix (np.ndarray): matrix with polynomial coefficients Returns: - np.ndarray: returns the calcualted positions on the mcp, valid for low angles (< ainner) + np.ndarray: returns the calculated positions on the mcp, valid for low angles (< a_inner) """ out = np.zeros(angle.shape, float) @@ -303,7 +303,7 @@ def zinner_diff( da_poly_matrix: np.ndarray, ) -> np.ndarray: """Auxiliary function for mcp_position_mm, uses kinetic energy and angle starting from the - dapolymatrix, to get the zinner_diff coefficient to coorect the electron arrival position on + dapolymatrix, to get the zinner_diff coefficient to correct the electron arrival position on the mcp outside the a_inner boundaries Args: @@ -313,7 +313,7 @@ def zinner_diff( Returns: np.ndarray: zinner_diff the correction for the zinner position on the MCP for high - (>ainner) angles. + (>a_inner) angles. """ out = np.zeros(angle.shape, float) @@ -349,7 +349,7 @@ def mcp_position_mm( np.ndarray: lateral position of photoelectron on the mcp (angular dispersing axis) """ - # define two angular regions: within and outsied the a_inner boundaries + # define two angular regions: within and outside the a_inner boundaries mask = np.less_equal(np.abs(angle), a_inner) a_inner_vec = np.ones(angle.shape) * a_inner @@ -513,7 +513,7 @@ def physical_unit_data( e_correction_matrix = np.ones(angular_correction_matrix.shape) * e_correction # flatten the x and y to a 2 x N coordinates array - # N = Nxpix x Nypixels + # N = Nxpixels x Nypixels coords = np.array([angular_correction_matrix.flatten(), e_correction_matrix.flatten()]) # the image is expressed as intensity vs pixels, diff --git a/specsanalyzer/core.py b/specsanalyzer/core.py index ec3ce58..bf2266c 100755 --- a/specsanalyzer/core.py +++ b/specsanalyzer/core.py @@ -17,7 +17,7 @@ from specsanalyzer.config import complete_dictionary from specsanalyzer.config import parse_config from specsanalyzer.convert import calculate_matrix_correction -from specsanalyzer.convert import get_damatrix_fromcalib2d +from specsanalyzer.convert import get_damatrix_from_calib2d from specsanalyzer.convert import physical_unit_data from specsanalyzer.img_tools import crop_xarray from specsanalyzer.img_tools import fourier_filter_2d @@ -100,11 +100,11 @@ def convert_image( conversion_parameters: dict = None, **kwds, ) -> xr.DataArray: - """Converts an imagin in physical unit data, angle vs energy + """Converts an image in physical unit data, angle vs energy Args: raw_img (np.ndarray): Raw image data, numpy 2d matrix - lens_mode (str): analzser lens mode, check calib2d for a list of modes CamelCase naming + lens_mode (str): analyzer lens mode, check calib2d for a list of modes CamelCase naming convention e.g. 
"WideAngleMode" kinetic_energy (float): set analyser kinetic energy pass_energy (float): set analyser pass energy @@ -113,7 +113,7 @@ def convert_image( overwriting determination from calib2d file. Defaults to None. Returns: - xr.DataArray: xarray containg the corrected data and kinetic and angle axis + xr.DataArray: xarray containing the corrected data and kinetic and angle axis """ if conversion_parameters is None: conversion_parameters = {} @@ -163,7 +163,7 @@ def convert_image( conversion_parameters["retardation_ratio"], conversion_parameters["source"], conversion_parameters["dims"], - ) = get_damatrix_fromcalib2d( + ) = get_damatrix_from_calib2d( lens_mode=lens_mode, kinetic_energy=kinetic_energy, pass_energy=pass_energy, @@ -369,7 +369,7 @@ def crop_tool( Args: raw_img (np.ndarray): Raw image data, numpy 2d matrix - lens_mode (str): analzser lens mode, check calib2d for a list + lens_mode (str): analyzer lens mode, check calib2d for a list of modes CamelCase naming convention e.g. "WideAngleMode" kinetic_energy (float): set analyser kinetic energy pass_energy (float): set analyser pass energy @@ -585,7 +585,7 @@ def fft_tool( **kwds: Keyword arguments: - `amplitude`: Normalized amplitude of subtraction - - `pos_x`: horzontal spatial frequency of th mesh + - `pos_x`: horizontal spatial frequency of th mesh - `pos_y`: vertical spatial frequency of the mesh - `sigma_x`: horizontal frequency width - `sigma_y`: vertical frequency width diff --git a/specsanalyzer/img_tools.py b/specsanalyzer/img_tools.py index 2a940c2..707e548 100755 --- a/specsanalyzer/img_tools.py +++ b/specsanalyzer/img_tools.py @@ -40,7 +40,7 @@ def fourier_filter_2d( peaks: Sequence[dict], ret: str = "filtered", ) -> np.ndarray: - """Function to Fourier filter an image for removal of regular pattern artefacts, + """Function to Fourier filter an image for removal of regular pattern artifacts, e.g. grid lines. Args: diff --git a/specsanalyzer/io.py b/specsanalyzer/io.py index a80830e..9c1ab24 100755 --- a/specsanalyzer/io.py +++ b/specsanalyzer/io.py @@ -76,7 +76,7 @@ def recursive_write_metadata(h5group: h5py.Group, node: dict): print(f"Saved {key} as string.") except BaseException as exc: raise ValueError( - f"Unknown error occured, cannot save {item} of type {type(item)}.", + f"Unknown error occurred, cannot save {item} of type {type(item)}.", ) from exc @@ -165,7 +165,7 @@ def load_h5(faddr: str, mode: str = "r") -> xr.DataArray: mode (str, optional): hdf5 read/write mode. Defaults to "r" Returns: - xr.DataArray: output xarra data + xr.DataArray: output xarray data """ with h5py.File(faddr, mode) as h5_file: # Reading data array @@ -413,9 +413,9 @@ def to_nexus( data._attrs["metadata"]. faddr (str): The file path to save to. reader (str): The name of the NeXus reader to use. - definition (str): The NeXus definiton to use. + definition (str): The NeXus definition to use. input_files (str | Sequence[str]): The file path to the configuration file to use. - **kwds: Keyword arguments for ``nexusutils.dataconverter.convert``. + **kwds: Keyword arguments for ``pynxtools.dataconverter.convert``. 
""" if isinstance(input_files, str): diff --git a/specsscan/core.py b/specsscan/core.py index e7ae98a..2831333 100755 --- a/specsscan/core.py +++ b/specsscan/core.py @@ -32,7 +32,7 @@ class SpecsScan: - """SpecsAnalyzer class for loading scans and data from SPECS phoibos electron analyzers, + """SpecsAnalyzer class for loading scans and data from SPECS Phoibos electron analyzers, generated with the ARPESControl software at Fritz Haber Institute, Berlin, and EPFL, Lausanne. Args: @@ -335,7 +335,7 @@ def fft_tool(self, scan: int = None, path: Path | str = "", **kwds): - `apply`: Option to directly apply the settings. - `amplitude`: Normalized amplitude of subtraction - - `pos_x`: horzontal spatial frequency of th mesh + - `pos_x`: horizontal spatial frequency of th mesh - `pos_y`: vertical spatial frequency of the mesh - `sigma_x`: horizontal frequency width - `sigma_y`: vertical frequency width @@ -501,7 +501,7 @@ def save( - "*.h5", "*.hdf5": Saves an HDF5 file. - "*.nxs", "*.nexus": Saves a NeXus file. - **kwds: Keyword argumens, which are passed to the writer functions: + **kwds: Keyword arguments, which are passed to the writer functions: For TIFF writing: - **alias_dict**: Dictionary of dimension aliases to use. @@ -512,9 +512,9 @@ def save( For NeXus: - - **reader**: Name of the nexustools reader to use. + - **reader**: Name of the pynxtools reader to use. Defaults to config["nexus"]["reader"] - - **definiton**: NeXus application definition to use for saving. + - **definition**: NeXus application definition to use for saving. Must be supported by the used ``reader``. Defaults to config["nexus"]["definition"] - **input_files**: A list of input files to pass to the reader. @@ -584,10 +584,10 @@ def process_sweep_scan( Args: raw_data (list[np.ndarray]): List of raw data images kinetic_energy (np.ndarray): Array of analyzer set kinetic energy values - pass_energy (float): set analyser pass energy - lens_mode (str): analzser lens mode, check calib2d for a list of modes CamelCase naming + pass_energy (float): set analyzer pass energy + lens_mode (str): analyzer lens mode, check calib2d for a list of modes CamelCase naming convention e.g. 
"WideAngleMode" - work_function (float): set analyser work function + work_function (float): set analyzer work function Returns: xr.DataArray: Converted sweep scan diff --git a/specsscan/helpers.py b/specsscan/helpers.py index e86576a..d01687c 100644 --- a/specsscan/helpers.py +++ b/specsscan/helpers.py @@ -1,4 +1,4 @@ -"""This script contains helper functions used by the specscan class""" +"""This script contains helper functions used by the specsscan class""" from __future__ import annotations import datetime as dt diff --git a/tests/helpers.py b/tests/helpers.py index ae33e27..5e52147 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -8,7 +8,7 @@ def simulate_binned_data(shape: tuple, dims: list): """Generate a fake xr.DataArray as those generated by binning - Used for testing purpouses + Used for testing purposes Args: shape: Shape ouf the data @@ -19,7 +19,7 @@ def simulate_binned_data(shape: tuple, dims: list): """ assert len(dims) == len( shape, - ), "number of dimesions and data shape must coincide" + ), "number of dimension and data shape must coincide" ret = xr.DataArray( data=np.random.rand(*shape), diff --git a/tests/test_convert.py b/tests/test_convert.py index d41596e..7bed600 100755 --- a/tests/test_convert.py +++ b/tests/test_convert.py @@ -8,7 +8,7 @@ from specsanalyzer import SpecsAnalyzer from specsanalyzer.convert import calculate_matrix_correction from specsanalyzer.convert import calculate_polynomial_coef_da -from specsanalyzer.convert import get_damatrix_fromcalib2d +from specsanalyzer.convert import get_damatrix_from_calib2d package_dir = os.path.dirname(specsanalyzer.__file__) test_dir = package_dir + "/../tests/data/" @@ -52,14 +52,14 @@ def test_da_matrix(): # pylint: disable=too-many-locals # get the matrix_correction e_shift = np.array(calib2d_dict["eShift"]) - _, da_matrix, _, _, _ = get_damatrix_fromcalib2d( + _, da_matrix, _, _, _ = get_damatrix_from_calib2d( lens_mode, kinetic_energy, pass_energy, work_function, calib2d_dict, ) - # get the polynomial coefficent matrix + # get the polynomial coefficient matrix da_poly_matrix = calculate_polynomial_coef_da( da_matrix, kinetic_energy, @@ -86,7 +86,7 @@ def test_conversion_matrix(): binning = 4 nx_pixels = 344 ny_pixels = 256 - a_inner, da_matrix, retardation_ratio, source, dims = get_damatrix_fromcalib2d( + a_inner, da_matrix, retardation_ratio, source, dims = get_damatrix_from_calib2d( lens_mode=lens_mode, kinetic_energy=kinetic_energy, pass_energy=pass_energy, @@ -345,7 +345,7 @@ def test_recycling(): def test_cropping(): - """Test function for checking that cropping parameters are correctly appield""" + """Test function for checking that cropping parameters are correctly applied""" # get the raw data raw_image_name = os.fspath( f"{test_dir}/dataEPFL/R9132/Data9132_RAWDATA.tsv", diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 20f13e8..034b089 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -54,7 +54,7 @@ def test_averaging_with_delays(): @pytest.mark.parametrize("scan", [0, 1, 2]) def test_load_averages(scan): - """Tests loading of 3-D array with default paramteres""" + """Tests loading of 3-D array with default parameters""" data = load_images(scan_path_mirror) with open( scan_path_mirror.joinpath(f"AVG/00{scan}.tsv"), diff --git a/tests/test_specsscan.py b/tests/test_specsscan.py index d9a7601..3029a64 100755 --- a/tests/test_specsscan.py +++ b/tests/test_specsscan.py @@ -90,7 +90,7 @@ def test_conversion_3d(): def test_conversion_from_convert_dict(): - """Test the 
conversion without calib2d file, using passen conversion dictionary parameters""" + """Test the conversion without calib2d file, using passed conversion dictionary parameters""" sps = SpecsScan( config={}, user_config={}, diff --git a/tutorial/1_specsanalyzer_conversion_examples.ipynb b/tutorial/1_specsanalyzer_conversion_examples.ipynb index a2d9ec7..4081dab 100644 --- a/tutorial/1_specsanalyzer_conversion_examples.ipynb +++ b/tutorial/1_specsanalyzer_conversion_examples.ipynb @@ -113,7 +113,7 @@ "metadata": {}, "source": [ "## Adjusting offsets and angle\n", - "image rotation angle and center offsets can be adjusted by keyworkd arguments, or from the config." + "image rotation angle and center offsets can be adjusted by keyword arguments, or from the config." ] }, { @@ -140,7 +140,7 @@ "metadata": {}, "source": [ "## Removal of mesh artefact\n", - "The mesh in front of the MCP introduces some visial artefacts. These can be mitigated by applying a Fourier filter approach, with Peaks in the Fourier plane to remove defined in the config file." + "The mesh in front of the MCP introduces some visual artifacts. These can be mitigated by applying a Fourier filter approach, with Peaks in the Fourier plane to remove defined in the config file." ] }, { @@ -303,7 +303,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3.10.4 ('specanalyserenv')", + "display_name": "Python3", "language": "python", "name": "python3" }, diff --git a/tutorial/2_specsscan_example.ipynb b/tutorial/2_specsscan_example.ipynb index c2082c8..7b50248 100644 --- a/tutorial/2_specsscan_example.ipynb +++ b/tutorial/2_specsscan_example.ipynb @@ -241,7 +241,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "We can e.g. also get a plot along the third dimension, by intergrating along the first.\n", + "We can e.g. 
also get a plot along the third dimension, by integrating along the first.\n", "\n", "One can also access the conversion result from a class accessor:" ] @@ -356,7 +356,7 @@ ], "metadata": { "kernelspec": { - "display_name": "specenv38", + "display_name": "python3", "language": "python", "name": "python3" }, diff --git a/tutorial/3_specsscan_conversion_to_NeXus.ipynb b/tutorial/3_specsscan_conversion_to_NeXus.ipynb index 59f03d9..13bc0f0 100755 --- a/tutorial/3_specsscan_conversion_to_NeXus.ipynb +++ b/tutorial/3_specsscan_conversion_to_NeXus.ipynb @@ -234,7 +234,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3", "language": "python", "name": "python3" }, diff --git a/tutorial/4_specsscan_load_sweep_scan.ipynb b/tutorial/4_specsscan_load_sweep_scan.ipynb index 2c2e0f6..1d58e58 100644 --- a/tutorial/4_specsscan_load_sweep_scan.ipynb +++ b/tutorial/4_specsscan_load_sweep_scan.ipynb @@ -92,7 +92,7 @@ ], "metadata": { "kernelspec": { - "display_name": "specenv38", + "display_name": "python3", "language": "python", "name": "python3" }, From 8ddecd784d116aeba82523028b0d78264a9c95e2 Mon Sep 17 00:00:00 2001 From: rettigl Date: Thu, 27 Jun 2024 23:02:11 +0200 Subject: [PATCH 2/3] add spell check action and pre-commit hook --- .github/workflows/linting.yml | 11 +++++++++-- .github/workflows/update_dependencies.yml | 2 +- .pre-commit-config.yaml | 4 ++++ 3 files changed, 14 insertions(+), 3 deletions(-) diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 0097ca6..b910815 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -30,11 +30,11 @@ jobs: python-version: 3.8 poetry-version: 1.2.2 - # Linting steps, excute all linters even if one fails + # Linting steps, execute all linters even if one fails - name: ruff run: poetry run ruff specsanalyzer specsscan tests - - name: ruff formating + - name: ruff formatting if: ${{ always() }} run: poetry run ruff format --check specsanalyzer specsscan tests @@ -42,3 +42,10 @@ jobs: if: ${{ always() }} run: poetry run mypy specsanalyzer specsscan tests + - name: spellcheck + if: ${{ always() }} + uses: streetsidesoftware/cspell-action@v6 + with: + check_dot_files: false + incremental_files_only: false + config: './cspell.json' diff --git a/.github/workflows/update_dependencies.yml b/.github/workflows/update_dependencies.yml index 03c0515..468adcd 100644 --- a/.github/workflows/update_dependencies.yml +++ b/.github/workflows/update_dependencies.yml @@ -1,4 +1,4 @@ -name: Update depencies in poetry lockfile +name: Update dependencies in poetry lockfile on: schedule: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7f535da..e94afa0 100755 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -42,3 +42,7 @@ repos: rev: 0.6.0 hooks: - id: nbstripout +- repo: https://github.com/streetsidesoftware/cspell-cli + rev: v6.31.1 + hooks: + - id: cspell From c5409b9eee92ddc388cc1d91940d5c0a489403b1 Mon Sep 17 00:00:00 2001 From: rettigl Date: Sat, 13 Jul 2024 23:39:10 +0200 Subject: [PATCH 3/3] change all to american english --- .cspell/custom-dictionary.txt | 2 -- specsanalyzer/convert.py | 10 +++++----- specsanalyzer/core.py | 12 ++++++------ specsscan/core.py | 2 +- specsscan/helpers.py | 2 +- 5 files changed, 13 insertions(+), 15 deletions(-) diff --git a/.cspell/custom-dictionary.txt b/.cspell/custom-dictionary.txt index cc73d73..ff28973 100644 --- a/.cspell/custom-dictionary.txt +++ b/.cspell/custom-dictionary.txt @@ 
-2,7 +2,6 @@ allclose ALLUSERSPROFILE amperemeter -analyser arange archiver argwhere @@ -27,7 +26,6 @@ COLLECTIONCOLUMN colorbar COMPES configpath -Croping cropit damatrix dapolymatrix diff --git a/specsanalyzer/convert.py b/specsanalyzer/convert.py index 871bc85..e1173f0 100755 --- a/specsanalyzer/convert.py +++ b/specsanalyzer/convert.py @@ -12,14 +12,14 @@ def get_damatrix_from_calib2d( work_function: float, calib2d_dict: dict, ) -> tuple[float, np.ndarray, float, str, list[str]]: - """This function estimates the best angular conversion coefficients for the current analyser + """This function estimates the best angular conversion coefficients for the current analyzer mode, starting from a dictionary containing the specs .calib2d database. A linear interpolation is performed from the tabulated coefficients based on the retardation ratio value. Args: lens_mode (str): the lens mode string description kinetic_energy (float): kinetic energy of the photoelectron - pass_energy (float): analyser pass energy + pass_energy (float): analyzer pass energy work_function (float): work function settings calib2d_dict (dict): dictionary containing the configuration parameters for angular correction @@ -245,7 +245,7 @@ def calculate_polynomial_coef_da( Args: da_matrix (np.ndarray): the matrix of interpolated da coefficients kinetic_energy (float): photoelectron kinetic energy - pass_energy (float): analyser pass energy + pass_energy (float): analyzer pass energy e_shift (np.ndarray): e shift parameter, defining the energy range around the center for the polynomial fit of the da coefficients @@ -386,8 +386,8 @@ def calculate_matrix_correction( """Calculate the angular and energy interpolation matrices for the correction function. Args: - kinetic_energy (float): analyser set kinetic energy - pass_energy (float): analyser set pass energy + kinetic_energy (float): analyzer set kinetic energy + pass_energy (float): analyzer set pass energy nx_pixels (int): number of image pixels (after binning) along the energy dispersing direction ny_pixels (int): number of image pixels (after binning) along the angle/spatially diff --git a/specsanalyzer/core.py b/specsanalyzer/core.py index bf2266c..88f8406 100755 --- a/specsanalyzer/core.py +++ b/specsanalyzer/core.py @@ -106,9 +106,9 @@ def convert_image( raw_img (np.ndarray): Raw image data, numpy 2d matrix lens_mode (str): analyzer lens mode, check calib2d for a list of modes CamelCase naming convention e.g. "WideAngleMode" - kinetic_energy (float): set analyser kinetic energy - pass_energy (float): set analyser pass energy - work_function (float): set analyser work function + kinetic_energy (float): set analyzer kinetic energy + pass_energy (float): set analyzer pass energy + work_function (float): set analyzer work function conversion_parameters (dict, optional): dictionary of conversion parameters, overwriting determination from calib2d file. Defaults to None. @@ -371,9 +371,9 @@ def crop_tool( raw_img (np.ndarray): Raw image data, numpy 2d matrix lens_mode (str): analyzer lens mode, check calib2d for a list of modes CamelCase naming convention e.g. "WideAngleMode" - kinetic_energy (float): set analyser kinetic energy - pass_energy (float): set analyser pass energy - work_function (float): set analyser work function + kinetic_energy (float): set analyzer kinetic energy + pass_energy (float): set analyzer pass energy + work_function (float): set analyzer work function apply (bool, optional): Option to directly apply the pre-selected cropping parameters. 
Defaults to False. **kwds: Keyword parameters for the crop tool: diff --git a/specsscan/core.py b/specsscan/core.py index 2831333..8f12d1a 100755 --- a/specsscan/core.py +++ b/specsscan/core.py @@ -288,7 +288,7 @@ def load_scan( return res_xarray def crop_tool(self, scan: int = None, path: Path | str = "", **kwds): - """Croping tool interface to crop_tool method of the SpecsAnalyzer class. + """Cropping tool interface to crop_tool method of the SpecsAnalyzer class. Args: scan (int, optional): Scan number to load data from. Defaults to None. diff --git a/specsscan/helpers.py b/specsscan/helpers.py index d01687c..02e396c 100644 --- a/specsscan/helpers.py +++ b/specsscan/helpers.py @@ -390,7 +390,7 @@ def handle_meta( kinetic_energy = df_lut["KineticEnergy"].to_numpy() if len(set(kinetic_energy)) > 1 and scan_info["ScanType"] == "voltage": - energy_scan_mode = "fixed_analyser_transmission" + energy_scan_mode = "fixed_analyser_transmission" # spell-checker: word: analyser metadata["scan_info"] = complete_dictionary( metadata.get("scan_info", {}),