Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/main' into v1_feature_branch
Browse files Browse the repository at this point in the history
  • Loading branch information
rettigl committed Jul 2, 2024
2 parents b9afad7 + 2bf9a13 commit 0fdff42
Show file tree
Hide file tree
Showing 14 changed files with 487 additions and 46 deletions.
405 changes: 405 additions & 0 deletions .cspell/custom-dictionary.txt

Large diffs are not rendered by default.

5 changes: 4 additions & 1 deletion .github/workflows/benchmark.yml
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,10 @@ jobs:
- name: Install project dependencies
run: poetry install

# Run benchmakrs
- name: Install project dependencies
run: poetry install

# Run benchmarks
- name: Run benchmarks on python 3.8
run: |
poetry run pytest --full-trace --show-capture=no -sv benchmarks/benchmark_*.py
Expand Down
11 changes: 9 additions & 2 deletions .github/workflows/linting.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,15 +22,22 @@ jobs:
python-version: 3.9
poetry-version: 1.8.3

# Linting steps, excute all linters even if one fails
# Linting steps, execute all linters even if one fails
- name: ruff
run:
poetry run ruff sed tests
- name: ruff formating
- name: ruff formatting
if: ${{ always() }}
run:
poetry run ruff format --check sed tests
- name: mypy
if: ${{ always() }}
run:
poetry run mypy sed tests
- name: spellcheck
if: ${{ always() }}
uses: streetsidesoftware/cspell-action@v6
with:
check_dot_files: false
incremental_files_only: false
config: './cspell.json'
2 changes: 1 addition & 1 deletion .github/workflows/update_dependencies.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Update depencies in poetry lockfile
name: Update dependencies in poetry lockfile

on:
schedule:
Expand Down
4 changes: 4 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -42,3 +42,7 @@ repos:
rev: 0.6.0
hooks:
- id: nbstripout
- repo: https://github.com/streetsidesoftware/cspell-cli
rev: v6.31.1
hooks:
- id: cspell
4 changes: 2 additions & 2 deletions benchmarks/Binning Benchmarks.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
"source": [
"# Binning demonstration on locally generated fake data\n",
"In this example, we generate a table with random data simulating a single event dataset.\n",
"We showcase the binning method, first on a simple single table using the bin_partition method and then in the distributed mehthod bin_dataframe, using daks dataframes.\n",
"We showcase the binning method, first on a simple single table using the bin_partition method and then in the distributed method bin_dataframe, using dask dataframes.\n",
"The first method is never really called directly, as it is simply the function called by the bin_dataframe on each partition of the dask dataframe."
]
},
Expand Down Expand Up @@ -200,7 +200,7 @@
"metadata": {},
"outputs": [],
"source": [
"data_path = '../../' # Put in Path to a storage of at least 20 Gbyte free space.\n",
"data_path = '../../' # Put in Path to a storage of at least 20 GByte free space.\n",
"if not os.path.exists(data_path + \"/WSe2.zip\"):\n",
" os.system(f\"curl --output {data_path}/WSe2.zip https://zenodo.org/record/6369728/files/WSe2.zip\")\n",
"if not os.path.isdir(data_path + \"/Scan049_1\") or not os.path.isdir(data_path + \"energycal_2019_01_08/\"):\n",
Expand Down
4 changes: 2 additions & 2 deletions benchmarks/mpes_sed_benchmarks.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@
"metadata": {},
"outputs": [],
"source": [
"dataPath = '../../' # Put in Path to a storage of at least 20 Gbyte free space.\n",
"dataPath = '../../' # Put in Path to a storage of at least 20 GByte free space.\n",
"if not os.path.exists(dataPath + \"/WSe2.zip\"):\n",
" os.system(f\"curl --output {dataPath}/WSe2.zip https://zenodo.org/record/6369728/files/WSe2.zip\")\n",
"if not os.path.isdir(dataPath + \"/Scan049_1\") or not os.path.isdir(dataPath + \"energycal_2019_01_08/\"):\n",
Expand Down Expand Up @@ -106,7 +106,7 @@
"metadata": {},
"source": [
"## compute distributed binning on the partitioned dask dataframe\n",
"We generated 100 dataframe partiions from the 100 files in the dataset, which we will bin parallelly with the dataframe binning function into a 3D grid"
"We generated 100 dataframe partitions from the 100 files in the dataset, which we will bin in parallel with the dataframe binning function into a 3D grid"
]
},
{
Expand Down
22 changes: 22 additions & 0 deletions cspell.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
{
"version": "0.2",
"ignorePaths": [
"./tests/data/*",
"*.toml",
"Makefile",
"*.bat"
],
"dictionaryDefinitions": [
{
"name": "custom-dictionary",
"path": "./.cspell/custom-dictionary.txt",
"addWords": true
}
],
"dictionaries": [ "custom-dictionary"
],
"words": [],
"ignoreWords": [],
"import": [],
"language": "en-GB, en-US"
}
58 changes: 29 additions & 29 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion sed/config/flash_example_config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ dataframe:
dataset_key: "/uncategorised/FLASH.EXP/HEXTOF.DAQ/DLD1/value"
slice: 3

# The auxillary channel has a special structure where the group further contains
# The auxiliary channel has a special structure where the group further contains
# a multidimensional structure so further aliases are defined below
dldAux:
format: per_pulse
Expand Down
6 changes: 3 additions & 3 deletions sed/loader/sxp/loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -444,15 +444,15 @@ def create_dataframe_per_pulse(
DataFrame: The pandas DataFrame for the channel's data.
Notes:
- For auxillary channels, the macrobunch resolved data is repeated 499 times to be
compared to electron resolved data for each auxillary channel. The data is then
- For auxiliary channels, the macrobunch resolved data is repeated 499 times to be
compared to electron resolved data for each auxiliary channel. The data is then
converted to a multicolumn DataFrame.
- For all other pulse resolved channels, the macrobunch resolved data is exploded
to a DataFrame and the MultiIndex is set.
"""

# Special case for auxillary channels
# Special case for auxiliary channels
if channel == "dldAux":
# Checks the channel dictionary for correct slices and creates a multicolumn DataFrame
data_frames = (
Expand Down
4 changes: 2 additions & 2 deletions tests/data/loader/flash/config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -106,8 +106,8 @@ dataframe:
slice: 3
dtype: uint32

# The auxillary channel has a special structure where the group further contains
# a multidim structure so further aliases are defined below
# The auxiliary channel has a special structure where the group further contains
# a multidimensional structure so further aliases are defined below
dldAux:
format: per_train
index_key: "/uncategorised/FLASH.EXP/HEXTOF.DAQ/DLD1/index"
Expand Down
4 changes: 2 additions & 2 deletions tests/test_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -999,12 +999,12 @@ def test_get_normalization_histogram() -> None:

metadata: dict[Any, Any] = {}
metadata["entry_title"] = "Title"
# User
# user
metadata["user0"] = {}
metadata["user0"]["name"] = "Name"
metadata["user0"]["email"] = "email"
metadata["user0"]["affiliation"] = "affiliation"
# Instrument
# instrument
metadata["instrument"] = {}
# analyzer
metadata["instrument"]["analyzer"] = {}
Expand Down
2 changes: 1 addition & 1 deletion tutorial/6_binning_with_time-stamped_data.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -328,7 +328,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "sed-ugcuzRKD-py3.9",
"display_name": "python3",
"language": "python",
"name": "python3"
},
Expand Down

0 comments on commit 0fdff42

Please sign in to comment.