Commit

Merge remote-tracking branch 'origin/main' into v1_feature_branch
rettigl committed Nov 30, 2024
2 parents 8f4b6bf + d188691 commit 1a28294
Showing 14 changed files with 2,223 additions and 453 deletions.
5 changes: 5 additions & 0 deletions .cspell/custom-dictionary.txt
@@ -177,6 +177,7 @@ levelname
linalg
linekwds
linesegkwds
linestyles
linewidth
linspace
literalinclude
@@ -331,6 +332,7 @@ scandir
scatt
scatterkwds
scicat
scipy
SDIAG
sdir
segs
@@ -350,6 +352,7 @@ subchannels
subdir
subdirs
subfolders
suptitle
symscores
targcenter
termorder
@@ -376,6 +379,7 @@ TZCYXS
tzoffset
ubid
UDLD
ufunc
unbinned
uncategorised
undoc
@@ -384,6 +388,7 @@ varnames
venv
verts
viewcode
vlines
vmax
voxels
VTOF
8 changes: 8 additions & 0 deletions config/datasets.json
@@ -14,6 +14,14 @@
],
"rearrange_files": true
},
"W110": {
"url": "https://zenodo.org/records/12609441/files/single_event_data.zip",
"subdirs": [
"analysis_data",
"calibration_data"
],
"rearrange_files": true
},
"TaS2": {
"url": "https://zenodo.org/records/10160182/files/TaS2.zip",
"subdirs": [
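For orientation, the new "W110" entry is consumed the same way as the datasets registered above it. A minimal sketch, assuming the dataset helper is imported as in the existing docs scripts (the root_dir value is illustrative, not part of this commit):

```python
from sed.dataset import dataset  # assumed import path, as used by the docs scripts

# Fetch and unpack the newly registered W110 single-event dataset;
# remove_zip discards the downloaded archive after extraction.
dataset.get("W110", remove_zip=True, root_dir="./tutorial")
data_path = dataset.dir  # directory the data was unpacked into
```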
22 changes: 22 additions & 0 deletions docs/scripts/build_flash_parquets.py
@@ -29,3 +29,25 @@
system_config=config_file,
collect_metadata=False,
)

dataset.get("W110", root_dir="./tutorial")
data_path = dataset.dir


config_override = {
"core": {
"paths": {
"data_raw_dir": data_path,
"data_parquet_dir": data_path + "/processed/",
},
},
}

runs = ["44498", "44455"]
for run in runs:
sp = SedProcessor(
runs=run,
config=config_override,
system_config=config_file,
collect_metadata=False,
)
1 change: 1 addition & 0 deletions docs/scripts/download_data.py
@@ -6,3 +6,4 @@
dataset.get("Gd_W110", remove_zip=True, root_dir=root_dir)
dataset.get("TaS2", remove_zip=True, root_dir=root_dir)
dataset.get("Au_Mica", remove_zip=True, root_dir=root_dir)
dataset.get("W110", remove_zip=True, root_dir=root_dir)
2 changes: 2 additions & 0 deletions docs/user_guide/index.md
@@ -27,4 +27,6 @@ config
../tutorial/6_binning_with_time-stamped_data
../tutorial/7_correcting_orthorhombic_symmetry
../tutorial/8_jittering_tutorial
../tutorial/10_hextof_workflow_trXPS_bam_correction
../tutorial/11_hextof_workflow_trXPS_energy_calibration_using_SB
```
3 changes: 2 additions & 1 deletion docs/workflows/index.md
@@ -8,5 +8,6 @@ myst:

```{toctree}
../tutorial/4_hextof_workflow
../tutorial/5_sxp_workflow.ipynb
../tutorial/5_sxp_workflow
../tutorial/9_hextof_workflow_trXPD
```
869 changes: 470 additions & 399 deletions poetry.lock

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions pyproject.toml
@@ -20,14 +20,14 @@ h5py = ">=3.6.0"
ipympl = ">=0.9.1"
ipywidgets = "^8.1.5"
lmfit = ">=1.0.3"
matplotlib = ">=3.5.1, <3.9.1"
matplotlib = ">=3.5.1"
natsort = ">=8.1.0"
numba = ">=0.55.1"
numpy = ">=1.18, <2.0"
pandas = ">=1.4.1"
psutil = ">=5.9.0"
pynxtools-mpes = "^0.2.0"
pynxtools = "^0.8.0"
pynxtools-mpes = ">=0.2.0"
pynxtools = ">=0.8.0"
pyyaml = ">=6.0.0"
scipy = ">=1.8.0"
symmetrize = ">=0.5.5"
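For readers unfamiliar with Poetry's operators: "^0.2.0" is shorthand for ">=0.2.0,<0.3.0", so replacing it with ">=0.2.0" only drops the implicit upper bound. A small illustration using the packaging library (not part of this commit):

```python
from packaging.specifiers import SpecifierSet

caret = SpecifierSet(">=0.2.0,<0.3.0")  # what pynxtools-mpes = "^0.2.0" expands to
relaxed = SpecifierSet(">=0.2.0")       # the new, open-ended constraint

print("0.3.1" in caret)    # False: the caret bound would have excluded this release
print("0.3.1" in relaxed)  # True: now permitted
```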
7 changes: 4 additions & 3 deletions src/sed/calibrator/momentum.py
@@ -7,6 +7,7 @@
from copy import deepcopy
from datetime import datetime
from typing import Any
from typing import Literal

import bokeh.palettes as bp
import bokeh.plotting as pbk
@@ -1294,7 +1295,7 @@ def calc_inverse_dfield(self):
def view(
self,
image: np.ndarray = None,
origin: str = "lower",
origin: Literal["upper", "lower"] = "lower",
cmap: str = "terrain_r",
figsize: tuple[int, int] = (4, 4),
points: dict = None,
@@ -1312,8 +1313,8 @@ def view(
Args:
image (np.ndarray, optional): The image to plot. Defaults to self.slice.
origin (str, optional): Figure origin specification ('lower' or 'upper').
Defaults to "lower".
origin (Literal["upper", "lower"], optional): Figure origin specification
('lower' or 'upper'). Defaults to "lower".
cmap (str, optional): Colormap specification. Defaults to "terrain_r".
figsize (tuple[int, int], optional): Figure size. Defaults to (4, 4).
points (dict, optional): Points for annotation. Defaults to None.
17 changes: 9 additions & 8 deletions src/sed/core/processor.py
@@ -1371,14 +1371,15 @@ def calibrate_energy_axis(
**kwds,
)
if self._verbose:
self.ec.view(
traces=self.ec.traces_normed,
xaxis=self.ec.calibration["axis"],
align=True,
energy_scale=energy_scale,
backend="matplotlib",
title="Quality of Calibration",
)
if self.ec.traces_normed is not None:
self.ec.view(
traces=self.ec.traces_normed,
xaxis=self.ec.calibration["axis"],
align=True,
energy_scale=energy_scale,
backend="matplotlib",
title="Quality of Calibration",
)
plt.xlabel("Energy (eV)")
plt.ylabel("Intensity")
plt.tight_layout()
109 changes: 70 additions & 39 deletions src/sed/loader/mpes/loader.py
@@ -104,20 +104,29 @@ def hdf5_to_dataframe(
)

# Delay-read all files
arrays = [
da.from_delayed(
dask.delayed(hdf5_to_array)(
h5file=h5py.File(f),
channels=electron_channels,
time_stamps=time_stamps,
ms_markers_key=ms_markers_key,
first_event_time_stamp_key=first_event_time_stamp_key,
),
dtype=test_array.dtype,
shape=(test_array.shape[0], np.nan),
)
for f in files
]
arrays = []
for f in files:
try:
arrays.append(
da.from_delayed(
dask.delayed(hdf5_to_array)(
h5file=h5py.File(f),
channels=electron_channels,
time_stamps=time_stamps,
ms_markers_key=ms_markers_key,
first_event_time_stamp_key=first_event_time_stamp_key,
),
dtype=test_array.dtype,
shape=(test_array.shape[0], np.nan),
),
)
except OSError as exc:
if "Unable to synchronously open file" in str(exc):
logger.warning(
f"Unable to open file {f}: {str(exc)}. Most likely the file is incomplete.",
)
pass

array_stack = da.concatenate(arrays, axis=1).T

dataframe = ddf.from_dask_array(array_stack, columns=column_names)
@@ -211,20 +220,26 @@ def hdf5_to_timed_dataframe(
)

# Delay-read all files
arrays = [
da.from_delayed(
dask.delayed(hdf5_to_timed_array)(
h5file=h5py.File(f),
channels=electron_channels,
time_stamps=time_stamps,
ms_markers_key=ms_markers_key,
first_event_time_stamp_key=first_event_time_stamp_key,
),
dtype=test_array.dtype,
shape=(test_array.shape[0], np.nan),
)
for f in files
]
arrays = []
for f in files:
try:
arrays.append(
da.from_delayed(
dask.delayed(hdf5_to_timed_array)(
h5file=h5py.File(f),
channels=electron_channels,
time_stamps=time_stamps,
ms_markers_key=ms_markers_key,
first_event_time_stamp_key=first_event_time_stamp_key,
),
dtype=test_array.dtype,
shape=(test_array.shape[0], np.nan),
),
)
except OSError as exc:
if "Unable to synchronously open file" in str(exc):
pass

array_stack = da.concatenate(arrays, axis=1).T

dataframe = ddf.from_dask_array(array_stack, columns=column_names)
@@ -1023,13 +1038,21 @@ def get_count_rate(
count_rate_list = []
accumulated_time = 0
for fid in fids:
count_rate_, secs_ = get_count_rate(
h5py.File(self.files[fid]),
ms_markers_key=ms_markers_key,
)
secs_list.append((accumulated_time + secs_).T)
count_rate_list.append(count_rate_.T)
accumulated_time += secs_[-1]
try:
count_rate_, secs_ = get_count_rate(
h5py.File(self.files[fid]),
ms_markers_key=ms_markers_key,
)
secs_list.append((accumulated_time + secs_).T)
count_rate_list.append(count_rate_.T)
accumulated_time += secs_[-1]
except OSError as exc:
if "Unable to synchronously open file" in str(exc):
logger.warning(
f"Unable to open file {fid}: {str(exc)}. "
"Most likely the file is incomplete.",
)
pass

count_rate = np.concatenate(count_rate_list)
secs = np.concatenate(secs_list)
@@ -1066,10 +1089,18 @@ def get_elapsed_time(self, fids: Sequence[int] = None, **kwds) -> float:

secs = 0.0
for fid in fids:
secs += get_elapsed_time(
h5py.File(self.files[fid]),
ms_markers_key=ms_markers_key,
)
try:
secs += get_elapsed_time(
h5py.File(self.files[fid]),
ms_markers_key=ms_markers_key,
)
except OSError as exc:
if "Unable to synchronously open file" in str(exc):
logger.warning(
f"Unable to open file {fid}: {str(exc)}. "
"Most likely the file is incomplete.",
)
pass

return secs

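The loader changes above share one pattern: attempt the HDF5 read and skip files that cannot be opened because they are still being written. A minimal, self-contained sketch of that pattern (file paths and logger name are hypothetical):

```python
import logging

import h5py

logger = logging.getLogger("sed.loader.mpes")  # illustrative logger name
files = ["run_0001.h5", "run_0002.h5"]  # hypothetical paths; incomplete files may be present

handles = []
for path in files:
    try:
        handles.append(h5py.File(path))  # read-only open raises OSError on incomplete files
    except OSError as exc:
        # Mirror the commit: only the "unable to open" case is tolerated and logged.
        if "Unable to synchronously open file" in str(exc):
            logger.warning(f"Unable to open file {path}: {exc}. Most likely the file is incomplete.")
```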