From d07c9ebc1e811124e8cf0676cf9860abc81fe2eb Mon Sep 17 00:00:00 2001
From: Tobias Jachowski
Date: Mon, 8 May 2023 10:25:31 +0200
Subject: [PATCH 1/3] flake8: B028 got renamed to B907.

See https://github.com/PyCQA/flake8-bugbear/pull/333
---
 .flake8 | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.flake8 b/.flake8
index 84d9a80cd..13e7fc933 100644
--- a/.flake8
+++ b/.flake8
@@ -29,6 +29,6 @@ ignore =
     F403,F405,
     # Don't complain about f"'{x}'" quotes. `!r` is not particularly readable, especially not for
    # cases where one would have to resort to f"{str(x)!r}"
-    B028,
+    B907,

 show-source = True

From bc39954f7fb27323ca25168c984d5223de89aa83 Mon Sep 17 00:00:00 2001
From: JoepVanlier
Date: Fri, 4 Aug 2023 14:39:21 +0200
Subject: [PATCH 2/3] qc: add a utility function for finding stack level

---
 lumicks/pylake/detail/utilities.py | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)

diff --git a/lumicks/pylake/detail/utilities.py b/lumicks/pylake/detail/utilities.py
index 5816a7dcb..04a1be4b2 100644
--- a/lumicks/pylake/detail/utilities.py
+++ b/lumicks/pylake/detail/utilities.py
@@ -1,5 +1,7 @@
 import math
 import contextlib
+import pathlib
+import inspect

 import numpy as np
 import cachetools
@@ -213,3 +215,22 @@ def temp_seed(seed):
         yield
     finally:
         np.random.seed(None)
+
+
+def find_stack_level() -> int:
+    """Find where we leave the module"""
+    import lumicks.pylake as lk
+
+    pylake_folder = pathlib.Path(lk.__file__).parent
+
+    depth = 0
+    frame = inspect.currentframe()
+    while frame:
+        current_path = pathlib.Path(inspect.getfile(frame))
+        if pylake_folder in current_path.parents and "tests" not in current_path.parts:
+            frame = frame.f_back
+            depth += 1
+        else:
+            break
+
+    return depth

From ea89a78f0cf70fa45fc5800513972fe684511dc3 Mon Sep 17 00:00:00 2001
From: JoepVanlier
Date: Fri, 4 Aug 2023 14:39:04 +0200
Subject: [PATCH 3/3] cq: update warnings with automatic stack_level

---
 lumicks/pylake/detail/confocal.py | 5 +--
 lumicks/pylake/detail/image.py | 6 +++-
 lumicks/pylake/detail/plotting.py | 6 ++--
 lumicks/pylake/detail/utilities.py | 4 +--
 lumicks/pylake/detail/widefield.py | 3 +-
 lumicks/pylake/file.py | 3 +-
 .../fitting/detail/derivative_manipulation.py | 5 ++-
 .../fitting/detail/model_implementation.py | 7 ++--
 .../force_calibration/power_spectrum.py | 7 ++--
 lumicks/pylake/force_calibration/touchdown.py | 11 ++++---
 lumicks/pylake/group.py | 9 +++--
 lumicks/pylake/image_stack.py | 4 ++-
 lumicks/pylake/kymo.py | 13 +++++---
 .../kymotracker/detail/msd_estimation.py | 21 ++++++++----
 lumicks/pylake/kymotracker/kymotrack.py | 33 +++++++++++--------
 lumicks/pylake/kymotracker/kymotracker.py | 9 +++--
 lumicks/pylake/nb_widgets/correlated_plot.py | 7 +++-
 .../pylake/nb_widgets/kymotracker_widgets.py | 5 ++-
 .../pylake/piezo_tracking/piezo_tracking.py | 4 ++-
 lumicks/pylake/population/detail/hmm.py | 8 +++--
 20 files changed, 118 insertions(+), 52 deletions(-)

diff --git a/lumicks/pylake/detail/confocal.py b/lumicks/pylake/detail/confocal.py
index 0c2916085..bbe1e414a 100644
--- a/lumicks/pylake/detail/confocal.py
+++ b/lumicks/pylake/detail/confocal.py
@@ -9,7 +9,7 @@
 from .image import reconstruct_image, reconstruct_image_sum
 from .mixin import PhotonCounts, ExcitationLaserPower
 from .plotting import parse_color_channel
-from .utilities import method_cache, could_sum_overflow
+from .utilities import method_cache, find_stack_level, could_sum_overflow
 from ..adjustments import no_adjustment
 from .imaging_mixins import TiffExport

@@ -350,7 +350,8 @@ def _tiff_image_metadata(self) -> dict:
         except NotImplementedError:
             warnings.warn(
                 f"Pixel times are not defined for this {self.__class__.__name__}. "
-                "The corresponding metadata in the output file is set to `None`."
+                "The corresponding metadata in the output file is set to `None`.",
+                stacklevel=find_stack_level(),
             )
             pixel_time_seconds = None

diff --git a/lumicks/pylake/detail/image.py b/lumicks/pylake/detail/image.py
index 247339921..c7bec5487 100644
--- a/lumicks/pylake/detail/image.py
+++ b/lumicks/pylake/detail/image.py
@@ -4,6 +4,8 @@

 import numpy as np

+from .utilities import find_stack_level
+

 class InfowaveCode(enum.IntEnum):
     discard = 0  # this data sample does not contain useful information
@@ -197,7 +199,9 @@ def histogram_rows(image, pixels_per_bin, pixel_width):
     remainder = n_rows % pixels_per_bin
     if remainder != 0:
         warnings.warn(
-            f"{n_rows} pixels is not divisible by {pixels_per_bin}, final bin only contains {remainder} pixels"
+            f"{n_rows} pixels is not divisible by {pixels_per_bin}, final bin only contains "
+            f"{remainder} pixels",
+            stacklevel=find_stack_level(),
         )
         pad = np.zeros((pixels_per_bin - remainder, image.shape[1]))
         image = np.vstack((image, pad))

diff --git a/lumicks/pylake/detail/plotting.py b/lumicks/pylake/detail/plotting.py
index 8f519b121..26b228bd1 100644
--- a/lumicks/pylake/detail/plotting.py
+++ b/lumicks/pylake/detail/plotting.py
@@ -1,6 +1,7 @@
 import warnings

 from ..adjustments import no_adjustment
+from ..detail.utilities import find_stack_level


 def get_axes(axes=None, image_handle=None):
@@ -36,8 +37,9 @@ def parse_color_channel(channel):
         warnings.warn(
             DeprecationWarning(
                 "In future versions, the `channel` argument will be restricted to lowercase "
-                f"letters only. Use '{channel}' instead of '{input_channel}'."
-            )
+                f"letters only. Use '{channel}' instead of '{input_channel}'.",
+            ),
+            stacklevel=find_stack_level(),
         )

     # check rgb order

diff --git a/lumicks/pylake/detail/utilities.py b/lumicks/pylake/detail/utilities.py
index 04a1be4b2..516b355cd 100644
--- a/lumicks/pylake/detail/utilities.py
+++ b/lumicks/pylake/detail/utilities.py
@@ -1,7 +1,7 @@
 import math
-import contextlib
-import pathlib
 import inspect
+import pathlib
+import contextlib

 import numpy as np
 import cachetools

diff --git a/lumicks/pylake/detail/widefield.py b/lumicks/pylake/detail/widefield.py
index 60539514d..3d324120b 100644
--- a/lumicks/pylake/detail/widefield.py
+++ b/lumicks/pylake/detail/widefield.py
@@ -9,6 +9,7 @@
 import tifffile

 from .plotting import parse_color_channel
+from .utilities import find_stack_level
 from ..adjustments import no_adjustment


@@ -205,7 +206,7 @@ def __init__(self, tiff_files, align_requested, roi=None, tether=None):
         # warn on file open if alignment is requested, but not possible
         # stacklevel=4 corresponds to ImageStack.__init__()
         if self._description._alignment.has_problem:
-            warnings.warn(self._description._alignment.status.value, stacklevel=4)
+            warnings.warn(self._description._alignment.status.value, stacklevel=find_stack_level())

         if roi is None:
             self._roi = Roi(0, self._description.width, 0, self._description.height)

diff --git a/lumicks/pylake/file.py b/lumicks/pylake/file.py
index 58e9ecf3e..f9f35a072 100644
--- a/lumicks/pylake/file.py
+++ b/lumicks/pylake/file.py
@@ -14,6 +14,7 @@
 from .calibration import ForceCalibration
 from .detail.mixin import Force, PhotonCounts, DownsampledFD, PhotonTimeTags, BaselineCorrectedForce
 from .detail.h5_helper import write_h5
+from .detail.utilities import find_stack_level

 __all__ = ["File"]

@@ -276,7 +277,7 @@ def try_from_dataset(*args):
             try:
                 return cls.from_dataset(*args)
             except Exception as e:
-                warnings.warn(e.args[0])
+                warnings.warn(e.args[0], stacklevel=find_stack_level())
                 return None

         if field not in self.h5:

diff --git a/lumicks/pylake/fitting/detail/derivative_manipulation.py b/lumicks/pylake/fitting/detail/derivative_manipulation.py
index 630847d1c..fade8af2b 100644
--- a/lumicks/pylake/fitting/detail/derivative_manipulation.py
+++ b/lumicks/pylake/fitting/detail/derivative_manipulation.py
@@ -3,6 +3,8 @@
 import numpy as np
 import scipy

+from ...detail.utilities import find_stack_level
+

 def numerical_diff(fn, x, dx=1e-6):
     return (fn(x + dx) - fn(x - dx)) / (2.0 * dx)
@@ -126,7 +128,8 @@ def invert_function_interpolation(
         except Exception as e:
             warnings.warn(
                 f"Interpolation failed. Cause: {e}. Falling back to brute force evaluation. "
-                f"Results should be fine, but slower."
+                f"Results should be fine, but slower.",
+                stacklevel=find_stack_level(),
             )
             result[interpolated_idx] = manual_inversion(d[interpolated_idx], initial)
         else:

diff --git a/lumicks/pylake/fitting/detail/model_implementation.py b/lumicks/pylake/fitting/detail/model_implementation.py
index a6b33566c..4b63be3d0 100644
--- a/lumicks/pylake/fitting/detail/model_implementation.py
+++ b/lumicks/pylake/fitting/detail/model_implementation.py
@@ -4,6 +4,7 @@

 from .utilities import latex_frac, latex_sqrt, solve_formatter, solve_formatter_tex
 from ..parameters import Parameter
+from ...detail.utilities import find_stack_level
 from .derivative_manipulation import invert_function, invert_jacobian, invert_function_interpolation


@@ -103,8 +104,10 @@ def wlc_marko_siggia_force(d, Lp, Lc, kT):
     if np.any(d > Lc):
         warnings.warn(
-            "Marko Siggia model is only defined properly up to the contour length (d = Lc)",
-            RuntimeWarning,
+            RuntimeWarning(
+                "Marko Siggia model is only defined properly up to the contour length (d = Lc)"
+            ),
+            stacklevel=find_stack_level(),
         )

     d_div_Lc = d / Lc

diff --git a/lumicks/pylake/force_calibration/power_spectrum.py b/lumicks/pylake/force_calibration/power_spectrum.py
index f776bb75c..194810ea3 100644
--- a/lumicks/pylake/force_calibration/power_spectrum.py
+++ b/lumicks/pylake/force_calibration/power_spectrum.py
@@ -4,7 +4,7 @@

 import numpy as np

-from lumicks.pylake.detail.utilities import downsample
+from lumicks.pylake.detail.utilities import downsample, find_stack_level


 class PowerSpectrum:
@@ -52,7 +52,10 @@ def squared_fft(d):
             int(np.round(window_seconds * sample_rate)) if window_seconds else len(data)
         )
         if num_points_per_window > len(data):
-            warnings.warn(RuntimeWarning("Longer window than data duration: not using windowing."))
+            warnings.warn(
+                RuntimeWarning("Longer window than data duration: not using windowing."),
+                stacklevel=find_stack_level(),
+            )
             num_points_per_window = len(data)

         squared_fft_chunks = [

diff --git a/lumicks/pylake/force_calibration/touchdown.py b/lumicks/pylake/force_calibration/touchdown.py
index 6e1808636..a18a921bd 100644
--- a/lumicks/pylake/force_calibration/touchdown.py
+++ b/lumicks/pylake/force_calibration/touchdown.py
@@ -4,7 +4,7 @@
 import numpy as np
 import scipy

-from lumicks.pylake.detail.utilities import downsample
+from lumicks.pylake.detail.utilities import downsample, find_stack_level


 def mack_model(
@@ -82,7 +82,8 @@ def f_test(sse_restricted, sse_unrestricted, num_data, num_pars_difference, num_
             RuntimeWarning(
                 "Denominator in F-Test is zero. "
                 "This may be caused by using noise-free data or fewer than 4 data points."
-            )
+            ),
+            stacklevel=find_stack_level(),
         )
         return 0.0
     else:
@@ -310,7 +311,8 @@ def touchdown(
             RuntimeWarning(
                 "Insufficient data available to reliably fit touchdown curve. We need at least two "
                 "oscillations to reliably fit the interference pattern."
-            )
+            ),
+            stacklevel=find_stack_level(),
         )

         focal_shift = None

@@ -318,7 +320,8 @@
         warnings.warn(
             RuntimeWarning(
                 "Surface detection failed (piecewise linear fit not better than linear fit)"
-            )
+            ),
+            stacklevel=find_stack_level(),
         )
         surface_position = None

diff --git a/lumicks/pylake/group.py b/lumicks/pylake/group.py
index 65c0f289b..d0cf9479c 100644
--- a/lumicks/pylake/group.py
+++ b/lumicks/pylake/group.py
@@ -1,6 +1,7 @@
 import warnings

 from .channel import channel_class
+from .detail.utilities import find_stack_level


 class Group:
@@ -34,9 +35,11 @@ def __getitem__(self, item):
         redirect_location, redirect_class = self._lk_file.redirect_list.get(item_type, (None, None))
         if redirect_location and not redirect_class:
             warnings.warn(
-                f"Direct access to this field is deprecated. Use file.{redirect_location} "
-                "instead. In case raw access is needed, go through the fn.h5 directly.",
-                FutureWarning,
+                FutureWarning(
+                    f"Direct access to this field is deprecated. Use file.{redirect_location} "
+                    "instead. In case raw access is needed, go through the fn.h5 directly.",
+                ),
+                stacklevel=find_stack_level(),
             )

         if type(thing) is h5py.Group:

diff --git a/lumicks/pylake/image_stack.py b/lumicks/pylake/image_stack.py
index 8fc80a559..aab1f70d6 100644
--- a/lumicks/pylake/image_stack.py
+++ b/lumicks/pylake/image_stack.py
@@ -10,6 +10,7 @@
 from .adjustments import no_adjustment
 from .detail.image import make_image_title
 from .detail.plotting import get_axes, show_image
+from .detail.utilities import find_stack_level
 from .detail.widefield import TiffStack, _frame_timestamps_from_exposure_timestamps
 from .detail.imaging_mixins import FrameIndex, TiffExport, VideoExport

@@ -658,7 +659,8 @@ def frame_timestamp_ranges(self, *, include_dead_time=False):
                 "lag behind. This means that when you average data over the frame, some frames "
                 "after the switch may take an incorrect exposure time into account in the "
                 "averaging."
-            )
+            ),
+            stacklevel=find_stack_level(),
         )

         return frame_timestamps

diff --git a/lumicks/pylake/kymo.py b/lumicks/pylake/kymo.py
index f1d10014a..daddb75c6 100644
--- a/lumicks/pylake/kymo.py
+++ b/lumicks/pylake/kymo.py
@@ -15,7 +15,7 @@
 from .detail.confocal import ScanAxis, ScanMetaData, ConfocalImage
 from .detail.plotting import get_axes, show_image
 from .detail.timeindex import to_timestamp
-from .detail.utilities import method_cache
+from .detail.utilities import method_cache, find_stack_level
 from .detail.bead_cropping import find_beads_template, find_beads_brightness


@@ -279,8 +279,10 @@ def _fix_incorrect_start(self):
         self.start = seek_timestamp_next_line(self.infowave[self.start :])
         self._cache = {}
         warnings.warn(
-            "Start of the kymograph was truncated. Omitting the truncated first line.",
-            RuntimeWarning,
+            RuntimeWarning(
+                "Start of the kymograph was truncated. Omitting the truncated first line."
+            ),
+            stacklevel=find_stack_level(),
         )

     def _to_spatial(self, data):
@@ -475,7 +477,10 @@ def set_aspect_ratio(axis, ar):
             )

         warnings.warn(
-            RuntimeWarning("Using downsampled force since high frequency force is unavailable.")
+            RuntimeWarning(
+                "Using downsampled force since high frequency force is unavailable."
+            ),
+            stacklevel=find_stack_level(),
         )

         time_ranges = self.line_timestamp_ranges(include_dead_time=False)

diff --git a/lumicks/pylake/kymotracker/detail/msd_estimation.py b/lumicks/pylake/kymotracker/detail/msd_estimation.py
index 370a63e8e..4003c99af 100644
--- a/lumicks/pylake/kymotracker/detail/msd_estimation.py
+++ b/lumicks/pylake/kymotracker/detail/msd_estimation.py
@@ -5,6 +5,8 @@
 import numpy as np
 import numpy.typing as npt

+from ...detail.utilities import find_stack_level
+

 @dataclass(frozen=True)
 class DiffusionEstimate:
@@ -472,7 +474,10 @@ def _diffusion_gls(lag_idx, mean_squared_displacements, num_points, tolerance=1e
     def fallback(warning_message):
         """Fallback method if the GLS fails"""
-        warnings.warn(RuntimeWarning(f"{warning_message} Reverting to two-point OLS."))
+        warnings.warn(
+            RuntimeWarning(f"{warning_message} Reverting to two-point OLS."),
+            stacklevel=find_stack_level(),
+        )
         return _diffusion_ols(lag_idx[:2], mean_squared_displacements[:2], num_points)

     # Since the covariance matrix depends on the parameters for the intercept and slope, we obtain
@@ -600,7 +605,7 @@ def estimate_diffusion_constant_simple(
                 "`help(lk.refine_tracks_centroid)` or `help(lk.refine_tracks_gaussian)` for "
                 "more information."
             ),
-            stacklevel=2,
+            stacklevel=find_stack_level(),
         )

     frame_lags, msd = calculate_msd(frame_idx, coordinate, max_lag)
@@ -718,7 +723,8 @@ def determine_optimal_points(frame_idx, coordinate, max_iterations=100):
             RuntimeWarning(
                 "Your tracks have missing frames. Note that this can lead to a suboptimal "
                 "estimate of the optimal number of lags when using OLS."
-            )
+            ),
+            stacklevel=find_stack_level(),
         )

     num_slope = max(2, len(coordinate) // 10)  # Need at least two points for a linear regression!
@@ -750,7 +756,8 @@ def determine_optimal_points(frame_idx, coordinate, max_iterations=100):
             return num_slope, num_intercept

     warnings.warn(
-        RuntimeWarning("Warning, maximum number of iterations exceeded. Returning best solution.")
+        RuntimeWarning("Warning, maximum number of iterations exceeded. Returning best solution."),
+        stacklevel=find_stack_level(),
     )
     return num_slope, num_intercept
@@ -1129,7 +1136,8 @@ def _determine_optimal_points_ensemble(frame_lags, msds, n_coord, max_iterations
             return num_slope

     warnings.warn(
-        RuntimeWarning("Warning, maximum number of iterations exceeded. Returning best solution.")
+        RuntimeWarning("Warning, maximum number of iterations exceeded. Returning best solution."),
+        stacklevel=find_stack_level(),
     )
     return num_slope
@@ -1143,7 +1151,8 @@ def ensemble_ols(kymotracks, max_lag):
         warnings.warn(
             RuntimeWarning(
                 "Your tracks have missing frames. Note that this can lead to a suboptimal estimates"
-            )
+            ),
+            stacklevel=find_stack_level(),
         )

     optimal_lags = (

diff --git a/lumicks/pylake/kymotracker/kymotrack.py b/lumicks/pylake/kymotracker/kymotrack.py
index 5522c72f9..e774c9e74 100644
--- a/lumicks/pylake/kymotracker/kymotrack.py
+++ b/lumicks/pylake/kymotracker/kymotrack.py
@@ -4,7 +4,7 @@
 from copy import copy

 from ..__about__ import __version__
-from ..detail.utilities import replace_key_aliases
+from ..detail.utilities import find_stack_level, replace_key_aliases
 from .detail.peakfinding import _sum_track_signal
 from ..population.dwelltime import DwelltimeModel
 from .detail.msd_estimation import *
@@ -154,7 +154,8 @@ def store_column(column_title, format_string, new_data):
                 "greater or equal to the minimum length used for the original tracking. For more "
                 "information refer to "
                 "https://lumicks-pylake.readthedocs.io/en/latest/tutorial/nbwidgets.html#migrating-old-track-files "
-            )
+            ),
+            stacklevel=find_stack_level(),
         )

     version_header = f"Exported with pylake v{__version__} | track coordinates v4\n"
@@ -207,7 +208,7 @@ def import_kymotrackgroup_from_csv(filename, kymo, channel, delimiter=";"):
                 "File contains non-integer time indices; round-off errors may have occurred "
                 "when loading the data"
             ),
-            stacklevel=2,
+            stacklevel=find_stack_level(),
         )

     def create_track(time, coord, min_length=None):
@@ -226,7 +227,8 @@ def create_track(time, coord, min_length=None):
                 "greater or equal to the minimum length used for the original tracking. For more "
                 "information refer to "
                 "https://lumicks-pylake.readthedocs.io/en/latest/tutorial/nbwidgets.html#migrating-old-track-files"
-            )
+            ),
+            stacklevel=find_stack_level(),
         )
         min_duration_field = "minimum_length (-)"
     else:
@@ -592,7 +594,7 @@ def sample_from_image(self, num_pixels, reduce=np.sum, *, correct_origin=None):
                     "ensure backward compatibility. To silence this warning use "
                     "`correct_origin=False`."
                 ),
-                stacklevel=2,
+                stacklevel=find_stack_level(),
             )

         # Time and coordinates are being cast to an integer since we use them to index into a data
@@ -905,7 +907,8 @@ def estimate_diffusion(
             RuntimeWarning(
                 "Motion blur cannot be taken into account for this type of Kymo. As a "
                 "consequence, not all estimates will be available."
-            )
+            ),
+            stacklevel=find_stack_level(),
         )
         blur = np.nan
@@ -1634,7 +1637,8 @@ def fit_binding_times(
                 "warning, but use the deprecated behavior use `observed_minimum=True`. To "
                 "enable the recommended method of estimating the minimum observable dwell "
                 "time use `observed_minimum=False`."
-            )
+            ),
+            stacklevel=find_stack_level(),
         )
         observed_minimum = True
@@ -1649,7 +1653,8 @@ def fit_binding_times(
                 "version of Pylake (`2.0.0`), using the discrete model will become the "
                 "default. Until then, the continuous model is still used for backward "
                 "compatibility."
-            )
+            ),
+            stacklevel=find_stack_level(),
         )
         discrete_model = False
@@ -1678,7 +1683,7 @@ def fit_binding_times(
                 "dropped from the analysis. If you wish to not see this warning, filter the "
                 "tracks with `lk.filter_tracks` with a minimum length of 2 samples."
             ),
-            stacklevel=2,
+            stacklevel=find_stack_level(),
         )

     if dwelltimes.size == 0:
@@ -1861,10 +1866,11 @@ def estimate_diffusion(self, method, *args, min_length=None, **kwargs):
         if n_discarded and min_length is None:
             warnings.warn(
-                f"{n_discarded} tracks were shorter than the specified min_length "
-                "and discarded from the analysis.",
-                RuntimeWarning,
-                stacklevel=2,
+                RuntimeWarning(
+                    f"{n_discarded} tracks were shorter than the specified min_length and "
+                    f"discarded from the analysis.",
+                ),
+                stacklevel=find_stack_level(),
             )

         return [k.estimate_diffusion(method, *args, **kwargs) for k in filtered_tracks]
@@ -1925,6 +1931,7 @@ def ensemble_diffusion(self, method, *, max_lag=None):
                     "Localization variances cannot be reliably calculated for an ensemble of "
                     "tracks from kymographs with different line times or pixel sizes."
                 ),
+                stacklevel=find_stack_level(),
             )
             is_valid = False
         return ensemble_cve(self, calculate_localization_var=is_valid)

diff --git a/lumicks/pylake/kymotracker/kymotracker.py b/lumicks/pylake/kymotracker/kymotracker.py
index 4b21d6b24..d84441f57 100644
--- a/lumicks/pylake/kymotracker/kymotracker.py
+++ b/lumicks/pylake/kymotracker/kymotracker.py
@@ -4,6 +4,7 @@
 import numpy as np

 from .kymotrack import KymoTrack, KymoTrackGroup
+from ..detail.utilities import find_stack_level
 from .detail.peakfinding import find_kymograph_peaks, refine_peak_based_on_moment
 from .detail.gaussian_mle import gaussian_mle_1d, overlapping_pixels
 from .detail.trace_line_2d import detect_lines, points_to_line_segments
@@ -564,7 +565,8 @@ def refine_tracks_gaussian(
                 "track when a track momentarily disappears. When using the overlap strategy "
                 '"multiple" individual overlapping Gaussians could switch positions leading to'
                 "spurious track crossings."
-            )
+            ),
+            stacklevel=find_stack_level(),
         )

     if len(tracks._kymos) > 1:
@@ -650,7 +652,10 @@ def refine_tracks_gaussian(
     if overlap_count and overlap_strategy != "ignore":
         warnings.warn(
-            f"There were {overlap_count} instances of overlapped tracks ignored while fitting."
+            UserWarning(
+                f"There were {overlap_count} instances of overlapped tracks ignored while fitting."
+            ),
+            stacklevel=find_stack_level(),
         )

     return KymoTrackGroup(

diff --git a/lumicks/pylake/nb_widgets/correlated_plot.py b/lumicks/pylake/nb_widgets/correlated_plot.py
index 46d49bfbf..fc4ee01c5 100644
--- a/lumicks/pylake/nb_widgets/correlated_plot.py
+++ b/lumicks/pylake/nb_widgets/correlated_plot.py
@@ -2,6 +2,8 @@

 import numpy as np

+from ..detail.utilities import find_stack_level
+

 def plot_correlated(
     channel_slice,
@@ -52,7 +54,10 @@ def plot_correlated(
     downsampled = channel_slice.downsampled_over(frame_timestamps, where="left", reduce=reduce)

     if len(downsampled.timestamps) < len(frame_timestamps):
-        warnings.warn("Only subset of time range available for selected channel")
+        warnings.warn(
+            UserWarning("Only subset of time range available for selected channel"),
+            stacklevel=find_stack_level(),
+        )

     plot_data = get_plot_data(frame)
     aspect_ratio = plot_data.shape[0] / np.max([plot_data.shape])

diff --git a/lumicks/pylake/nb_widgets/kymotracker_widgets.py b/lumicks/pylake/nb_widgets/kymotracker_widgets.py
index c5032c555..6d5567704 100644
--- a/lumicks/pylake/nb_widgets/kymotracker_widgets.py
+++ b/lumicks/pylake/nb_widgets/kymotracker_widgets.py
@@ -12,6 +12,8 @@
 from lumicks.pylake.nb_widgets.detail.mouse import MouseDragCallback
 from lumicks.pylake.nb_widgets.detail.undostack import UndoStack

+from ..detail.utilities import find_stack_level
+

 class KymoWidget:
     def __init__(
@@ -343,7 +345,8 @@ def save_tracks(self, filename, delimiter=";", sampling_width=None):
                 "when opening the kymotracking widget. The old (incorrect) behavior is "
                 "maintained until the next major release to ensure backward compatibility. "
                 "To silence this warning use `correct_origin=False`."
- ) + ), + stacklevel=find_stack_level(), ) self._set_label( "warning", diff --git a/lumicks/pylake/piezo_tracking/piezo_tracking.py b/lumicks/pylake/piezo_tracking/piezo_tracking.py index 72e5758cd..3e20302db 100644 --- a/lumicks/pylake/piezo_tracking/piezo_tracking.py +++ b/lumicks/pylake/piezo_tracking/piezo_tracking.py @@ -4,6 +4,7 @@ from ..channel import Slice from .baseline import ForceBaseLine +from ..detail.utilities import find_stack_level __all__ = ["DistanceCalibration", "PiezoTrackingCalibration", "PiezoForceDistance"] @@ -34,7 +35,8 @@ def __init__(self, trap_position, camera_distance, degree=1): RuntimeWarning( "There were frames with missing video tracking: " f"{missed_frames} data point(s) were omitted." - ) + ), + stacklevel=find_stack_level(), ) coeffs = np.polyfit(self.position, self.distance, degree) diff --git a/lumicks/pylake/population/detail/hmm.py b/lumicks/pylake/population/detail/hmm.py index 962413e0b..d534887c2 100644 --- a/lumicks/pylake/population/detail/hmm.py +++ b/lumicks/pylake/population/detail/hmm.py @@ -17,6 +17,7 @@ from .fit_info import PopulationFitInfo from ...channel import Slice, Continuous from .validators import col, row +from ...detail.utilities import find_stack_level def normalize_rows(matrix): @@ -152,8 +153,11 @@ def baum_welch(data, model, tol, max_iter): if not converged: warnings.warn( - f"Model has not converged after {_itr} iterations. Last log likelihood step " - f"was {delta:0.4e}." + RuntimeWarning( + f"Model has not converged after {_itr} iterations. Last log likelihood step " + f"was {delta:0.4e}." + ), + stacklevel=find_stack_level(), ) # free parameters; pi and each row of A constrained to sum to 1