diff --git a/performance-metrics/Makefile b/performance-metrics/Makefile
index cce4fd7d93a..fd4dd421ad2 100644
--- a/performance-metrics/Makefile
+++ b/performance-metrics/Makefile
@@ -25,4 +25,8 @@ clean:
 .PHONY: wheel
 wheel:
 	$(python) setup.py $(wheel_opts) bdist_wheel
-	rm -rf build
\ No newline at end of file
+	rm -rf build
+
+.PHONY: test
+test:
+	$(pytest) tests
\ No newline at end of file
diff --git a/performance-metrics/Pipfile b/performance-metrics/Pipfile
index df5a3de89d6..a71db703e33 100644
--- a/performance-metrics/Pipfile
+++ b/performance-metrics/Pipfile
@@ -5,15 +5,17 @@ name = "pypi"
 
 [packages]
 opentrons-shared-data = {file = "../shared-data/python", editable = true}
+performance-metrics = {file = ".", editable = true}
 
 [dev-packages]
-pytest = "==7.2.2"
+pytest = "==7.4.4"
 mypy = "==1.8.0"
 flake8 = "==7.0.0"
 flake8-annotations = "~=3.0.1"
 flake8-docstrings = "~=1.7.0"
 flake8-noqa = "~=1.4.0"
 black = "==22.3.0"
+pytest-asyncio = "~=0.23.0"
 
 [requires]
 python_version = "3.10"
diff --git a/performance-metrics/Pipfile.lock b/performance-metrics/Pipfile.lock
index 61556f3dee9..5c836231b7e 100644
--- a/performance-metrics/Pipfile.lock
+++ b/performance-metrics/Pipfile.lock
@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "fa95804888e2d45ce401c98bafc9b543cb6e1afe0a36713660d3f5517ac02b8e"
+            "sha256": "d811fa2b7dca8a5be8b2dba79ab7200243b2e10fb65f9ee221623f2710b24372"
         },
         "pipfile-spec": 6,
         "requires": {
@@ -37,6 +37,10 @@
             "file": "../shared-data/python",
             "markers": "python_version >= '3.8'"
         },
+        "performance-metrics": {
+            "editable": true,
+            "file": "."
+        },
         "pydantic": {
             "hashes": [
                 "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de",
@@ -333,12 +337,21 @@
         },
         "pytest": {
             "hashes": [
-                "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e",
-                "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"
+                "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280",
+                "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"
             ],
             "index": "pypi",
             "markers": "python_version >= '3.7'",
-            "version": "==7.2.2"
+            "version": "==7.4.4"
+        },
+        "pytest-asyncio": {
+            "hashes": [
+                "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a",
+                "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"
+            ],
+            "index": "pypi",
+            "markers": "python_version >= '3.8'",
+            "version": "==0.23.6"
         },
         "snowballstemmer": {
             "hashes": [
diff --git a/performance-metrics/src/performance_metrics/datashapes.py b/performance-metrics/src/performance_metrics/datashapes.py
new file mode 100644
index 00000000000..6c855680886
--- /dev/null
+++ b/performance-metrics/src/performance_metrics/datashapes.py
@@ -0,0 +1,18 @@
+"""Defines the data classes used by the performance metrics module."""
+
+import dataclasses
+
+
+@dataclasses.dataclass(frozen=True)
+class RawDurationData:
+    """Represents raw duration data for a process or function.
+
+    Attributes:
+    - func_start (int): The real-time (CLOCK_REALTIME) timestamp, in nanoseconds, at which the function started.
+    - duration_start (int): The performance counter value at the start of the duration measurement.
+    - duration_end (int): The performance counter value at the end of the duration measurement.
+ """ + + func_start: int + duration_start: int + duration_end: int diff --git a/performance-metrics/src/performance_metrics/function_timer.py b/performance-metrics/src/performance_metrics/function_timer.py new file mode 100644 index 00000000000..2cda334b797 --- /dev/null +++ b/performance-metrics/src/performance_metrics/function_timer.py @@ -0,0 +1,86 @@ +"""This module offers a mechanism for measuring and storing the execution durations of both synchronous and asynchronous functions. + +The FunctionTimer class is intended to be used as a decorator to wrap functions and measure their execution times. It utilizes `perf_counter_ns` for high-resolution performance counter measurements and `clock_gettime_ns(CLOCK_REALTIME)` for real-time clock measurements. The use of `perf_counter_ns` ensures the highest possible resolution timer, which is essential for accurate duration measurement, especially for short-running functions. `clock_gettime_ns(CLOCK_REALTIME)` is used to capture the actual start time in real-world time, which is useful for correlating events or logs with other time-based data. + +""" + +from time import perf_counter_ns, clock_gettime_ns, CLOCK_REALTIME +from types import TracebackType +from typing import ( + Type, + Tuple, +) +from performance_metrics.datashapes import RawDurationData +from contextlib import AbstractAsyncContextManager, AbstractContextManager + + +class FunctionTimer(AbstractAsyncContextManager, AbstractContextManager): # type: ignore + """A decorator class for measuring and storing the execution duration of functions. + + It supports both synchronous and asynchronous functions. + """ + + def __init__(self) -> None: + self._func_start_time: int | None = None + self._duration_start_time: int | None = None + self._duration_end_time: int | None = None + + def _begin_timing(self) -> Tuple[int, int]: + """Starts the timing process, capturing both the current real-time and a high-resolution performance counter. + + Returns: + A tuple containing the current real-time (`clock_gettime_ns(CLOCK_REALTIME)`) and an initial performance counter (`perf_counter_ns()`). Both values are measured in nanoseconds. The combination of these counters allows us to accurately measure execution durations while also correlating these measurements to real-world time. + """ + return clock_gettime_ns(CLOCK_REALTIME), perf_counter_ns() + + def _end_timing(self) -> int: + """Ends the timing process, capturing the final high-resolution performance counter. + + Returns: + The final performance counter, measured in nanoseconds. This value is captured using `perf_counter_ns()` to ensure consistency with the initial performance counter, providing an accurate duration measurement. 
+ """ + return perf_counter_ns() + + def __enter__(self) -> "FunctionTimer": + """Set the start time of the function.""" + self._func_start_time, self._duration_start_time = self._begin_timing() + return self + + def __exit__( + self, + exc_type: Type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Set the end time of the function.""" + self._duration_end_time = self._end_timing() + + async def __aenter__(self) -> "FunctionTimer": + """Set the start time of the function.""" + self._func_start_time, self._duration_start_time = self._begin_timing() + return self + + async def __aexit__( + self, + exc_type: Type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Set the end time of the function.""" + self._duration_end_time = self._end_timing() + + def get_data(self) -> RawDurationData: + """Returns the duration data for the function. + + Returns: + RawDurationData: The duration data for the function. + """ + assert self._func_start_time is not None + assert self._duration_start_time is not None + assert self._duration_end_time is not None + + return RawDurationData( + func_start=self._func_start_time, + duration_start=self._duration_start_time, + duration_end=self._duration_end_time, + ) diff --git a/performance-metrics/tests/performance_metrics/test_function_timer.py b/performance-metrics/tests/performance_metrics/test_function_timer.py new file mode 100644 index 00000000000..8a42c1ac203 --- /dev/null +++ b/performance-metrics/tests/performance_metrics/test_function_timer.py @@ -0,0 +1,244 @@ +"""This module contains tests for timing the execution of synchronous and asynchronous functions using the FunctionTimer class. + +It includes functions and their variants that raise exceptions to simulate errors during execution. Each test function is designed +to ensure the FunctionTimer accurately measures execution times and handles exceptions correctly for both synchronous and asynchronous +calls. This serves as a comprehensive suite to validate the functionality of FunctionTimer in various scenarios. 
+""" +import time +import asyncio +import pytest +from performance_metrics.function_timer import FunctionTimer + + +def synchronous_function() -> None: + """Prints a message indicating a synchronous function is running.""" + print("synchronous_function") + + +def synchronous_function_with_exception() -> None: + """Prints a message then raises an exception to simulate error in synchronous execution.""" + print("synchronous_function_with_exception") + raise Exception("An exception") + + +async def asynchronous_function() -> None: + """Prints a message indicating an asynchronous function is running.""" + print("asynchronous_function") + + +async def asynchronous_function_with_exception() -> None: + """Prints a message then raises an exception to simulate error in asynchronous execution.""" + print("asynchronous_function_with_exception") + raise Exception("An exception") + + +async def long_running_task() -> None: + """Simulates a longer running asynchronous task.""" + await asyncio.sleep(2) + + +async def short_running_task() -> None: + """Simulates a shorter running asynchronous task.""" + await asyncio.sleep(0.5) + + +################## +# TEST FUNCTIONS # +################## + + +def test_synchronous_function() -> None: + """Tests that the synchronous function is timed correctly by FunctionTimer.""" + with FunctionTimer() as timer: + synchronous_function() + + duration_data = timer.get_data() + assert duration_data.duration_start < duration_data.duration_end + + +def test_synchronous_function_with_exception() -> None: + """Tests that FunctionTimer can handle exceptions in synchronous functions correctly.""" + with pytest.raises(Exception): + with FunctionTimer() as timer: + synchronous_function_with_exception() + + duration_data = timer.get_data() + assert duration_data.duration_start < duration_data.duration_end + + +async def test_asynchronous_function() -> None: + """Tests that the asynchronous function is timed correctly by FunctionTimer.""" + async with FunctionTimer() as timer: + await asynchronous_function() + + duration_data = timer.get_data() + assert duration_data.duration_start < duration_data.duration_end + + +async def test_asynchronous_function_with_exception() -> None: + """Tests that FunctionTimer can handle exceptions in asynchronous functions correctly.""" + with pytest.raises(Exception): + async with FunctionTimer() as timer: + await asynchronous_function_with_exception() + + duration_data = timer.get_data() + assert duration_data.duration_start < duration_data.duration_end + + +async def test_asynchronous_and_synchronous_function() -> None: + """Tests the timing of a mixed sequence of synchronous and asynchronous functions with FunctionTimer.""" + async with FunctionTimer() as timer: + synchronous_function() + await asynchronous_function() + + duration_data = timer.get_data() + assert duration_data.duration_start < duration_data.duration_end + + +async def test_synchronous_and_asynchronous_function_with_exception() -> None: + """Tests that FunctionTimer can handle a mixed sequence of functions, including an exception, correctly.""" + with pytest.raises(Exception): + async with FunctionTimer() as timer: + synchronous_function_with_exception() + await asynchronous_function() + + duration_data = timer.get_data() + assert duration_data.duration_start < duration_data.duration_end + + +async def test_nested_synchronous_functions() -> None: + """Tests that the FunctionTimer correctly times nested synchronous functions.""" + with FunctionTimer() as outer_timer: + 
+        synchronous_function()
+        with FunctionTimer() as inner_timer:
+            synchronous_function()
+
+    outer_duration_data = outer_timer.get_data()
+    inner_duration_data = inner_timer.get_data()
+
+    assert outer_duration_data.duration_start < outer_duration_data.duration_end
+    assert inner_duration_data.duration_start < inner_duration_data.duration_end
+    assert outer_duration_data.duration_start < inner_duration_data.duration_start
+    assert outer_duration_data.duration_end >= inner_duration_data.duration_end
+
+
+async def test_timing_synchronous_function_nested_inside_async_function() -> None:
+    """Tests that the FunctionTimer correctly times a synchronous function nested inside an asynchronous context manager."""
+    async with FunctionTimer() as async_timer:
+        await asynchronous_function()
+        with FunctionTimer() as sync_timer:
+            synchronous_function()
+
+    async_duration_data = async_timer.get_data()
+    sync_duration_data = sync_timer.get_data()
+
+    assert async_duration_data.duration_start < async_duration_data.duration_end
+    assert sync_duration_data.duration_start < sync_duration_data.duration_end
+    assert async_duration_data.duration_start < sync_duration_data.duration_start
+    assert async_duration_data.duration_end >= sync_duration_data.duration_end
+
+
+def test_instantaneous_function() -> None:
+    """Tests that the FunctionTimer can measure the time of an almost instantaneous function."""
+
+    def instantaneous_function() -> None:
+        """A function that executes almost instantaneously."""
+        pass
+
+    with FunctionTimer() as timer:
+        instantaneous_function()
+
+    duration_data = timer.get_data()
+    assert duration_data.duration_start <= duration_data.duration_end
+
+
+def test_known_duration_function() -> None:
+    """Tests the FunctionTimer's accuracy by comparing its measurement with a known sleep duration."""
+    sleep_duration = 0.5
+
+    def known_duration_function() -> None:
+        time.sleep(sleep_duration)
+
+    with FunctionTimer() as timer:
+        known_duration_function()
+
+    duration_data = timer.get_data()
+    measured_duration_nanoseconds = (
+        duration_data.duration_end - duration_data.duration_start
+    )
+    measured_duration_seconds = measured_duration_nanoseconds / 1_000_000_000
+    assert abs(measured_duration_seconds - sleep_duration) < 0.05
+
+
+async def test_async_functions_in_parallel() -> None:
+    """Tests timing of multiple asynchronous functions executed in parallel."""
+
+    async def async_sleep_function(duration: float) -> None:
+        await asyncio.sleep(duration)
+
+    async with FunctionTimer() as timer:
+        await asyncio.gather(
+            async_sleep_function(0.5),
+            async_sleep_function(1),
+            async_sleep_function(1.5),
+        )
+
+    duration_data = timer.get_data()
+    assert duration_data.duration_start < duration_data.duration_end
+
+
+async def test_function_timer_with_async_contexts() -> None:
+    """Tests that the FunctionTimer context manager correctly times nested, overlapping asynchronous tasks."""
+    # 1. f1_timer's __aenter__ is called
+    # 2. long_running_task starts
+    # 3. long_running_task finishes
+    # 4. f2_timer's __aenter__ is called
+    # 5. short_running_task starts
+    # 6. short_running_task finishes
+    # 7. f2_timer's __aexit__ is called
+    # 8. f1_timer's __aexit__ is called
+
+    async with FunctionTimer() as f1_timer:
+        await long_running_task()
+
+        async with FunctionTimer() as f2_timer:
+            await short_running_task()
+
+    f1_duration_data = f1_timer.get_data()
+    f2_duration_data = f2_timer.get_data()
+
+    assert f1_duration_data.duration_start < f1_duration_data.duration_end
+    assert f2_duration_data.duration_start < f2_duration_data.duration_end
+    assert f1_duration_data.duration_start < f2_duration_data.duration_start
+    assert f1_duration_data.duration_end >= f2_duration_data.duration_end
+
+
+def test_direct_use_without_context_manager() -> None:
+    """Tests the behavior of FunctionTimer when used directly, without a context manager block.
+
+    Verifies that the start and end times are not set and that an AssertionError is raised when attempting to access them.
+    """
+    timer = FunctionTimer()
+    assert (
+        timer._func_start_time is None
+    ), "_func_start_time should be None when not used within a context manager"
+    assert (
+        timer._duration_start_time is None
+    ), "_duration_start_time should be None when not used within a context manager"
+    assert (
+        timer._duration_end_time is None
+    ), "_duration_end_time should be None when not used within a context manager"
+
+    with pytest.raises(AssertionError):
+        timer.get_data()
+
+
+def test_calling_get_data_before_context_manager_finishes() -> None:
+    """Tests that calling get_data before the context manager has exited raises an AssertionError.
+
+    This simulates get_data being called prematurely, ensuring the timer enforces correct usage patterns.
+    """
+    with pytest.raises(AssertionError):
+        with FunctionTimer() as timer:
+            synchronous_function()
+            timer.get_data()
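
Reviewer note: the snippet below is a minimal usage sketch of the API this diff introduces, not part of the patch itself. It relies only on what the diff defines (FunctionTimer, get_data, and the RawDurationData fields func_start, duration_start, duration_end); the work and async_work helpers are hypothetical placeholders.

# usage_sketch.py -- illustrative only, not included in this patch.
import asyncio

from performance_metrics.function_timer import FunctionTimer


def work() -> int:
    """A hypothetical stand-in for a synchronous workload."""
    return sum(range(1_000_000))


async def async_work() -> None:
    """A hypothetical stand-in for an asynchronous workload."""
    await asyncio.sleep(0.1)


def time_sync_call() -> None:
    # Synchronous usage: the timer brackets the call via __enter__/__exit__.
    with FunctionTimer() as timer:
        work()
    data = timer.get_data()
    # duration_start/duration_end are perf_counter_ns() values; func_start is
    # a CLOCK_REALTIME timestamp. All three are in nanoseconds.
    elapsed_s = (data.duration_end - data.duration_start) / 1_000_000_000
    print(f"sync call took {elapsed_s:.6f}s, started at {data.func_start} ns (epoch)")


async def time_async_call() -> None:
    # Asynchronous usage: the same API via __aenter__/__aexit__.
    async with FunctionTimer() as timer:
        await async_work()
    data = timer.get_data()
    print(f"async call took {(data.duration_end - data.duration_start) / 1e9:.6f}s")


if __name__ == "__main__":
    time_sync_call()
    asyncio.run(time_async_call())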