Refactor optimisation args (#551)
* refactor: moves common PINTS optimisation keys from kwargs to optional args; aligns `maxiter` and `popsize` to standardised names

* fix: benchmarks, coverage

fix: benchmarks

* fix: attribute selection in Optimisation class

* adds changelog entry
BradyPlanden committed Dec 4, 2024
1 parent 3080e46 commit 3daba29
Showing 10 changed files with 501 additions and 196 deletions.
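For context, a minimal sketch of how the refactored interface is called after this change, based on the new `BasePintsOptimiser.__init__` signature in the diff below. The `cost` object is assumed to be an already-constructed PyBOP cost; its construction is elided here.

```python
import pybop

# `cost` is assumed to be a pre-built pybop cost function, e.g. constructed
# from a model, dataset, and parameters (omitted for brevity).
optim = pybop.Optimisation(
    cost,
    optimiser=pybop.XNES,         # defaults to XNES when no optimiser is given
    max_iterations=125,           # standardised name ("maxiter" is auto-renamed to this)
    max_unchanged_iterations=20,  # now a named optional arg rather than a kwargs key
    parallel=False,
)
results = optim.run()
print(results.x)
```

Keys such as `use_f_guessed`, `max_evaluations`, and `threshold` still travel through `**optimiser_kwargs`, as the refactored docstring below documents.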
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -14,6 +14,7 @@
## Optimisations

- [#512](https://github.com/pybop-team/PyBOP/pull/513) - Refactors `LogPosterior` with attributes pointing to composed likelihood object.
+ - [#551](https://github.com/pybop-team/PyBOP/pull/551) - Refactors Optimiser arguments, `population_size` and `max_iterations` as default args, improves optimiser docstrings

## Bug Fixes

2 changes: 1 addition & 1 deletion benchmarks/benchmark_optim_construction.py
@@ -19,7 +19,7 @@ def setup(self, model, parameter_set, optimiser):
Args:
model (pybop.Model): The model class to be benchmarked.
parameter_set (str): The name of the parameter set to be used.
- optimiser (pybop.Optimiser): The optimizer class to be used.
+ optimiser (pybop.Optimiser): The optimiser class to be used.
"""
# Set random seed
set_random_seed()
2 changes: 1 addition & 1 deletion benchmarks/benchmark_parameterisation.py
@@ -124,4 +124,4 @@ def time_optimiser_ask(self, model, parameter_set, optimiser):
optimiser (pybop.Optimiser): The optimizer class being used.
"""
if optimiser not in [pybop.SciPyMinimize, pybop.SciPyDifferentialEvolution]:
- self.optim.pints_optimiser.ask()
+ self.optim.optimiser.ask()
4 changes: 2 additions & 2 deletions benchmarks/benchmark_track_parameterisation.py
@@ -29,7 +29,7 @@ def setup(self, model, parameter_set, optimiser):
Args:
model (pybop.Model): The model class to be benchmarked.
parameter_set (str): The name of the parameter set to be used.
- optimiser (pybop.Optimiser): The optimizer class to be used.
+ optimiser (pybop.Optimiser): The optimiser class to be used.
"""
# Set random seed
set_random_seed()
@@ -121,7 +121,7 @@ def results_tracking(self, model, parameter_set, optimiser):
Args:
model (pybop.Model): The model class being benchmarked (unused).
parameter_set (str): The name of the parameter set being used (unused).
- optimiser (pybop.Optimiser): The optimizer class being used (unused).
+ optimiser (pybop.Optimiser): The optimiser class being used (unused).
"""
results = self.optim.run()
return results.x
2 changes: 1 addition & 1 deletion examples/notebooks/optimiser_calibration.ipynb
@@ -610,7 +610,7 @@
"source": [
"for optim, sigma in zip(optims, sigmas):\n",
" print(\n",
" f\"| Sigma: {sigma} | Num Iterations: {optim.result.n_iterations} | Best Cost: {optim.pints_optimiser.f_best()} | Results: {optim.pints_optimiser.x_best()} |\"\n",
" f\"| Sigma: {sigma} | Num Iterations: {optim.result.n_iterations} | Best Cost: {optim.optimiser.f_best()} | Results: {optim.optimiser.x_best()} |\"\n",
" )"
]
},
152 changes: 74 additions & 78 deletions pybop/optimisers/base_pints_optimiser.py
@@ -21,6 +21,18 @@ class BasePintsOptimiser(BaseOptimiser):
Parameters
----------
cost : callable
The cost function to be minimized.
pints_optimiser : class
The PINTS optimiser class to be used.
max_iterations : int, optional
Maximum number of iterations for the optimisation.
min_iterations : int, optional (default=2)
Minimum number of iterations before termination.
max_unchanged_iterations : int, optional (default=15)
Maximum number of iterations without improvement before termination.
parallel : bool, optional (default=False)
Whether to run the optimisation in parallel.
**optimiser_kwargs : optional
Valid PINTS option keys and their values, for example:
x0 : array_like
@@ -30,27 +42,51 @@
bounds : dict
A dictionary with 'lower' and 'upper' keys containing arrays for lower and
upper bounds on the parameters.
use_f_guessed : bool
Whether to track guessed function values.
absolute_tolerance : float
Absolute tolerance for convergence checking.
relative_tolerance : float
Relative tolerance for convergence checking.
max_evaluations : int
Maximum number of function evaluations.
threshold : float
Threshold value for early termination.
"""

- def __init__(self, cost, pints_optimiser, **optimiser_kwargs):
+ def __init__(
+     self,
+     cost,
+     pints_optimiser,
+     max_iterations: int = None,
+     min_iterations: int = 2,
+     max_unchanged_iterations: int = 15,
+     parallel: bool = False,
+     **optimiser_kwargs,
+ ):
# First set attributes to default values
self._boundaries = None
self._needs_sensitivities = None
self._use_f_guessed = None
self._parallel = False
self._n_workers = 1
self._callback = None
- self._max_iterations = None
- self._min_iterations = 2
- self._unchanged_max_iterations = 15
+ self.set_parallel(parallel)
+ self.set_max_iterations(max_iterations)
+ self.set_min_iterations(min_iterations)
+ self._unchanged_max_iterations = max_unchanged_iterations
self._absolute_tolerance = 1e-5
self._relative_tolerance = 1e-2
self._max_evaluations = None
self._threshold = None
self._evaluations = None
self._iterations = None
+ self.option_methods = {
+     "use_f_guessed": self.set_f_guessed_tracking,
+     "max_evaluations": self.set_max_evaluations,
+     "threshold": self.set_threshold,
+ }

- self.pints_optimiser = pints_optimiser
+ self.optimiser = pints_optimiser
super().__init__(cost, **optimiser_kwargs)

def _set_up_optimiser(self):
@@ -61,47 +97,26 @@ def _set_up_optimiser(self):
self._sanitise_inputs()

# Create an instance of the PINTS optimiser class
- if issubclass(self.pints_optimiser, PintsOptimiser):
-     self.pints_optimiser = self.pints_optimiser(
+ if issubclass(self.optimiser, PintsOptimiser):
+     self.optimiser = self.optimiser(
self.x0, sigma0=self.sigma0, boundaries=self._boundaries
)
else:
-     raise ValueError(
-         "The pints_optimiser is not a recognised PINTS optimiser class."
-     )
+     raise ValueError("The optimiser is not a recognised PINTS optimiser class.")

# Check if sensitivities are required
- self._needs_sensitivities = self.pints_optimiser.needs_sensitivities()
-
- # Apply default maxiter
- self.set_max_iterations()
+ self._needs_sensitivities = self.optimiser.needs_sensitivities()

# Apply additional options and remove them from options
- key_list = list(self.unset_options.keys())
- for key in key_list:
-     if key == "use_f_guessed":
-         self.set_f_guessed_tracking(self.unset_options.pop(key))
-     elif key == "parallel":
-         self.set_parallel(self.unset_options.pop(key))
-     elif key == "max_iterations":
-         self.set_max_iterations(self.unset_options.pop(key))
-     elif key == "min_iterations":
-         self.set_min_iterations(self.unset_options.pop(key))
-     elif key == "max_unchanged_iterations":
-         max_unchanged_kwargs = {"iterations": self.unset_options.pop(key)}
-         if "absolute_tolerance" in self.unset_options.keys():
-             max_unchanged_kwargs["absolute_tolerance"] = self.unset_options.pop(
-                 "absolute_tolerance"
-             )
-         if "relative_tolerance" in self.unset_options.keys():
-             max_unchanged_kwargs["relative_tolerance"] = self.unset_options.pop(
-                 "relative_tolerance"
-             )
-         self.set_max_unchanged_iterations(**max_unchanged_kwargs)
-     elif key == "max_evaluations":
-         self.set_max_evaluations(self.unset_options.pop(key))
-     elif key == "threshold":
-         self.set_threshold(self.unset_options.pop(key))
+ max_unchanged_kwargs = {"iterations": self._unchanged_max_iterations}
+ for key, method in self.option_methods.items():
+     if key in self.unset_options:
+         method(self.unset_options.pop(key))
+
+ # Capture tolerance options
+ for tol_key in ["absolute_tolerance", "relative_tolerance"]:
+     if tol_key in self.unset_options:
+         max_unchanged_kwargs[tol_key] = self.unset_options.pop(tol_key)

def _sanitise_inputs(self):
"""
@@ -119,48 +134,29 @@ def _sanitise_inputs(self):
)
self.unset_options.pop("options")

# Check for duplicate keywords
expected_keys = ["max_iterations", "popsize"]
alternative_keys = ["maxiter", "population_size"]
for exp_key, alt_key in zip(expected_keys, alternative_keys):
if alt_key in self.unset_options.keys():
if exp_key in self.unset_options.keys():
raise Exception(
    f"The alternative {alt_key} option was passed in addition to the expected {exp_key} option."
)
else: # rename
self.unset_options[exp_key] = self.unset_options.pop(alt_key)

# Convert bounds to PINTS boundaries
if self.bounds is not None:
ignored_optimisers = (PintsGradientDescent, PintsAdam, PintsNelderMead)
- if issubclass(self.pints_optimiser, ignored_optimisers):
-     print(f"NOTE: Boundaries ignored by {self.pints_optimiser}")
+ if issubclass(self.optimiser, ignored_optimisers):
+     print(f"NOTE: Boundaries ignored by {self.optimiser}")
self.bounds = None
else:
- if issubclass(self.pints_optimiser, PintsPSO):
+ if issubclass(self.optimiser, PintsPSO):
if not all(
np.isfinite(value)
for sublist in self.bounds.values()
for value in sublist
):
raise ValueError(
f"Either all bounds or no bounds must be set for {self.pints_optimiser.__name__}."
f"Either all bounds or no bounds must be set for {self.optimiser.__name__}."
)
self._boundaries = PintsRectangularBoundaries(
self.bounds["lower"], self.bounds["upper"]
)

def name(self):
"""
Provides the name of the optimisation strategy.
Returns
-------
str
The name given by PINTS.
"""
return self.pints_optimiser.name()
"""Returns the name of the PINTS optimisation strategy."""
return self.optimiser.name()

def _run(self):
"""
@@ -211,8 +207,8 @@ def fun(x):

# For population based optimisers, don't use more workers than
# particles!
- if isinstance(self.pints_optimiser, PintsPopulationBasedOptimiser):
-     n_workers = min(n_workers, self.pints_optimiser.population_size())
+ if isinstance(self.optimiser, PintsPopulationBasedOptimiser):
+     n_workers = min(n_workers, self.optimiser.population_size())
evaluator = PintsParallelEvaluator(fun, n_workers=n_workers)
else:
evaluator = PintsSequentialEvaluator(fun)
@@ -231,17 +227,17 @@ def fun(x):
try:
while running:
# Ask optimiser for new points
- xs = self.pints_optimiser.ask()
+ xs = self.optimiser.ask()

# Evaluate points
fs = evaluator.evaluate(xs)

# Tell optimiser about function values
- self.pints_optimiser.tell(fs)
+ self.optimiser.tell(fs)

# Update the scores
- fb = self.pints_optimiser.f_best()
- fg = self.pints_optimiser.f_guessed()
+ fb = self.optimiser.f_best()
+ fg = self.optimiser.f_guessed()
fg_user = (fb, fg) if self.minimising else (-fb, -fg)

# Check for significant changes against the absolute and relative tolerance
@@ -260,7 +256,7 @@ def fun(x):
_fs = [x[0] for x in fs] if self._needs_sensitivities else fs
self.log_update(
x=xs,
- x_best=self.pints_optimiser.x_best(),
+ x_best=self.optimiser.x_best(),
cost=_fs if self.minimising else [-x for x in _fs],
cost_best=[fb] if self.minimising else [-fb],
)
@@ -313,7 +309,7 @@ def fun(x):
)

# Error in optimiser
- error = self.pints_optimiser.stop()
+ error = self.optimiser.stop()
if error:
running = False
halt_message = str(error)
@@ -329,7 +325,7 @@ def fun(x):
print("Current position:")

# Show current parameters
- x_user = self.pints_optimiser.x_guessed()
+ x_user = self.optimiser.x_guessed()
if self._transformation:
x_user = self._transformation.to_model(x_user)
for p in x_user:
@@ -347,11 +343,11 @@ def fun(x):

# Get best parameters
if self._use_f_guessed:
- x = self.pints_optimiser.x_guessed()
- f = self.pints_optimiser.f_guessed()
+ x = self.optimiser.x_guessed()
+ f = self.optimiser.f_guessed()
else:
- x = self.pints_optimiser.x_best()
- f = self.pints_optimiser.f_best()
+ x = self.optimiser.x_best()
+ f = self.optimiser.f_best()

# Inverse transform search parameters
if self._transformation:
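As an aside on the pattern introduced above: the long `if`/`elif` chain over `unset_options` is replaced by a dict mapping the remaining option keys to their setter methods. Below is a self-contained sketch of that dispatch idea; the class name and setter bodies are illustrative stand-ins, not the PyBOP source.

```python
# Minimal sketch of the dict-dispatch pattern used in _set_up_optimiser.
# Option names mirror the docstring above; the set_* bodies are stand-ins.
class OptionsSketch:
    def __init__(self, **kwargs):
        self.unset_options = dict(kwargs)
        # Map remaining option keys to their setters, replacing the if/elif chain.
        self.option_methods = {
            "use_f_guessed": self.set_f_guessed_tracking,
            "max_evaluations": self.set_max_evaluations,
            "threshold": self.set_threshold,
        }
        for key, method in self.option_methods.items():
            if key in self.unset_options:
                method(self.unset_options.pop(key))

    def set_f_guessed_tracking(self, value):
        self._use_f_guessed = bool(value)

    def set_max_evaluations(self, value):
        self._max_evaluations = value

    def set_threshold(self, value):
        self._threshold = value


opts = OptionsSketch(max_evaluations=500, threshold=1e-3)
assert opts.unset_options == {}  # consumed keys are popped as they are applied
```

The design keeps unrecognised keys in `unset_options`, so the base class can still detect and report unused arguments downstream.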
20 changes: 10 additions & 10 deletions pybop/optimisers/optimisation.py
@@ -32,34 +32,34 @@ class Optimisation:
"""

def __init__(self, cost, optimiser=None, **optimiser_kwargs):
self.__dict__["optimiser"] = (
self.__dict__["optim"] = (
None # Pre-define optimiser to avoid recursion during initialisation
)
if optimiser is None:
- self.optimiser = XNES(cost, **optimiser_kwargs)
+ self.optim = XNES(cost, **optimiser_kwargs)
elif issubclass(optimiser, BasePintsOptimiser):
- self.optimiser = optimiser(cost, **optimiser_kwargs)
+ self.optim = optimiser(cost, **optimiser_kwargs)
elif issubclass(optimiser, BaseSciPyOptimiser):
- self.optimiser = optimiser(cost, **optimiser_kwargs)
+ self.optim = optimiser(cost, **optimiser_kwargs)
else:
raise ValueError("Unknown optimiser type")

def run(self):
- return self.optimiser.run()
+ return self.optim.run()

def __getattr__(self, attr):
if "optimiser" in self.__dict__ and hasattr(self.optimiser, attr):
return getattr(self.optimiser, attr)
if "optim" in self.__dict__ and hasattr(self.optim, attr):
return getattr(self.optim, attr)
raise AttributeError(
f"'{self.__class__.__name__}' object has no attribute '{attr}'"
)

def __setattr__(self, name: str, value) -> None:
if (
name in self.__dict__
or "optimiser" not in self.__dict__
or not hasattr(self.optimiser, name)
or "optim" not in self.__dict__
or not hasattr(self.optim, name)
):
object.__setattr__(self, name, value)
else:
- setattr(self.optimiser, name, value)
+ setattr(self.optim, name, value)
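To make the renamed composition concrete, here is a short hedged example of the intended delegation behaviour (`cost` as in the earlier sketch; attribute names taken from this diff):

```python
optim = pybop.Optimisation(cost, optimiser=pybop.XNES, max_iterations=50)
optim.run()

# __getattr__ forwards unknown attributes to the wrapped optimiser,
# now stored internally as `self.optim` ...
print(optim.result.n_iterations)

# ... so the wrapped object's `optimiser` attribute (the underlying PINTS
# instance, renamed from `pints_optimiser` in this commit) is reachable too:
print(optim.optimiser.f_best(), optim.optimiser.x_best())

# __setattr__ likewise forwards assignments to attributes that already exist
# on the wrapped optimiser, rather than shadowing them on the wrapper.
```

Renaming the internal handle from `optimiser` to `optim` frees the name `optimiser` to consistently mean the underlying PINTS optimiser instance, which is what the benchmark and notebook changes above rely on.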