diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9c01a60b..3f1c4f5c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -22,6 +22,7 @@
 
 ## Optimisations
 
+- [#580](https://github.com/pybop-team/PyBOP/pull/580) - Adds the Random Search optimiser.
 - [#588](https://github.com/pybop-team/PyBOP/pull/588) - Makes `minimising` a property of `BaseOptimiser` set by the cost class.
 - [#512](https://github.com/pybop-team/PyBOP/pull/513) - Refactors `LogPosterior` with attributes pointing to composed likelihood object.
 - [#551](https://github.com/pybop-team/PyBOP/pull/551) - Refactors Optimiser arguments, `population_size` and `max_iterations` as default args, improves optimiser docstrings
diff --git a/examples/scripts/comparison_examples/random_search.py b/examples/scripts/comparison_examples/random_search.py
new file mode 100644
index 00000000..ce1e50da
--- /dev/null
+++ b/examples/scripts/comparison_examples/random_search.py
@@ -0,0 +1,73 @@
+import numpy as np
+
+import pybop
+
+# Define model
+parameter_set = pybop.ParameterSet.pybamm("Chen2020")
+parameter_set.update(
+    {
+        "Negative electrode active material volume fraction": 0.7,
+        "Positive electrode active material volume fraction": 0.67,
+    }
+)
+model = pybop.lithium_ion.SPM(parameter_set=parameter_set)
+
+# Fitting parameters
+parameters = pybop.Parameters(
+    pybop.Parameter(
+        "Negative electrode active material volume fraction",
+        bounds=[0.4, 0.75],
+        initial_value=0.41,
+    ),
+    pybop.Parameter(
+        "Positive electrode active material volume fraction",
+        bounds=[0.4, 0.75],
+        initial_value=0.41,
+    ),
+)
+experiment = pybop.Experiment(
+    [
+        (
+            "Discharge at 0.5C for 3 minutes (4 second period)",
+            "Charge at 0.5C for 3 minutes (4 second period)",
+        ),
+    ]
+)
+values = model.predict(initial_state={"Initial SoC": 0.7}, experiment=experiment)
+
+sigma = 0.002
+corrupt_values = values["Voltage [V]"].data + np.random.normal(
+    0, sigma, len(values["Voltage [V]"].data)
+)
+
+# Form dataset
+dataset = pybop.Dataset(
+    {
+        "Time [s]": values["Time [s]"].data,
+        "Current function [A]": values["Current [A]"].data,
+        "Voltage [V]": corrupt_values,
+    }
+)
+
+# Generate problem, cost function, and optimisation class
+problem = pybop.FittingProblem(model, parameters, dataset)
+cost = pybop.GaussianLogLikelihood(problem, sigma0=sigma * 4)
+optim = pybop.Optimisation(
+    cost,
+    optimiser=pybop.RandomSearch,
+    max_iterations=100,
+)
+
+results = optim.run()
+
+# Plot the timeseries output
+pybop.plot.quick(problem, problem_inputs=results.x, title="Optimised Comparison")
+
+# Plot convergence
+pybop.plot.convergence(optim)
+
+# Plot the parameter traces
+pybop.plot.parameters(optim)
+
+# Plot the cost landscape with optimisation path
+pybop.plot.contour(optim, steps=10)
diff --git a/pybop/__init__.py b/pybop/__init__.py
index d453109e..b29faf2f 100644
--- a/pybop/__init__.py
+++ b/pybop/__init__.py
@@ -124,6 +124,7 @@
 #
 from .optimisers._cuckoo import CuckooSearchImpl
+from .optimisers._random_search import RandomSearchImpl
 from .optimisers._adamw import AdamWImpl
 from .optimisers._gradient_descent import GradientDescentImpl
 from .optimisers.base_optimiser import BaseOptimiser, OptimisationResult, MultiOptimisationResult
@@ -142,6 +143,7 @@
     SNES,
     XNES,
     CuckooSearch,
+    RandomSearch,
     AdamW,
 )
 from .optimisers.optimisation import Optimisation
diff --git a/pybop/optimisers/_cuckoo.py b/pybop/optimisers/_cuckoo.py
index dba591d7..b6f2e5c1 100644
--- a/pybop/optimisers/_cuckoo.py
+++ b/pybop/optimisers/_cuckoo.py
@@ -54,7 +54,6 @@ def __init__(self, x0, sigma0=0.05, boundaries=None, pa=0.25):
         self._dim = len(x0)
 
         # Population size and abandon rate
-        self._n = self._population_size
         self._pa = pa
         self.step_size = self._sigma0
         self.beta = 1.5
@@ -68,14 +67,14 @@ def __init__(self, x0, sigma0=0.05, boundaries=None, pa=0.25):
             self._nests = np.random.uniform(
                 low=self._boundaries.lower(),
                 high=self._boundaries.upper(),
-                size=(self._n, self._dim),
+                size=(self._population_size, self._dim),
             )
         else:
             self._nests = np.random.normal(
-                self._x0, self._sigma0, size=(self._n, self._dim)
+                self._x0, self._sigma0, size=(self._population_size, self._dim)
             )
 
-        self._fitness = np.full(self._n, np.inf)
+        self._fitness = np.full(self._population_size, np.inf)
 
         # Initialise best solutions
         self._x_best = np.copy(x0)
@@ -112,7 +111,7 @@ def tell(self, replies):
         self._iterations += 1
 
         # Compare cuckoos with current nests
-        for i in range(self._n):
+        for i in range(self._population_size):
             f_new = replies[i]
             if f_new < self._fitness[i]:
                 self._nests[i] = self.cuckoos[i]
@@ -122,7 +121,7 @@ def tell(self, replies):
                 self._x_best = self.cuckoos[i]
 
         # Abandon some worse nests
-        n_abandon = int(self._pa * self._n)
+        n_abandon = int(self._pa * self._population_size)
         worst_nests = np.argsort(self._fitness)[-n_abandon:]
         for idx in worst_nests:
             self.abandon_nests(idx)
diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py
new file mode 100644
index 00000000..d3737801
--- /dev/null
+++ b/pybop/optimisers/_random_search.py
@@ -0,0 +1,111 @@
+import numpy as np
+from pints import PopulationBasedOptimiser
+
+
+class RandomSearchImpl(PopulationBasedOptimiser):
+    """
+    Random Search (RS) optimisation algorithm.
+    This algorithm explores the parameter space by randomly sampling points.
+
+    The algorithm does the following:
+    1. Initialise a population of solutions.
+    2. At each iteration, generate a population of random positions within the boundaries.
+    3. Evaluate the quality/fitness of the positions.
+    4. Update the best position if an improved one is found.
+
+    Parameters:
+        population_size (optional): Number of solutions to evaluate per iteration.
+
+    References:
+        The Random Search algorithm implemented in this work is based on principles outlined
+        in "Introduction to Stochastic Search and Optimization: Estimation, Simulation, and
+        Control" by Spall, J. C. (2003).
+
+        The implementation inherits from the PINTS PopulationBasedOptimiser.
+    """
+
+    def __init__(self, x0, sigma0=0.05, boundaries=None):
+        super().__init__(x0, sigma0, boundaries=boundaries)
+
+        # Problem dimensionality
+        self._dim = len(x0)
+
+        # Initialise best solution
+        self._x_best = np.copy(x0)
+        self._f_best = np.inf
+        self._running = False
+        self._ready_for_tell = False
+
+    def ask(self):
+        """
+        Returns a list of positions to evaluate in the optimiser space.
+        """
+        self._ready_for_tell = True
+        self._running = True
+
+        # Generate random solutions
+        if self._boundaries:
+            self._candidates = np.random.uniform(
+                low=self._boundaries.lower(),
+                high=self._boundaries.upper(),
+                size=(self._population_size, self._dim),
+            )
+            return self._candidates
+
+        self._candidates = np.random.normal(
+            self._x0, self._sigma0, size=(self._population_size, self._dim)
+        )
+        return self.clip_candidates(self._candidates)
+
+    def tell(self, replies):
+        """
+        Receives a list of cost function values from points previously specified
+        by `self.ask()`, and updates the optimiser state accordingly.
+ """ + if not self._ready_for_tell: + raise RuntimeError("ask() must be called before tell().") + + # Evaluate solutions and update the best + for i in range(self._population_size): + f_new = replies[i] + if f_new < self._f_best: + self._f_best = f_new + self._x_best = self._candidates[i] + + def running(self): + """ + Returns ``True`` if the optimisation is in progress. + """ + return self._running + + def x_best(self): + """ + Returns the best parameter values found so far. + """ + return self._x_best + + def f_best(self): + """ + Returns the best score found so far. + """ + return self._f_best + + def name(self): + """ + Returns the name of the optimiser. + """ + return "Random Search" + + def clip_candidates(self, x): + """ + Clip the input array to the boundaries if available. + """ + if self._boundaries: + x = np.clip(x, self._boundaries.lower(), self._boundaries.upper()) + return x + + def _suggested_population_size(self): + """ + Returns a suggested population size based on the dimension of the parameter space. + """ + return 4 + int(3 * np.log(self._n_parameters)) diff --git a/pybop/optimisers/pints_optimisers.py b/pybop/optimisers/pints_optimisers.py index 3512f7e7..0cfe551b 100644 --- a/pybop/optimisers/pints_optimisers.py +++ b/pybop/optimisers/pints_optimisers.py @@ -10,6 +10,7 @@ BasePintsOptimiser, CuckooSearchImpl, GradientDescentImpl, + RandomSearchImpl, ) @@ -652,3 +653,73 @@ def __init__( parallel, **optimiser_kwargs, ) + + +class RandomSearch(BasePintsOptimiser): + """ + Adapter for the Random Search optimiser in PyBOP. + + Random Search is a simple optimisation algorithm that samples parameter sets randomly + within the given boundaries and identifies the best solution based on fitness. + + This optimiser has been implemented for benchmarking and comparisons, convergence will be + better with one of other optimisers in the majority of cases. + + Parameters + ---------- + cost : callable + The cost function to be minimized. + max_iterations : int, optional + Maximum number of iterations for the optimisation. + min_iterations : int, optional (default=2) + Minimum number of iterations before termination. + max_unchanged_iterations : int, optional (default=15) + Maximum number of iterations without improvement before termination. + multistart : int, optional (default=1) + Number of optimiser restarts from randomly sample position. These positions + are sampled from the priors. + parallel : bool, optional (default=False) + Whether to run the optimisation in parallel. + **optimiser_kwargs : optional + Valid PINTS option keys and their values, for example: + x0 : array_like + Initial position from which optimisation will start. + population_size : int + Number of solutions to evaluate per iteration. + bounds : dict + A dictionary with 'lower' and 'upper' keys containing arrays for lower and + upper bounds on the parameters. + absolute_tolerance : float + Absolute tolerance for convergence checking. + relative_tolerance : float + Relative tolerance for convergence checking. + max_evaluations : int + Maximum number of function evaluations. + threshold : float + Threshold value for early termination. + + See Also + -------- + pybop.RandomSearchImpl : PyBOP implementation of Random Search algorithm. 
+ """ + + def __init__( + self, + cost, + max_iterations: int = None, + min_iterations: int = 2, + max_unchanged_iterations: int = 15, + multistart: int = 1, + parallel: bool = False, + **optimiser_kwargs, + ): + super().__init__( + cost, + RandomSearchImpl, + max_iterations, + min_iterations, + max_unchanged_iterations, + multistart, + parallel, + **optimiser_kwargs, + ) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 66756fac..9d298515 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -88,6 +88,7 @@ def two_param_cost(self, model, two_parameters, dataset): (pybop.PSO, "Particle Swarm Optimisation (PSO)", False), (pybop.IRPropMin, "iRprop-", True), (pybop.NelderMead, "Nelder-Mead", False), + (pybop.RandomSearch, "Random Search", False), ], ) @pytest.mark.unit @@ -137,6 +138,7 @@ def test_no_optimisation_parameters(self, model, dataset): pybop.IRPropMin, pybop.NelderMead, pybop.CuckooSearch, + pybop.RandomSearch, ], ) @pytest.mark.unit @@ -276,7 +278,12 @@ def check_multistart(optim, n_iters, multistarts): ): optimiser(cost=cost, bounds=bounds_case) - if optimiser in [pybop.AdamW, pybop.CuckooSearch, pybop.GradientDescent]: + if optimiser in [ + pybop.AdamW, + pybop.CuckooSearch, + pybop.GradientDescent, + pybop.RandomSearch, + ]: optim = optimiser(cost) with pytest.raises( RuntimeError, match=re.escape("ask() must be called before tell().") @@ -337,6 +344,46 @@ def test_cuckoo_no_bounds(self, cost): optim.run() assert optim.optimiser._boundaries is None + @pytest.mark.unit + def test_randomsearch_bounds(self, two_param_cost): + # Test clip_candidates with bound + bounds = {"upper": [0.62, 0.57], "lower": [0.58, 0.53]} + optimiser = pybop.RandomSearch( + cost=two_param_cost, bounds=bounds, max_iterations=1 + ) + candidates = np.array([[0.57, 0.52], [0.63, 0.58]]) + clipped_candidates = optimiser.optimiser.clip_candidates(candidates) + expected_clipped = np.array([[0.58, 0.53], [0.62, 0.57]]) + assert np.allclose(clipped_candidates, expected_clipped) + + # Test clip_candidates without bound + optimiser = pybop.RandomSearch( + cost=two_param_cost, bounds=None, max_iterations=1 + ) + candidates = np.array([[0.57, 0.52], [0.63, 0.58]]) + clipped_candidates = optimiser.optimiser.clip_candidates(candidates) + assert np.allclose(clipped_candidates, candidates) + + @pytest.mark.unit + def test_randomsearch_ask_without_bounds(self, two_param_cost): + # Initialize optimiser without boundaries + optim = pybop.RandomSearch( + cost=two_param_cost, + sigma0=0.05, + x0=[0.6, 0.55], + bounds=None, + max_iterations=1, + ) + + # Set population size, generate candidates + optim.set_population_size(2) + candidates = optim.optimiser.ask() + + # Assert the shape of the candidates + assert candidates.shape == (2, 2) + assert np.all(candidates >= optim.optimiser._x0 - 6 * optim.optimiser._sigma0) + assert np.all(candidates <= optim.optimiser._x0 + 6 * optim.optimiser._sigma0) + @pytest.mark.unit def test_scipy_minimize_with_jac(self, cost): # Check a method that uses gradient information