From fcdee7497df1978f5b2623b22f90812596837b4f Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Thu, 5 Dec 2024 12:37:17 +0000 Subject: [PATCH 01/40] random_search optimiser added in the pints framework --- .../comparison_examples/random_search.py | 75 ++++++++++++ pybop/__init__.py | 2 + pybop/optimisers/_random_search.py | 111 ++++++++++++++++++ pybop/optimisers/pints_optimisers.py | 65 ++++++++++ 4 files changed, 253 insertions(+) create mode 100644 examples/scripts/comparison_examples/random_search.py create mode 100644 pybop/optimisers/_random_search.py diff --git a/examples/scripts/comparison_examples/random_search.py b/examples/scripts/comparison_examples/random_search.py new file mode 100644 index 00000000..f265dcb2 --- /dev/null +++ b/examples/scripts/comparison_examples/random_search.py @@ -0,0 +1,75 @@ +import numpy as np + +import pybop + +# Define model +parameter_set = pybop.ParameterSet.pybamm("Chen2020") +parameter_set.update( + { + "Negative electrode active material volume fraction": 0.7, + "Positive electrode active material volume fraction": 0.67, + } +) +model = pybop.lithium_ion.SPM(parameter_set=parameter_set) + +# Fitting parameters +parameters = pybop.Parameters( + pybop.Parameter( + "Negative electrode active material volume fraction", + prior=pybop.Gaussian(0.6, 0.05), + bounds=[0.4, 0.75], + initial_value=0.41, + ), + pybop.Parameter( + "Positive electrode active material volume fraction", + prior=pybop.Gaussian(0.48, 0.05), + bounds=[0.4, 0.75], + initial_value=0.41, + ), +) +experiment = pybop.Experiment( + [ + ( + "Discharge at 0.5C for 3 minutes (4 second period)", + "Charge at 0.5C for 3 minutes (4 second period)", + ), + ] +) +values = model.predict(initial_state={"Initial SoC": 0.7}, experiment=experiment) + +sigma = 0.002 +corrupt_values = values["Voltage [V]"].data + np.random.normal( + 0, sigma, len(values["Voltage [V]"].data) +) + +# Form dataset +dataset = pybop.Dataset( + { + "Time [s]": values["Time [s]"].data, + "Current function [A]": values["Current [A]"].data, + "Voltage [V]": corrupt_values, + } +) + +# Generate problem, cost function, and optimisation class +problem = pybop.FittingProblem(model, parameters, dataset) +cost = pybop.GaussianLogLikelihood(problem, sigma0=sigma * 4) +optim = pybop.Optimisation( + cost, + optimiser=pybop.RandomSearch, + max_iterations=100, +) + +results = optim.run() + +# Plot the timeseries output +pybop.plot.quick(problem, problem_inputs=results.x, title="Optimised Comparison") + +# Plot convergence +pybop.plot.convergence(optim) + +# Plot the parameter traces +pybop.plot.parameters(optim) + +# Plot the cost landscape with optimisation path +pybop.plot.contour(optim, steps=15) diff --git a/pybop/__init__.py b/pybop/__init__.py index 41928142..6403b938 100644 --- a/pybop/__init__.py +++ b/pybop/__init__.py @@ -123,6 +123,7 @@ # from .optimisers._cuckoo import CuckooSearchImpl +from .optimisers._random_search import RandomSearchImpl from .optimisers._adamw import AdamWImpl from .optimisers._gradient_descent import GradientDescentImpl from .optimisers.base_optimiser import BaseOptimiser, OptimisationResult @@ -142,6 +143,7 @@ SNES, XNES, CuckooSearch, + RandomSearch, AdamW, ) from .optimisers.optimisation import Optimisation diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py new file mode 100644 index 00000000..e8b794dc --- /dev/null +++ b/pybop/optimisers/_random_search.py @@ -0,0 +1,111 @@ +import numpy as np +from pints import PopulationBasedOptimiser + + +class 
RandomSearchImpl(PopulationBasedOptimiser): + """ + Random Search (RS) optimisation algorithm. + This algorithm explores the parameter space by randomly sampling points. + + The algorithm is simple: + 1. Initialise a population of solutions. + 2. At each iteration, generate new random solutions within boundaries. + 3. Evaluate the quality/fitness of the solutions. + 4. Update the best solution found so far. + + Parameters: + - population_size (optional): Number of solutions to evaluate per iteration. + + References: + - The Random Search algorithm implemented in this work is based on principles outlined + in: + Introduction to Stochastic Search and Optimization: Estimation, Simulation, and Control + - by Spall, J. C. (2003). + The implementation leverages the pints library framework, which provides tools for + population-based optimization methods. + """ + + def __init__(self, x0, sigma0=0.05, boundaries=None, population_size=None): + # Problem dimensionality + self._dim = len(x0) # Initialize _dim first + + super().__init__(x0, sigma0, boundaries=boundaries) + + # Population size, defaulting to a suggested value + self._population_size = population_size or self._suggested_population_size() + self.step_size = self._sigma0 + + # Initialise best solutions + self._x_best = np.copy(x0) + self._f_best = np.inf + + # Iteration counter + self._iterations = 0 + + # Flags + self._running = False + self._ready_for_tell = False + + def ask(self): + """ + Returns a list of next points in the parameter-space + to evaluate from the optimiser. + """ + self._ready_for_tell = True + self._running = True + + # Generate random solutions within the boundaries + self._candidates = np.random.uniform( + low=self._boundaries.lower(), + high=self._boundaries.upper(), + size=(self._population_size, self._dim), + ) + return self._candidates + + def tell(self, replies): + """ + Receives a list of function values from the cost function from points + previously specified by `self.ask()`, and updates the optimiser state + accordingly. + """ + if not self._ready_for_tell: + raise RuntimeError("Optimiser not ready for tell()") + + self._iterations += 1 + self._ready_for_tell = False + + # Update the best solution + for i, fitness in enumerate(replies): + if fitness < self._f_best: + self._f_best = fitness + self._x_best = self._candidates[i] + + def running(self): + """ + Returns ``True`` if the optimisation is in progress. + """ + return self._running + + def x_best(self): + """ + Returns the best parameter values found so far. + """ + return self._x_best + + def f_best(self): + """ + Returns the best score found so far. + """ + return self._f_best + + def name(self): + """ + Returns the name of the optimiser. + """ + return "Random Search" + + def _suggested_population_size(self): + """ + Returns a suggested population size based on the dimension of the parameter space. + """ + return 10 + int(2 * np.log(self._dim)) diff --git a/pybop/optimisers/pints_optimisers.py b/pybop/optimisers/pints_optimisers.py index ac19fab8..d40a9578 100644 --- a/pybop/optimisers/pints_optimisers.py +++ b/pybop/optimisers/pints_optimisers.py @@ -13,6 +13,7 @@ BasePintsOptimiser, CuckooSearchImpl, GradientDescentImpl, + RandomSearchImpl, ) @@ -682,3 +683,67 @@ def __init__( parallel, **optimiser_kwargs, ) + +class RandomSearch(BasePintsOptimiser): + """ + Adapter for the Random Search optimiser in PyBOP. 
+ + Random Search is a simple optimisation algorithm that samples parameter + sets randomly within the given boundaries and identifies the best solution + based on fitness. + + Parameters + ---------- + cost : callable + The cost function to be minimized. + max_iterations : int, optional + Maximum number of iterations for the optimisation. + min_iterations : int, optional (default=2) + Minimum number of iterations before termination. + max_unchanged_iterations : int, optional (default=15) + Maximum number of iterations without improvement before termination. + parallel : bool, optional (default=False) + Whether to run the optimisation in parallel. + **optimiser_kwargs : optional + Valid PINTS option keys and their values, for example: + x0 : array_like + Initial position from which optimisation will start. + population_size : int + Number of solutions to evaluate per iteration. + bounds : dict + A dictionary with 'lower' and 'upper' keys containing arrays for lower and + upper bounds on the parameters. + absolute_tolerance : float + Absolute tolerance for convergence checking. + relative_tolerance : float + Relative tolerance for convergence checking. + max_evaluations : int + Maximum number of function evaluations. + threshold : float + Threshold value for early termination. + + See Also + -------- + pybop.RandomSearchImpl : PyBOP implementation of Random Search algorithm. + """ + + def __init__( + self, + cost, + max_iterations: int = None, + min_iterations: int = 20, + max_unchanged_iterations: int = 100, + population_size: int = 10, + parallel: bool = False, + **optimiser_kwargs, + ): + + super().__init__( + cost, + RandomSearchImpl, + max_iterations, + min_iterations, + max_unchanged_iterations, + parallel, + **optimiser_kwargs, + ) From 3dc64bfcc05faa023a0c384671aea00d9a02c846 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 5 Dec 2024 12:45:36 +0000 Subject: [PATCH 02/40] style: pre-commit fixes --- pybop/optimisers/_random_search.py | 8 ++++---- pybop/optimisers/pints_optimisers.py | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index e8b794dc..58e7ebd5 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -12,13 +12,13 @@ class RandomSearchImpl(PopulationBasedOptimiser): 2. At each iteration, generate new random solutions within boundaries. 3. Evaluate the quality/fitness of the solutions. 4. Update the best solution found so far. - + Parameters: - population_size (optional): Number of solutions to evaluate per iteration. References: - The Random Search algorithm implemented in this work is based on principles outlined - in: + in: Introduction to Stochastic Search and Optimization: Estimation, Simulation, and Control - by Spall, J. C. (2003). 
The implementation leverages the pints library framework, which provides tools for @@ -30,7 +30,7 @@ def __init__(self, x0, sigma0=0.05, boundaries=None, population_size=None): self._dim = len(x0) # Initialize _dim first super().__init__(x0, sigma0, boundaries=boundaries) - + # Population size, defaulting to a suggested value self._population_size = population_size or self._suggested_population_size() self.step_size = self._sigma0 @@ -70,7 +70,7 @@ def tell(self, replies): """ if not self._ready_for_tell: raise RuntimeError("Optimiser not ready for tell()") - + self._iterations += 1 self._ready_for_tell = False diff --git a/pybop/optimisers/pints_optimisers.py b/pybop/optimisers/pints_optimisers.py index d40a9578..9026c8b6 100644 --- a/pybop/optimisers/pints_optimisers.py +++ b/pybop/optimisers/pints_optimisers.py @@ -684,12 +684,13 @@ def __init__( **optimiser_kwargs, ) + class RandomSearch(BasePintsOptimiser): """ Adapter for the Random Search optimiser in PyBOP. - Random Search is a simple optimisation algorithm that samples parameter - sets randomly within the given boundaries and identifies the best solution + Random Search is a simple optimisation algorithm that samples parameter + sets randomly within the given boundaries and identifies the best solution based on fitness. Parameters @@ -737,7 +738,6 @@ def __init__( parallel: bool = False, **optimiser_kwargs, ): - super().__init__( cost, RandomSearchImpl, From 2d5c7d2196d296ae5fcb821e39139e982673bd50 Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Thu, 5 Dec 2024 12:56:40 +0000 Subject: [PATCH 03/40] description texts updated --- pybop/optimisers/_random_search.py | 9 ++++----- pybop/optimisers/pints_optimisers.py | 5 ++--- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index e8b794dc..0a52f42c 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -18,9 +18,8 @@ class RandomSearchImpl(PopulationBasedOptimiser): References: - The Random Search algorithm implemented in this work is based on principles outlined - in: - Introduction to Stochastic Search and Optimization: Estimation, Simulation, and Control - - by Spall, J. C. (2003). + in "Introduction to Stochastic Search and Optimization: Estimation, Simulation, and + Control" by Spall, J. C. (2003). The implementation leverages the pints library framework, which provides tools for population-based optimization methods. """ @@ -30,7 +29,7 @@ def __init__(self, x0, sigma0=0.05, boundaries=None, population_size=None): self._dim = len(x0) # Initialize _dim first super().__init__(x0, sigma0, boundaries=boundaries) - + # Population size, defaulting to a suggested value self._population_size = population_size or self._suggested_population_size() self.step_size = self._sigma0 @@ -70,7 +69,7 @@ def tell(self, replies): """ if not self._ready_for_tell: raise RuntimeError("Optimiser not ready for tell()") - + self._iterations += 1 self._ready_for_tell = False diff --git a/pybop/optimisers/pints_optimisers.py b/pybop/optimisers/pints_optimisers.py index d40a9578..33ed0259 100644 --- a/pybop/optimisers/pints_optimisers.py +++ b/pybop/optimisers/pints_optimisers.py @@ -688,9 +688,8 @@ class RandomSearch(BasePintsOptimiser): """ Adapter for the Random Search optimiser in PyBOP. - Random Search is a simple optimisation algorithm that samples parameter - sets randomly within the given boundaries and identifies the best solution - based on fitness. 
+ Random Search is a simple optimisation algorithm that samples parameter sets randomly + within the given boundaries and identifies the best solution based on fitness. Parameters ---------- From 8f912dcacccc5ba6893633133a31b1a5e2a3f484 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 5 Dec 2024 13:00:50 +0000 Subject: [PATCH 04/40] style: pre-commit fixes --- pybop/optimisers/_random_search.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index cab84549..1ad14d4e 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -30,7 +30,6 @@ def __init__(self, x0, sigma0=0.05, boundaries=None, population_size=None): super().__init__(x0, sigma0, boundaries=boundaries) - # Population size, defaulting to a suggested value self._population_size = population_size or self._suggested_population_size() self.step_size = self._sigma0 @@ -71,7 +70,6 @@ def tell(self, replies): if not self._ready_for_tell: raise RuntimeError("Optimiser not ready for tell()") - self._iterations += 1 self._ready_for_tell = False From 51f8f2a11c32ab5565bfdb0c5f8a4177381432f6 Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Fri, 6 Dec 2024 11:49:57 +0000 Subject: [PATCH 05/40] random_search updated --- pybop/optimisers/_random_search.py | 6 +++--- pybop/optimisers/pints_optimisers.py | 1 - 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index 1ad14d4e..b44c455a 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -24,14 +24,14 @@ class RandomSearchImpl(PopulationBasedOptimiser): population-based optimization methods. """ - def __init__(self, x0, sigma0=0.05, boundaries=None, population_size=None): + def __init__(self, x0, sigma0=0.05, boundaries=None): # Problem dimensionality self._dim = len(x0) # Initialize _dim first super().__init__(x0, sigma0, boundaries=boundaries) # Population size, defaulting to a suggested value - self._population_size = population_size or self._suggested_population_size() + self._population_size = self._suggested_population_size() self.step_size = self._sigma0 # Initialise best solutions @@ -107,4 +107,4 @@ def _suggested_population_size(self): """ Returns a suggested population size based on the dimension of the parameter space. 
""" - return 10 + int(2 * np.log(self._dim)) + return 4 + int(3 * np.log(self._n_parameters)) diff --git a/pybop/optimisers/pints_optimisers.py b/pybop/optimisers/pints_optimisers.py index d6028753..d472764f 100644 --- a/pybop/optimisers/pints_optimisers.py +++ b/pybop/optimisers/pints_optimisers.py @@ -733,7 +733,6 @@ def __init__( max_iterations: int = None, min_iterations: int = 20, max_unchanged_iterations: int = 100, - population_size: int = 10, parallel: bool = False, **optimiser_kwargs, ): From 6de4820e489b4904838bd4322af4828f2781201f Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Tue, 10 Dec 2024 14:29:07 +0000 Subject: [PATCH 06/40] population size input in random search modified --- pybop/optimisers/_random_search.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index b44c455a..9a74c7b3 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -30,8 +30,6 @@ def __init__(self, x0, sigma0=0.05, boundaries=None): super().__init__(x0, sigma0, boundaries=boundaries) - # Population size, defaulting to a suggested value - self._population_size = self._suggested_population_size() self.step_size = self._sigma0 # Initialise best solutions From 3a11ffb26dca33951ae58a1fe0ee4dbd8f38e77c Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Mon, 16 Dec 2024 17:28:09 +0000 Subject: [PATCH 07/40] example updated --- examples/scripts/comparison_examples/random_search.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/examples/scripts/comparison_examples/random_search.py b/examples/scripts/comparison_examples/random_search.py index f265dcb2..04e0d8ed 100644 --- a/examples/scripts/comparison_examples/random_search.py +++ b/examples/scripts/comparison_examples/random_search.py @@ -16,13 +16,11 @@ parameters = pybop.Parameters( pybop.Parameter( "Negative electrode active material volume fraction", - prior=pybop.Gaussian(0.6, 0.05), bounds=[0.4, 0.75], initial_value=0.41, ), pybop.Parameter( "Positive electrode active material volume fraction", - prior=pybop.Gaussian(0.48, 0.05), bounds=[0.4, 0.75], initial_value=0.41, ), From a57cf41728320640b458d6d5ff0aca7d65717c6c Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Mon, 16 Dec 2024 17:36:20 +0000 Subject: [PATCH 08/40] unit tests added for randomsearch --- tests/unit/test_optimisation.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index e116b146..85c30356 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -89,6 +89,7 @@ def two_param_cost(self, model, two_parameters, dataset): (pybop.PSO, "Particle Swarm Optimisation (PSO)", False), (pybop.IRPropMin, "iRprop-", True), (pybop.NelderMead, "Nelder-Mead", False), + (pybop.RandomSearch, "Random Search", False), ], ) @pytest.mark.unit @@ -139,6 +140,7 @@ def test_no_optimisation_parameters(self, model, dataset): pybop.IRPropMin, pybop.NelderMead, pybop.CuckooSearch, + pybop.RandomSearch, ], ) @pytest.mark.unit @@ -259,7 +261,7 @@ def check_bounds_handling(optim, expected_bounds, should_raise=False): ): optimiser(cost=cost, bounds=bounds_case) - if optimiser in [pybop.AdamW, pybop.CuckooSearch, pybop.GradientDescent]: + if optimiser in [pybop.AdamW, pybop.CuckooSearch, pybop.GradientDescent, pybop.RandomSearch]: optim = optimiser(cost) with pytest.raises( RuntimeError, match=re.escape("ask() must be called before tell().") @@ -320,6 +322,12 @@ def 
test_cuckoo_no_bounds(self, cost): optim.run() assert optim.optimiser._boundaries is None + @pytest.mark.unit + def test_randomsearch_no_bounds(self, cost): + optim = pybop.RandomSearch(cost=cost, bounds=None, max_iterations=1) + optim.run() + assert optim.optimiser._boundaries is None + @pytest.mark.unit def test_scipy_minimize_with_jac(self, cost): # Check a method that uses gradient information From eea7bfb6b2d3ecae08d91b996bcf9b3c133569bc Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 16 Dec 2024 17:36:41 +0000 Subject: [PATCH 09/40] style: pre-commit fixes --- tests/unit/test_optimisation.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 85c30356..4e7a1782 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -261,7 +261,12 @@ def check_bounds_handling(optim, expected_bounds, should_raise=False): ): optimiser(cost=cost, bounds=bounds_case) - if optimiser in [pybop.AdamW, pybop.CuckooSearch, pybop.GradientDescent, pybop.RandomSearch]: + if optimiser in [ + pybop.AdamW, + pybop.CuckooSearch, + pybop.GradientDescent, + pybop.RandomSearch, + ]: optim = optimiser(cost) with pytest.raises( RuntimeError, match=re.escape("ask() must be called before tell().") From 61e129928fbdd56980db4c1e869c157553eafa37 Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Tue, 17 Dec 2024 11:52:26 +0000 Subject: [PATCH 10/40] none type boundary handling modified --- pybop/optimisers/_random_search.py | 24 ++++++++++++++++++------ pybop/optimisers/pints_optimisers.py | 6 +++--- 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index 9a74c7b3..163945fd 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -1,3 +1,5 @@ +import warnings + import numpy as np from pints import PopulationBasedOptimiser @@ -51,13 +53,23 @@ def ask(self): self._ready_for_tell = True self._running = True + # Generate random solutions within the boundaries - self._candidates = np.random.uniform( - low=self._boundaries.lower(), - high=self._boundaries.upper(), - size=(self._population_size, self._dim), - ) - return self._candidates + if self._boundaries: + self._candidates = np.random.uniform( + low=self._boundaries.lower(), + high=self._boundaries.upper(), + size=(self._population_size, self._dim), + ) + return self._candidates + else: + warnings.warn("No boundaries provided. 
Generating candidates using a normal distribution centered on the initial point.", + UserWarning, + stacklevel=2, + ) + self._candidates = np.random.normal( + self._x0, self._sigma0, size=(self._n, self._dim) + ) def tell(self, replies): """ diff --git a/pybop/optimisers/pints_optimisers.py b/pybop/optimisers/pints_optimisers.py index d472764f..28a46170 100644 --- a/pybop/optimisers/pints_optimisers.py +++ b/pybop/optimisers/pints_optimisers.py @@ -730,9 +730,9 @@ class RandomSearch(BasePintsOptimiser): def __init__( self, cost, - max_iterations: int = None, - min_iterations: int = 20, - max_unchanged_iterations: int = 100, + max_iterations: int = 10, + min_iterations: int = 2, + max_unchanged_iterations: int = 2, parallel: bool = False, **optimiser_kwargs, ): From 99a44cee3dd1d0e86d6472757428fbf53b395989 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 11:53:32 +0000 Subject: [PATCH 11/40] style: pre-commit fixes --- pybop/optimisers/_random_search.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index 163945fd..c37679ae 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -53,7 +53,6 @@ def ask(self): self._ready_for_tell = True self._running = True - # Generate random solutions within the boundaries if self._boundaries: self._candidates = np.random.uniform( @@ -63,9 +62,10 @@ def ask(self): ) return self._candidates else: - warnings.warn("No boundaries provided. Generating candidates using a normal distribution centered on the initial point.", - UserWarning, - stacklevel=2, + warnings.warn( + "No boundaries provided. Generating candidates using a normal distribution centered on the initial point.", + UserWarning, + stacklevel=2, ) self._candidates = np.random.normal( self._x0, self._sigma0, size=(self._n, self._dim) From 4a1f0c87b87359262a0edf1e4cf078d2671429ea Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Tue, 17 Dec 2024 13:33:10 +0000 Subject: [PATCH 12/40] updated --- pybop/optimisers/_random_search.py | 24 ++++++------------------ 1 file changed, 6 insertions(+), 18 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index c37679ae..9a74c7b3 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -1,5 +1,3 @@ -import warnings - import numpy as np from pints import PopulationBasedOptimiser @@ -54,22 +52,12 @@ def ask(self): self._running = True # Generate random solutions within the boundaries - if self._boundaries: - self._candidates = np.random.uniform( - low=self._boundaries.lower(), - high=self._boundaries.upper(), - size=(self._population_size, self._dim), - ) - return self._candidates - else: - warnings.warn( - "No boundaries provided. 
Generating candidates using a normal distribution centered on the initial point.", - UserWarning, - stacklevel=2, - ) - self._candidates = np.random.normal( - self._x0, self._sigma0, size=(self._n, self._dim) - ) + self._candidates = np.random.uniform( + low=self._boundaries.lower(), + high=self._boundaries.upper(), + size=(self._population_size, self._dim), + ) + return self._candidates def tell(self, replies): """ From 2ed13d080c8e6e2fdcd5b5c2fe5fa8f895be49fd Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Tue, 17 Dec 2024 13:42:48 +0000 Subject: [PATCH 13/40] updated --- pybop/optimisers/pints_optimisers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pybop/optimisers/pints_optimisers.py b/pybop/optimisers/pints_optimisers.py index 28a46170..914ba683 100644 --- a/pybop/optimisers/pints_optimisers.py +++ b/pybop/optimisers/pints_optimisers.py @@ -730,9 +730,9 @@ class RandomSearch(BasePintsOptimiser): def __init__( self, cost, - max_iterations: int = 10, + max_iterations: int = None, min_iterations: int = 2, - max_unchanged_iterations: int = 2, + max_unchanged_iterations: int = 15, parallel: bool = False, **optimiser_kwargs, ): From 76258e4b6a2540598416d0a21bb8ffef81777b63 Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Tue, 17 Dec 2024 14:07:02 +0000 Subject: [PATCH 14/40] unit tests updated --- tests/unit/test_optimisation.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 4e7a1782..bd39450e 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -328,10 +328,10 @@ def test_cuckoo_no_bounds(self, cost): assert optim.optimiser._boundaries is None @pytest.mark.unit - def test_randomsearch_no_bounds(self, cost): - optim = pybop.RandomSearch(cost=cost, bounds=None, max_iterations=1) - optim.run() - assert optim.optimiser._boundaries is None + def test_randomsearch(self, cost): + optim = pybop.RandomSearch(cost=cost, max_iterations=10) + results = optim.run() + assert results.final_cost is not None @pytest.mark.unit def test_scipy_minimize_with_jac(self, cost): From 8fc749ecf71f9fbb173311be05675281311f2ee4 Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Tue, 17 Dec 2024 14:42:30 +0000 Subject: [PATCH 15/40] unit test updated --- tests/unit/test_optimisation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index bd39450e..478f7521 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -329,7 +329,7 @@ def test_cuckoo_no_bounds(self, cost): @pytest.mark.unit def test_randomsearch(self, cost): - optim = pybop.RandomSearch(cost=cost, max_iterations=10) + optim = pybop.RandomSearch(cost=cost, max_iterations=1) results = optim.run() assert results.final_cost is not None From accaf50f560a977fb88ad76b245fec40a121a561 Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Tue, 17 Dec 2024 15:54:18 +0000 Subject: [PATCH 16/40] unit tests modified --- tests/unit/test_optimisation.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 478f7521..d17bbbec 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -327,11 +327,11 @@ def test_cuckoo_no_bounds(self, cost): optim.run() assert optim.optimiser._boundaries is None - @pytest.mark.unit - def test_randomsearch(self, cost): - optim = 
pybop.RandomSearch(cost=cost, max_iterations=1) - results = optim.run() - assert results.final_cost is not None + # @pytest.mark.unit + # def test_randomsearch(self, cost): + # optim = pybop.RandomSearch(cost=cost, max_iterations=1) + # results = optim.run() + # assert results.final_cost is not None @pytest.mark.unit def test_scipy_minimize_with_jac(self, cost): From c9710b5a142e274d6f637f2d4248238b14becf3f Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Tue, 17 Dec 2024 17:42:33 +0000 Subject: [PATCH 17/40] updated --- pybop/optimisers/_random_search.py | 58 ++++++++++++++++++++---------- 1 file changed, 40 insertions(+), 18 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index 9a74c7b3..3ac2500a 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -30,16 +30,25 @@ def __init__(self, x0, sigma0=0.05, boundaries=None): super().__init__(x0, sigma0, boundaries=boundaries) - self.step_size = self._sigma0 + # Optimisation parameters + self._population_size = self._population_size + self._iterations = 0 - # Initialise best solutions - self._x_best = np.copy(x0) - self._f_best = np.inf + # Set boundaries + self._boundaries = boundaries - # Iteration counter - self._iterations = 0 + # Initialise population + self._candidates = np.random.uniform( + low=self._boundaries.lower(), + high=self._boundaries.upper(), + size=(self._population_size, self._dim), + ) if self._boundaries else np.random.normal( + self._x0, self._sigma0, size=(self._n, self._dim) + ) - # Flags + # Initialise best solution + self._x_best = np.copy(x0) + self._f_best = np.inf self._running = False self._ready_for_tell = False @@ -52,12 +61,17 @@ def ask(self): self._running = True # Generate random solutions within the boundaries - self._candidates = np.random.uniform( - low=self._boundaries.lower(), - high=self._boundaries.upper(), - size=(self._population_size, self._dim), - ) - return self._candidates + if self._boundaries: + self._candidates = np.random.uniform( + low=self._boundaries.lower(), + high=self._boundaries.upper(), + size=(self._population_size, self._dim), + ) + else: + self._candidates = np.random.normal( + self._x0, self._sigma0, size=(self._n, self._dim) + ) + return self.clip_candidates(self._candidates) def tell(self, replies): """ @@ -69,12 +83,12 @@ def tell(self, replies): raise RuntimeError("Optimiser not ready for tell()") self._iterations += 1 - self._ready_for_tell = False - # Update the best solution - for i, fitness in enumerate(replies): - if fitness < self._f_best: - self._f_best = fitness + # Evaluate solutions and update the best + for i in range(self._population_size): + f_new = replies[i] + if f_new < self._f_best: + self._f_best = f_new self._x_best = self._candidates[i] def running(self): @@ -101,6 +115,14 @@ def name(self): """ return "Random Search" + def clip_candidates(self, x): + """ + Clip the input array to the boundaries if available. + """ + if self._boundaries: + x = np.clip(x, self._boundaries.lower(), self._boundaries.upper()) + return x + def _suggested_population_size(self): """ Returns a suggested population size based on the dimension of the parameter space. 
From d3d146cc40afcb6c996df802516da598d75d0e9d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 17:43:45 +0000 Subject: [PATCH 18/40] style: pre-commit fixes --- pybop/optimisers/_random_search.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index 3ac2500a..52b902b2 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -38,12 +38,14 @@ def __init__(self, x0, sigma0=0.05, boundaries=None): self._boundaries = boundaries # Initialise population - self._candidates = np.random.uniform( - low=self._boundaries.lower(), - high=self._boundaries.upper(), - size=(self._population_size, self._dim), - ) if self._boundaries else np.random.normal( - self._x0, self._sigma0, size=(self._n, self._dim) + self._candidates = ( + np.random.uniform( + low=self._boundaries.lower(), + high=self._boundaries.upper(), + size=(self._population_size, self._dim), + ) + if self._boundaries + else np.random.normal(self._x0, self._sigma0, size=(self._n, self._dim)) ) # Initialise best solution @@ -69,7 +71,7 @@ def ask(self): ) else: self._candidates = np.random.normal( - self._x0, self._sigma0, size=(self._n, self._dim) + self._x0, self._sigma0, size=(self._n, self._dim) ) return self.clip_candidates(self._candidates) From f2842bb12d96448f68d84c7948f3ac6a2fe3d185 Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Tue, 17 Dec 2024 22:41:03 +0000 Subject: [PATCH 19/40] randomsearch modified --- pybop/optimisers/_random_search.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index 3ac2500a..c405fb69 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -31,7 +31,7 @@ def __init__(self, x0, sigma0=0.05, boundaries=None): super().__init__(x0, sigma0, boundaries=boundaries) # Optimisation parameters - self._population_size = self._population_size + self._n = self._population_size self._iterations = 0 # Set boundaries @@ -41,7 +41,7 @@ def __init__(self, x0, sigma0=0.05, boundaries=None): self._candidates = np.random.uniform( low=self._boundaries.lower(), high=self._boundaries.upper(), - size=(self._population_size, self._dim), + size=(self._n, self._dim), ) if self._boundaries else np.random.normal( self._x0, self._sigma0, size=(self._n, self._dim) ) @@ -65,7 +65,7 @@ def ask(self): self._candidates = np.random.uniform( low=self._boundaries.lower(), high=self._boundaries.upper(), - size=(self._population_size, self._dim), + size=(self._n, self._dim), ) else: self._candidates = np.random.normal( @@ -80,12 +80,12 @@ def tell(self, replies): accordingly. 
""" if not self._ready_for_tell: - raise RuntimeError("Optimiser not ready for tell()") + raise RuntimeError("ask() must be called before tell().") self._iterations += 1 # Evaluate solutions and update the best - for i in range(self._population_size): + for i in range(self._n): f_new = replies[i] if f_new < self._f_best: self._f_best = f_new From 7fd4a0f445438496331e7ed24f9b70c179fdfc70 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 22:56:17 +0000 Subject: [PATCH 20/40] style: pre-commit fixes --- pybop/optimisers/_random_search.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index c405fb69..683435c7 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -38,12 +38,14 @@ def __init__(self, x0, sigma0=0.05, boundaries=None): self._boundaries = boundaries # Initialise population - self._candidates = np.random.uniform( - low=self._boundaries.lower(), - high=self._boundaries.upper(), - size=(self._n, self._dim), - ) if self._boundaries else np.random.normal( - self._x0, self._sigma0, size=(self._n, self._dim) + self._candidates = ( + np.random.uniform( + low=self._boundaries.lower(), + high=self._boundaries.upper(), + size=(self._n, self._dim), + ) + if self._boundaries + else np.random.normal(self._x0, self._sigma0, size=(self._n, self._dim)) ) # Initialise best solution @@ -69,7 +71,7 @@ def ask(self): ) else: self._candidates = np.random.normal( - self._x0, self._sigma0, size=(self._n, self._dim) + self._x0, self._sigma0, size=(self._n, self._dim) ) return self.clip_candidates(self._candidates) From 2024b72b73438494f93e7bf5c56f1b1f55689012 Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Tue, 17 Dec 2024 23:05:28 +0000 Subject: [PATCH 21/40] boundary logic updated --- pybop/optimisers/_random_search.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index 683435c7..177ee859 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -34,9 +34,6 @@ def __init__(self, x0, sigma0=0.05, boundaries=None): self._n = self._population_size self._iterations = 0 - # Set boundaries - self._boundaries = boundaries - # Initialise population self._candidates = ( np.random.uniform( From 8435f5679b4dcf8d1f89f044f494e28d698502cf Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Tue, 17 Dec 2024 23:38:52 +0000 Subject: [PATCH 22/40] unit tests updated --- tests/unit/test_optimisation.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index d17bbbec..c98cb88a 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -265,7 +265,6 @@ def check_bounds_handling(optim, expected_bounds, should_raise=False): pybop.AdamW, pybop.CuckooSearch, pybop.GradientDescent, - pybop.RandomSearch, ]: optim = optimiser(cost) with pytest.raises( From cd8b4d224e17f0c29e998bfeae6433618c0febf7 Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Wed, 18 Dec 2024 00:08:58 +0000 Subject: [PATCH 23/40] unit tests updated --- tests/unit/test_optimisation.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index c98cb88a..877ea69c 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -265,6 +265,7 @@ 
def check_bounds_handling(optim, expected_bounds, should_raise=False): pybop.AdamW, pybop.CuckooSearch, pybop.GradientDescent, + pybop.RandomSearch, ]: optim = optimiser(cost) with pytest.raises( @@ -312,6 +313,11 @@ def check_bounds_handling(optim, expected_bounds, should_raise=False): assert optim.optimiser.n_hyper_parameters() == 5 assert optim.optimiser.x_guessed() == optim.optimiser._x0 + if optimiser is pybop.RandomSearch: + assert optimiser._dim == len(optim.optimiser._x0) + assert optimiser.x_best().shape == optim.optimiser._x0.shape + assert optimiser.f_best() == np.inf + assert optimiser._iterations == 0 else: x0 = cost.parameters.initial_value() assert optim.x0 == x0 From 73575ea5e53c0c5ef42575b6208b72ea5d8392b3 Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Wed, 18 Dec 2024 00:25:05 +0000 Subject: [PATCH 24/40] unit test changed --- tests/unit/test_optimisation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 877ea69c..493fdf82 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -265,7 +265,7 @@ def check_bounds_handling(optim, expected_bounds, should_raise=False): pybop.AdamW, pybop.CuckooSearch, pybop.GradientDescent, - pybop.RandomSearch, + #pybop.RandomSearch, ]: optim = optimiser(cost) with pytest.raises( From 12890bb0df11c89091edcee88343925cefaea36d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 00:26:28 +0000 Subject: [PATCH 25/40] style: pre-commit fixes --- tests/unit/test_optimisation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 493fdf82..89f68b9d 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -265,7 +265,7 @@ def check_bounds_handling(optim, expected_bounds, should_raise=False): pybop.AdamW, pybop.CuckooSearch, pybop.GradientDescent, - #pybop.RandomSearch, + # pybop.RandomSearch, ]: optim = optimiser(cost) with pytest.raises( From 920cb6b03f868c45bba8cc4910d36b2abe067075 Mon Sep 17 00:00:00 2001 From: Brady Planden Date: Wed, 18 Dec 2024 10:53:41 +0000 Subject: [PATCH 26/40] fix: RandomSearch with multistart --- pybop/optimisers/pints_optimisers.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pybop/optimisers/pints_optimisers.py b/pybop/optimisers/pints_optimisers.py index 4ed012ad..38ef5d87 100644 --- a/pybop/optimisers/pints_optimisers.py +++ b/pybop/optimisers/pints_optimisers.py @@ -703,6 +703,7 @@ def __init__( max_iterations: int = None, min_iterations: int = 2, max_unchanged_iterations: int = 15, + multistart: int = 1, parallel: bool = False, **optimiser_kwargs, ): @@ -712,6 +713,7 @@ def __init__( max_iterations, min_iterations, max_unchanged_iterations, + multistart, parallel, **optimiser_kwargs, ) From 37b9b97bc44db36d2c3dc02d680ddd5cfdbc2f64 Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Wed, 18 Dec 2024 12:20:32 +0000 Subject: [PATCH 27/40] unit test updated --- tests/unit/test_optimisation.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 4d205f2b..384286d7 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -276,7 +276,7 @@ def check_multistart(optim, n_iters, multistarts): pybop.AdamW, pybop.CuckooSearch, pybop.GradientDescent, - # 
pybop.RandomSearch, + pybop.RandomSearch, ]: optim = optimiser(cost) with pytest.raises( @@ -324,11 +324,11 @@ def check_multistart(optim, n_iters, multistarts): assert optim.optimiser.n_hyper_parameters() == 5 assert optim.optimiser.x_guessed() == optim.optimiser._x0 - if optimiser is pybop.RandomSearch: - assert optimiser._dim == len(optim.optimiser._x0) - assert optimiser.x_best().shape == optim.optimiser._x0.shape - assert optimiser.f_best() == np.inf - assert optimiser._iterations == 0 + # if optimiser is pybop.RandomSearch: + # assert optimiser._dim == len(optim.optimiser._x0) + # assert optimiser.x_best().shape == optim.optimiser._x0.shape + # assert optimiser.f_best() == np.inf + # assert optimiser._iterations == 0 else: x0 = cost.parameters.initial_value() assert optim.x0 == x0 @@ -343,11 +343,11 @@ def test_cuckoo_no_bounds(self, cost): optim.run() assert optim.optimiser._boundaries is None - # @pytest.mark.unit - # def test_randomsearch(self, cost): - # optim = pybop.RandomSearch(cost=cost, max_iterations=1) - # results = optim.run() - # assert results.final_cost is not None + @pytest.mark.unit + def test_randomsearch(self, cost): + optim = pybop.RandomSearch(cost=cost, bounds=None, max_iterations=1) + optim.run() + assert optim.optimiser._boundaries is None @pytest.mark.unit def test_scipy_minimize_with_jac(self, cost): From 03a7a05a38438c8e3cdf74262925d9f67f15de79 Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Wed, 18 Dec 2024 12:46:26 +0000 Subject: [PATCH 28/40] unit test modified --- tests/unit/test_optimisation.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 384286d7..e07c059f 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -324,11 +324,6 @@ def check_multistart(optim, n_iters, multistarts): assert optim.optimiser.n_hyper_parameters() == 5 assert optim.optimiser.x_guessed() == optim.optimiser._x0 - # if optimiser is pybop.RandomSearch: - # assert optimiser._dim == len(optim.optimiser._x0) - # assert optimiser.x_best().shape == optim.optimiser._x0.shape - # assert optimiser.f_best() == np.inf - # assert optimiser._iterations == 0 else: x0 = cost.parameters.initial_value() assert optim.x0 == x0 From b230a62d8cd59ac9bbaee04d890bbb60ec575582 Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Wed, 18 Dec 2024 15:56:43 +0000 Subject: [PATCH 29/40] suggested changes incorporated --- examples/scripts/comparison_examples/random_search.py | 2 +- pybop/optimisers/_random_search.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/scripts/comparison_examples/random_search.py b/examples/scripts/comparison_examples/random_search.py index 04e0d8ed..ce1e50da 100644 --- a/examples/scripts/comparison_examples/random_search.py +++ b/examples/scripts/comparison_examples/random_search.py @@ -70,4 +70,4 @@ pybop.plot.parameters(optim) # Plot the cost landscape with optimisation path -pybop.plot.contour(optim, steps=15) +pybop.plot.contour(optim, steps=10) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index 177ee859..4bcc2866 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -25,11 +25,11 @@ class RandomSearchImpl(PopulationBasedOptimiser): """ def __init__(self, x0, sigma0=0.05, boundaries=None): - # Problem dimensionality - self._dim = len(x0) # Initialize _dim first - super().__init__(x0, sigma0, boundaries=boundaries) + # Problem dimensionality + self._dim 
= len(x0) + # Optimisation parameters self._n = self._population_size self._iterations = 0 From 95eaddb4384a71b86bc9ebd3167da5b8175f898c Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Wed, 18 Dec 2024 16:20:54 +0000 Subject: [PATCH 30/40] suggested changes updated --- pybop/optimisers/_random_search.py | 49 ++++++++++------------------ pybop/optimisers/pints_optimisers.py | 6 ++++ 2 files changed, 23 insertions(+), 32 deletions(-) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index 4bcc2866..97f903f7 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -7,21 +7,21 @@ class RandomSearchImpl(PopulationBasedOptimiser): Random Search (RS) optimisation algorithm. This algorithm explores the parameter space by randomly sampling points. - The algorithm is simple: + The algorithm does the following: 1. Initialise a population of solutions. - 2. At each iteration, generate new random solutions within boundaries. - 3. Evaluate the quality/fitness of the solutions. - 4. Update the best solution found so far. + 2. At each iteration, generate `n` number of random positions within boundaries. + 3. Evaluate the quality/fitness of the positions. + 4. Replace the best position with improved position if found. Parameters: - - population_size (optional): Number of solutions to evaluate per iteration. + population_size (optional): Number of solutions to evaluate per iteration. References: - - The Random Search algorithm implemented in this work is based on principles outlined + The Random Search algorithm implemented in this work is based on principles outlined in "Introduction to Stochastic Search and Optimization: Estimation, Simulation, and Control" by Spall, J. C. (2003). - The implementation leverages the pints library framework, which provides tools for - population-based optimization methods. + + The implementation inherits from the PINTS PopulationOptimiser. """ def __init__(self, x0, sigma0=0.05, boundaries=None): @@ -32,18 +32,6 @@ def __init__(self, x0, sigma0=0.05, boundaries=None): # Optimisation parameters self._n = self._population_size - self._iterations = 0 - - # Initialise population - self._candidates = ( - np.random.uniform( - low=self._boundaries.lower(), - high=self._boundaries.upper(), - size=(self._n, self._dim), - ) - if self._boundaries - else np.random.normal(self._x0, self._sigma0, size=(self._n, self._dim)) - ) # Initialise best solution self._x_best = np.copy(x0) @@ -53,36 +41,33 @@ def __init__(self, x0, sigma0=0.05, boundaries=None): def ask(self): """ - Returns a list of next points in the parameter-space - to evaluate from the optimiser. + Returns a list of positions to evaluate in the optimiser-space. """ self._ready_for_tell = True self._running = True - # Generate random solutions within the boundaries + # Generate random solutions if self._boundaries: self._candidates = np.random.uniform( low=self._boundaries.lower(), high=self._boundaries.upper(), size=(self._n, self._dim), ) - else: - self._candidates = np.random.normal( - self._x0, self._sigma0, size=(self._n, self._dim) - ) + return self._candidates + + self._candidates = np.random.normal( + self._x0, self._sigma0, size=(self._n, self._dim) + ) return self.clip_candidates(self._candidates) def tell(self, replies): """ - Receives a list of function values from the cost function from points - previously specified by `self.ask()`, and updates the optimiser state - accordingly. 
+ Receives a list of cost function values from points previously specified + by `self.ask()`, and updates the optimiser state accordingly. """ if not self._ready_for_tell: raise RuntimeError("ask() must be called before tell().") - self._iterations += 1 - # Evaluate solutions and update the best for i in range(self._n): f_new = replies[i] diff --git a/pybop/optimisers/pints_optimisers.py b/pybop/optimisers/pints_optimisers.py index 38ef5d87..0cfe551b 100644 --- a/pybop/optimisers/pints_optimisers.py +++ b/pybop/optimisers/pints_optimisers.py @@ -662,6 +662,9 @@ class RandomSearch(BasePintsOptimiser): Random Search is a simple optimisation algorithm that samples parameter sets randomly within the given boundaries and identifies the best solution based on fitness. + This optimiser has been implemented for benchmarking and comparisons, convergence will be + better with one of other optimisers in the majority of cases. + Parameters ---------- cost : callable @@ -672,6 +675,9 @@ class RandomSearch(BasePintsOptimiser): Minimum number of iterations before termination. max_unchanged_iterations : int, optional (default=15) Maximum number of iterations without improvement before termination. + multistart : int, optional (default=1) + Number of optimiser restarts from randomly sample position. These positions + are sampled from the priors. parallel : bool, optional (default=False) Whether to run the optimisation in parallel. **optimiser_kwargs : optional From f08333066cb9517d5543ab3097a1844655b313ef Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Wed, 18 Dec 2024 19:38:12 +0000 Subject: [PATCH 31/40] unit tests for RandomSearch added --- tests/unit/test_optimisation.py | 33 +++++++++++++++++++++++++++++---- 1 file changed, 29 insertions(+), 4 deletions(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index e07c059f..24f060c6 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -339,10 +339,35 @@ def test_cuckoo_no_bounds(self, cost): assert optim.optimiser._boundaries is None @pytest.mark.unit - def test_randomsearch(self, cost): - optim = pybop.RandomSearch(cost=cost, bounds=None, max_iterations=1) - optim.run() - assert optim.optimiser._boundaries is None + def test_randomsearch_bounds(self, two_param_cost): + # Initialize RandomSearch with boundaries + bounds = {"upper": [0.62,0.57], "lower": [0.58,0.53]} + + optimiser = pybop.RandomSearch(cost=two_param_cost, bounds=bounds, max_iterations=1) + + # Define candidates outside boundaries + candidates = np.array([[0.57, 0.52], [0.63, 0.58]]) + + # Use clip_candidates to clip to boundaries + clipped_candidates = optimiser.optimiser.clip_candidates(candidates) + + # Expected clipped candidates + expected_clipped = np.array([[0.58, 0.53], [0.62, 0.57]]) + + # Assert that the clipped candidates match the expected values + assert np.allclose(clipped_candidates, expected_clipped) + + # Initialize optimiser without boundaries + optimiser = pybop.RandomSearch(cost=two_param_cost, bounds=None, max_iterations=1) + + # Define candidates outside typical boundaries + candidates = np.array([[0.57, 0.52], [0.63, 0.58]]) + + # Use clip_candidates, which should return the input unmodified + clipped_candidates = optimiser.optimiser.clip_candidates(candidates) + + # Assert that the clipped candidates are unchanged + assert np.allclose(clipped_candidates, candidates) @pytest.mark.unit def test_scipy_minimize_with_jac(self, cost): From 0224762a5af600970412d5933a4e221b523386d9 Mon Sep 17 00:00:00 2001 
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 19:38:49 +0000 Subject: [PATCH 32/40] style: pre-commit fixes --- tests/unit/test_optimisation.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 24f060c6..4cb5d6d6 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -341,9 +341,11 @@ def test_cuckoo_no_bounds(self, cost): @pytest.mark.unit def test_randomsearch_bounds(self, two_param_cost): # Initialize RandomSearch with boundaries - bounds = {"upper": [0.62,0.57], "lower": [0.58,0.53]} + bounds = {"upper": [0.62, 0.57], "lower": [0.58, 0.53]} - optimiser = pybop.RandomSearch(cost=two_param_cost, bounds=bounds, max_iterations=1) + optimiser = pybop.RandomSearch( + cost=two_param_cost, bounds=bounds, max_iterations=1 + ) # Define candidates outside boundaries candidates = np.array([[0.57, 0.52], [0.63, 0.58]]) @@ -358,7 +360,9 @@ def test_randomsearch_bounds(self, two_param_cost): assert np.allclose(clipped_candidates, expected_clipped) # Initialize optimiser without boundaries - optimiser = pybop.RandomSearch(cost=two_param_cost, bounds=None, max_iterations=1) + optimiser = pybop.RandomSearch( + cost=two_param_cost, bounds=None, max_iterations=1 + ) # Define candidates outside typical boundaries candidates = np.array([[0.57, 0.52], [0.63, 0.58]]) From 896139f10eadf7f1b4a7ba9cd34bd448c9a2489f Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Wed, 18 Dec 2024 20:13:10 +0000 Subject: [PATCH 33/40] unit tests modified --- tests/unit/test_optimisation.py | 19 ++----------------- 1 file changed, 2 insertions(+), 17 deletions(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 24f060c6..329d1ffa 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -340,33 +340,18 @@ def test_cuckoo_no_bounds(self, cost): @pytest.mark.unit def test_randomsearch_bounds(self, two_param_cost): - # Initialize RandomSearch with boundaries + # Test clip_candidates with bound bounds = {"upper": [0.62,0.57], "lower": [0.58,0.53]} - optimiser = pybop.RandomSearch(cost=two_param_cost, bounds=bounds, max_iterations=1) - - # Define candidates outside boundaries candidates = np.array([[0.57, 0.52], [0.63, 0.58]]) - - # Use clip_candidates to clip to boundaries clipped_candidates = optimiser.optimiser.clip_candidates(candidates) - - # Expected clipped candidates expected_clipped = np.array([[0.58, 0.53], [0.62, 0.57]]) - - # Assert that the clipped candidates match the expected values assert np.allclose(clipped_candidates, expected_clipped) - # Initialize optimiser without boundaries + # Test clip_candidates without bound optimiser = pybop.RandomSearch(cost=two_param_cost, bounds=None, max_iterations=1) - - # Define candidates outside typical boundaries candidates = np.array([[0.57, 0.52], [0.63, 0.58]]) - - # Use clip_candidates, which should return the input unmodified clipped_candidates = optimiser.optimiser.clip_candidates(candidates) - - # Assert that the clipped candidates are unchanged assert np.allclose(clipped_candidates, candidates) @pytest.mark.unit From fb7ee337853e2784a093d215956475075c7efafb Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 20:15:30 +0000 Subject: [PATCH 34/40] style: pre-commit fixes --- tests/unit/test_optimisation.py | 10 +++++++--- 1 file changed, 7 
insertions(+), 3 deletions(-)

diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py
index 329d1ffa..ec9ae1d6 100644
--- a/tests/unit/test_optimisation.py
+++ b/tests/unit/test_optimisation.py
@@ -341,15 +341,19 @@ def test_cuckoo_no_bounds(self, cost):
     @pytest.mark.unit
     def test_randomsearch_bounds(self, two_param_cost):
         # Test clip_candidates with bound
-        bounds = {"upper": [0.62,0.57], "lower": [0.58,0.53]}
-        optimiser = pybop.RandomSearch(cost=two_param_cost, bounds=bounds, max_iterations=1)
+        bounds = {"upper": [0.62, 0.57], "lower": [0.58, 0.53]}
+        optimiser = pybop.RandomSearch(
+            cost=two_param_cost, bounds=bounds, max_iterations=1
+        )
         candidates = np.array([[0.57, 0.52], [0.63, 0.58]])
         clipped_candidates = optimiser.optimiser.clip_candidates(candidates)
         expected_clipped = np.array([[0.58, 0.53], [0.62, 0.57]])
         assert np.allclose(clipped_candidates, expected_clipped)

         # Test clip_candidates without bound
-        optimiser = pybop.RandomSearch(cost=two_param_cost, bounds=None, max_iterations=1)
+        optimiser = pybop.RandomSearch(
+            cost=two_param_cost, bounds=None, max_iterations=1
+        )
         candidates = np.array([[0.57, 0.52], [0.63, 0.58]])
         clipped_candidates = optimiser.optimiser.clip_candidates(candidates)
         assert np.allclose(clipped_candidates, candidates)

From 77cbd7e5f0ab7a54000c17a9813f92498469a05c Mon Sep 17 00:00:00 2001
From: Dibyendu-IITKGP
Date: Wed, 18 Dec 2024 20:30:07 +0000
Subject: [PATCH 35/40] changelog updated

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 657e7ec4..f1256304 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -20,6 +20,7 @@

 ## Optimisations

+- [#580](https://github.com/pybop-team/PyBOP/pull/580) - Adds the Random Search optimiser.
 - [#512](https://github.com/pybop-team/PyBOP/pull/513) - Refactors `LogPosterior` with attributes pointing to composed likelihood object.
- [#551](https://github.com/pybop-team/PyBOP/pull/551) - Refactors Optimiser arguments, `population_size` and `max_iterations` as default args, improves optimiser docstrings From b89bf59b84df068e6407d0f655514c6b67eded4a Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Wed, 18 Dec 2024 21:26:58 +0000 Subject: [PATCH 36/40] unit tests added --- tests/unit/test_optimisation.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 3f3ecd51..71356a25 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -364,6 +364,23 @@ def test_randomsearch_bounds(self, two_param_cost): clipped_candidates = optimiser.optimiser.clip_candidates(candidates) assert np.allclose(clipped_candidates, candidates) + @pytest.mark.unit + def test_randomsearch_ask_without_bounds(two_param_cost): + # Initialize optimiser without boundaries + optimiser = pybop.RandomSearch(cost=two_param_cost, bounds=None, max_iterations=1) + optimiser.optimiser._x0 = np.array([0.6, 0.55]) + optimiser.optimiser._sigma0 = 0.05 + optimiser.optimiser._n = 2 + optimiser.optimiser._dim = 2 + + # Call ask to generate candidates + candidates = optimiser.optimiser.ask() + + # Assert the shape of generated candidates + assert candidates.shape == (2, 2) + assert np.all(candidates >= optimiser.optimiser._x0 - 3 * optimiser.optimiser._sigma0) + assert np.all(candidates <= optimiser.optimiser._x0 + 3 * optimiser.optimiser._sigma0) + @pytest.mark.unit def test_scipy_minimize_with_jac(self, cost): # Check a method that uses gradient information From fffc0db6c169b9cb99e3554f4feab197a8f24d97 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 21:27:17 +0000 Subject: [PATCH 37/40] style: pre-commit fixes --- tests/unit/test_optimisation.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 71356a25..2dcb6e17 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -367,7 +367,9 @@ def test_randomsearch_bounds(self, two_param_cost): @pytest.mark.unit def test_randomsearch_ask_without_bounds(two_param_cost): # Initialize optimiser without boundaries - optimiser = pybop.RandomSearch(cost=two_param_cost, bounds=None, max_iterations=1) + optimiser = pybop.RandomSearch( + cost=two_param_cost, bounds=None, max_iterations=1 + ) optimiser.optimiser._x0 = np.array([0.6, 0.55]) optimiser.optimiser._sigma0 = 0.05 optimiser.optimiser._n = 2 @@ -378,8 +380,12 @@ def test_randomsearch_ask_without_bounds(two_param_cost): # Assert the shape of generated candidates assert candidates.shape == (2, 2) - assert np.all(candidates >= optimiser.optimiser._x0 - 3 * optimiser.optimiser._sigma0) - assert np.all(candidates <= optimiser.optimiser._x0 + 3 * optimiser.optimiser._sigma0) + assert np.all( + candidates >= optimiser.optimiser._x0 - 3 * optimiser.optimiser._sigma0 + ) + assert np.all( + candidates <= optimiser.optimiser._x0 + 3 * optimiser.optimiser._sigma0 + ) @pytest.mark.unit def test_scipy_minimize_with_jac(self, cost): From a4285f889b4408ef58f0b53f59f3aed342dc42ff Mon Sep 17 00:00:00 2001 From: Dibyendu-IITKGP Date: Wed, 18 Dec 2024 21:34:05 +0000 Subject: [PATCH 38/40] unit test modified --- tests/unit/test_optimisation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_optimisation.py 
b/tests/unit/test_optimisation.py index 2dcb6e17..01968ddc 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -365,7 +365,7 @@ def test_randomsearch_bounds(self, two_param_cost): assert np.allclose(clipped_candidates, candidates) @pytest.mark.unit - def test_randomsearch_ask_without_bounds(two_param_cost): + def test_randomsearch_ask_without_bounds(self, two_param_cost): # Initialize optimiser without boundaries optimiser = pybop.RandomSearch( cost=two_param_cost, bounds=None, max_iterations=1 From aaf0a7796b44502805917d4f250ebea5998c4678 Mon Sep 17 00:00:00 2001 From: Brady Planden <55357039+BradyPlanden@users.noreply.github.com> Date: Sat, 21 Dec 2024 13:43:59 +0000 Subject: [PATCH 39/40] Apply suggestions from code review --- tests/unit/test_optimisation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 01968ddc..43a2708d 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -381,10 +381,10 @@ def test_randomsearch_ask_without_bounds(self, two_param_cost): # Assert the shape of generated candidates assert candidates.shape == (2, 2) assert np.all( - candidates >= optimiser.optimiser._x0 - 3 * optimiser.optimiser._sigma0 + candidates >= optimiser.optimiser._x0 - 6 * optimiser.optimiser._sigma0 ) assert np.all( - candidates <= optimiser.optimiser._x0 + 3 * optimiser.optimiser._sigma0 + candidates <= optimiser.optimiser._x0 + 6 * optimiser.optimiser._sigma0 ) @pytest.mark.unit From 753a6de998e10ab72e7d66158afde1d46316c93e Mon Sep 17 00:00:00 2001 From: Brady Planden Date: Sat, 21 Dec 2024 14:21:30 +0000 Subject: [PATCH 40/40] Updates unit tests, upper pin to BPX, bugfix _cuckoo and _random_search for population_size setting. 
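
Both the cuckoo and random-search implementations previously copied the
population size into a private `self._n` attribute at construction, so a
later `set_population_size()` call updated `self._population_size` but not
the number of candidates actually drawn in `ask()`. The samplers now read
`self._population_size` directly. A minimal standalone sketch of the
pitfall and the fix (hypothetical class names, plain NumPy, not PyBOP
code):

    import numpy as np

    class CachedSizeSampler:
        """Anti-pattern: the population size is copied once, at __init__."""

        def __init__(self, x0, sigma0=0.05, population_size=6):
            self._x0 = np.asarray(x0, dtype=float)
            self._sigma0 = sigma0
            self._population_size = population_size
            self._n = self._population_size  # stale after set_population_size()

        def set_population_size(self, n):
            self._population_size = n  # note: _n is not updated

        def ask(self):
            # Draws _n candidates, ignoring any later population-size change
            return np.random.normal(
                self._x0, self._sigma0, size=(self._n, len(self._x0))
            )

    class LiveSizeSampler(CachedSizeSampler):
        """Fix: always read _population_size when sampling."""

        def ask(self):
            return np.random.normal(
                self._x0, self._sigma0, size=(self._population_size, len(self._x0))
            )

    for cls in (CachedSizeSampler, LiveSizeSampler):
        opt = cls(x0=[0.6, 0.55])
        opt.set_population_size(2)
        print(cls.__name__, opt.ask().shape)  # (6, 2), then (2, 2) once fixed
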
--- pybop/optimisers/_cuckoo.py | 11 +++++------ pybop/optimisers/_random_search.py | 9 +++------ pyproject.toml | 2 +- tests/unit/test_optimisation.py | 27 ++++++++++++--------------- 4 files changed, 21 insertions(+), 28 deletions(-) diff --git a/pybop/optimisers/_cuckoo.py b/pybop/optimisers/_cuckoo.py index dba591d7..b6f2e5c1 100644 --- a/pybop/optimisers/_cuckoo.py +++ b/pybop/optimisers/_cuckoo.py @@ -54,7 +54,6 @@ def __init__(self, x0, sigma0=0.05, boundaries=None, pa=0.25): self._dim = len(x0) # Population size and abandon rate - self._n = self._population_size self._pa = pa self.step_size = self._sigma0 self.beta = 1.5 @@ -68,14 +67,14 @@ def __init__(self, x0, sigma0=0.05, boundaries=None, pa=0.25): self._nests = np.random.uniform( low=self._boundaries.lower(), high=self._boundaries.upper(), - size=(self._n, self._dim), + size=(self._population_size, self._dim), ) else: self._nests = np.random.normal( - self._x0, self._sigma0, size=(self._n, self._dim) + self._x0, self._sigma0, size=(self._population_size, self._dim) ) - self._fitness = np.full(self._n, np.inf) + self._fitness = np.full(self._population_size, np.inf) # Initialise best solutions self._x_best = np.copy(x0) @@ -112,7 +111,7 @@ def tell(self, replies): self._iterations += 1 # Compare cuckoos with current nests - for i in range(self._n): + for i in range(self._population_size): f_new = replies[i] if f_new < self._fitness[i]: self._nests[i] = self.cuckoos[i] @@ -122,7 +121,7 @@ def tell(self, replies): self._x_best = self.cuckoos[i] # Abandon some worse nests - n_abandon = int(self._pa * self._n) + n_abandon = int(self._pa * self._population_size) worst_nests = np.argsort(self._fitness)[-n_abandon:] for idx in worst_nests: self.abandon_nests(idx) diff --git a/pybop/optimisers/_random_search.py b/pybop/optimisers/_random_search.py index 97f903f7..d3737801 100644 --- a/pybop/optimisers/_random_search.py +++ b/pybop/optimisers/_random_search.py @@ -30,9 +30,6 @@ def __init__(self, x0, sigma0=0.05, boundaries=None): # Problem dimensionality self._dim = len(x0) - # Optimisation parameters - self._n = self._population_size - # Initialise best solution self._x_best = np.copy(x0) self._f_best = np.inf @@ -51,12 +48,12 @@ def ask(self): self._candidates = np.random.uniform( low=self._boundaries.lower(), high=self._boundaries.upper(), - size=(self._n, self._dim), + size=(self._population_size, self._dim), ) return self._candidates self._candidates = np.random.normal( - self._x0, self._sigma0, size=(self._n, self._dim) + self._x0, self._sigma0, size=(self._population_size, self._dim) ) return self.clip_candidates(self._candidates) @@ -69,7 +66,7 @@ def tell(self, replies): raise RuntimeError("ask() must be called before tell().") # Evaluate solutions and update the best - for i in range(self._n): + for i in range(self._population_size): f_new = replies[i] if f_new < self._f_best: self._f_best = f_new diff --git a/pyproject.toml b/pyproject.toml index 5e1c7444..7a2fd4f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ scifem = [ "scikit-fem>=8.1.0" # scikit-fem is a dependency for the multi-dimensional pybamm models ] bpx = [ - "bpx>=0.4", + "bpx<0.5", ] all = ["pybop[plot,scifem,bpx]"] diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index 43a2708d..d69d93c8 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -367,25 +367,22 @@ def test_randomsearch_bounds(self, two_param_cost): @pytest.mark.unit def test_randomsearch_ask_without_bounds(self, 
two_param_cost): # Initialize optimiser without boundaries - optimiser = pybop.RandomSearch( - cost=two_param_cost, bounds=None, max_iterations=1 + optim = pybop.RandomSearch( + cost=two_param_cost, + sigma0=0.05, + x0=[0.6, 0.55], + bounds=None, + max_iterations=1, ) - optimiser.optimiser._x0 = np.array([0.6, 0.55]) - optimiser.optimiser._sigma0 = 0.05 - optimiser.optimiser._n = 2 - optimiser.optimiser._dim = 2 - # Call ask to generate candidates - candidates = optimiser.optimiser.ask() + # Set population size, generate candidates + optim.set_population_size(2) + candidates = optim.optimiser.ask() - # Assert the shape of generated candidates + # Assert the shape of the candidates assert candidates.shape == (2, 2) - assert np.all( - candidates >= optimiser.optimiser._x0 - 6 * optimiser.optimiser._sigma0 - ) - assert np.all( - candidates <= optimiser.optimiser._x0 + 6 * optimiser.optimiser._sigma0 - ) + assert np.all(candidates >= optim.optimiser._x0 - 6 * optim.optimiser._sigma0) + assert np.all(candidates <= optim.optimiser._x0 + 6 * optim.optimiser._sigma0) @pytest.mark.unit def test_scipy_minimize_with_jac(self, cost):
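
Note: with `bounds=None` the candidates are drawn from a normal distribution
centred on `x0` with standard deviation `sigma0`, so the `x0 ± 6 * sigma0`
window asserted above is all but guaranteed to contain every draw (the
probability of a single component falling outside is on the order of 1e-9).
A quick standalone check of that bound (plain NumPy, not PyBOP code):

    import numpy as np

    rng = np.random.default_rng(0)
    x0, sigma0 = np.array([0.6, 0.55]), 0.05
    candidates = rng.normal(loc=x0, scale=sigma0, size=(2, 2))
    assert np.all(np.abs(candidates - x0) <= 6 * sigma0)
    print(candidates)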