random_search optimiser added in the pints framework (#580)
* random_search optimiser added in the pints framework

* style: pre-commit fixes

* description texts updated

* style: pre-commit fixes

* random_search updated

* population size input in random search modified

* example updated

* unit tests added for randomsearch

* style: pre-commit fixes

* none type boundary handling modified

* style: pre-commit fixes

* updated

* updated

* unit tests updated

* unit test updated

* unit tests modified

* updated

* style: pre-commit fixes

* randomsearch modified

* style: pre-commit fixes

* boundary logic updated

* unit tests updated

* unit tests updated

* unit test changed

* style: pre-commit fixes

* fix: RandomSearch with multistart

* unit test updated

* unit test modified

* suggested changes incorporated

* suggested changes updated

* unit tests for RandomSearch added

* style: pre-commit fixes

* unit tests modified

* style: pre-commit fixes

* changelog updated

* unit tests added

* style: pre-commit fixes

* unit test modified

* Apply suggestions from code review

* Updates unit tests, upper pin to BPX, bugfix _cuckoo and _random_search for population_size setting.

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Brady Planden <[email protected]>
4 people authored Dec 21, 2024
1 parent c637f62 commit a15f7c4
Showing 7 changed files with 311 additions and 7 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -22,6 +22,7 @@

## Optimisations

- [#580](https://github.com/pybop-team/PyBOP/pull/580) - Random Search optimiser is implemented.
- [#588](https://github.com/pybop-team/PyBOP/pull/588) - Makes `minimising` a property of `BaseOptimiser` set by the cost class.
- [#512](https://github.com/pybop-team/PyBOP/pull/513) - Refactors `LogPosterior` with attributes pointing to composed likelihood object.
- [#551](https://github.com/pybop-team/PyBOP/pull/551) - Refactors Optimiser arguments, `population_size` and `max_iterations` as default args, improves optimiser docstrings
73 changes: 73 additions & 0 deletions examples/scripts/comparison_examples/random_search.py
@@ -0,0 +1,73 @@
import numpy as np

import pybop

# Define model
parameter_set = pybop.ParameterSet.pybamm("Chen2020")
parameter_set.update(
    {
        "Negative electrode active material volume fraction": 0.7,
        "Positive electrode active material volume fraction": 0.67,
    }
)
model = pybop.lithium_ion.SPM(parameter_set=parameter_set)

# Fitting parameters
parameters = pybop.Parameters(
    pybop.Parameter(
        "Negative electrode active material volume fraction",
        bounds=[0.4, 0.75],
        initial_value=0.41,
    ),
    pybop.Parameter(
        "Positive electrode active material volume fraction",
        bounds=[0.4, 0.75],
        initial_value=0.41,
    ),
)
experiment = pybop.Experiment(
    [
        (
            "Discharge at 0.5C for 3 minutes (4 second period)",
            "Charge at 0.5C for 3 minutes (4 second period)",
        ),
    ]
)
values = model.predict(initial_state={"Initial SoC": 0.7}, experiment=experiment)

sigma = 0.002
corrupt_values = values["Voltage [V]"].data + np.random.normal(
    0, sigma, len(values["Voltage [V]"].data)
)

# Form dataset
dataset = pybop.Dataset(
    {
        "Time [s]": values["Time [s]"].data,
        "Current function [A]": values["Current [A]"].data,
        "Voltage [V]": corrupt_values,
    }
)

# Generate problem, cost function, and optimisation class
problem = pybop.FittingProblem(model, parameters, dataset)
cost = pybop.GaussianLogLikelihood(problem, sigma0=sigma * 4)
optim = pybop.Optimisation(
    cost,
    optimiser=pybop.RandomSearch,
    max_iterations=100,
)

results = optim.run()

# Plot the timeseries output
pybop.plot.quick(problem, problem_inputs=results.x, title="Optimised Comparison")

# Plot convergence
pybop.plot.convergence(optim)

# Plot the parameter traces
pybop.plot.parameters(optim)

# Plot the cost landscape with optimisation path
pybop.plot.contour(optim, steps=10)
2 changes: 2 additions & 0 deletions pybop/__init__.py
@@ -124,6 +124,7 @@
#

from .optimisers._cuckoo import CuckooSearchImpl
from .optimisers._random_search import RandomSearchImpl
from .optimisers._adamw import AdamWImpl
from .optimisers._gradient_descent import GradientDescentImpl
from .optimisers.base_optimiser import BaseOptimiser, OptimisationResult, MultiOptimisationResult
@@ -142,6 +143,7 @@
    SNES,
    XNES,
    CuckooSearch,
    RandomSearch,
    AdamW,
)
from .optimisers.optimisation import Optimisation
11 changes: 5 additions & 6 deletions pybop/optimisers/_cuckoo.py
@@ -54,7 +54,6 @@ def __init__(self, x0, sigma0=0.05, boundaries=None, pa=0.25):
         self._dim = len(x0)

         # Population size and abandon rate
-        self._n = self._population_size
         self._pa = pa
         self.step_size = self._sigma0
         self.beta = 1.5
@@ -68,14 +67,14 @@ def __init__(self, x0, sigma0=0.05, boundaries=None, pa=0.25):
             self._nests = np.random.uniform(
                 low=self._boundaries.lower(),
                 high=self._boundaries.upper(),
-                size=(self._n, self._dim),
+                size=(self._population_size, self._dim),
             )
         else:
             self._nests = np.random.normal(
-                self._x0, self._sigma0, size=(self._n, self._dim)
+                self._x0, self._sigma0, size=(self._population_size, self._dim)
             )

-        self._fitness = np.full(self._n, np.inf)
+        self._fitness = np.full(self._population_size, np.inf)

         # Initialise best solutions
         self._x_best = np.copy(x0)
@@ -112,7 +111,7 @@ def tell(self, replies):
         self._iterations += 1

         # Compare cuckoos with current nests
-        for i in range(self._n):
+        for i in range(self._population_size):
             f_new = replies[i]
             if f_new < self._fitness[i]:
                 self._nests[i] = self.cuckoos[i]
@@ -122,7 +121,7 @@
                     self._x_best = self.cuckoos[i]

         # Abandon some worse nests
-        n_abandon = int(self._pa * self._n)
+        n_abandon = int(self._pa * self._population_size)
         worst_nests = np.argsort(self._fitness)[-n_abandon:]
         for idx in worst_nests:
             self.abandon_nests(idx)
111 changes: 111 additions & 0 deletions pybop/optimisers/_random_search.py
@@ -0,0 +1,111 @@
import numpy as np
from pints import PopulationBasedOptimiser


class RandomSearchImpl(PopulationBasedOptimiser):
"""
Random Search (RS) optimisation algorithm.
This algorithm explores the parameter space by randomly sampling points.
The algorithm does the following:
1. Initialise a population of solutions.
2. At each iteration, generate `n` number of random positions within boundaries.
3. Evaluate the quality/fitness of the positions.
4. Replace the best position with improved position if found.
Parameters:
population_size (optional): Number of solutions to evaluate per iteration.
References:
The Random Search algorithm implemented in this work is based on principles outlined
in "Introduction to Stochastic Search and Optimization: Estimation, Simulation, and
Control" by Spall, J. C. (2003).
The implementation inherits from the PINTS PopulationOptimiser.
"""

    def __init__(self, x0, sigma0=0.05, boundaries=None):
        super().__init__(x0, sigma0, boundaries=boundaries)

        # Problem dimensionality
        self._dim = len(x0)

        # Initialise best solution
        self._x_best = np.copy(x0)
        self._f_best = np.inf
        self._running = False
        self._ready_for_tell = False

    def ask(self):
        """
        Returns a list of positions to evaluate in the optimiser-space.
        """
        self._ready_for_tell = True
        self._running = True

        # Generate random solutions
        if self._boundaries:
            self._candidates = np.random.uniform(
                low=self._boundaries.lower(),
                high=self._boundaries.upper(),
                size=(self._population_size, self._dim),
            )
            return self._candidates

        self._candidates = np.random.normal(
            self._x0, self._sigma0, size=(self._population_size, self._dim)
        )
        return self.clip_candidates(self._candidates)

    def tell(self, replies):
        """
        Receives a list of cost function values from points previously specified
        by `self.ask()`, and updates the optimiser state accordingly.
        """
        if not self._ready_for_tell:
            raise RuntimeError("ask() must be called before tell().")

        # Evaluate solutions and update the best
        for i in range(self._population_size):
            f_new = replies[i]
            if f_new < self._f_best:
                self._f_best = f_new
                self._x_best = self._candidates[i]

    def running(self):
        """
        Returns ``True`` if the optimisation is in progress.
        """
        return self._running

    def x_best(self):
        """
        Returns the best parameter values found so far.
        """
        return self._x_best

    def f_best(self):
        """
        Returns the best score found so far.
        """
        return self._f_best

    def name(self):
        """
        Returns the name of the optimiser.
        """
        return "Random Search"

    def clip_candidates(self, x):
        """
        Clip the input array to the boundaries if available.
        """
        if self._boundaries:
            x = np.clip(x, self._boundaries.lower(), self._boundaries.upper())
        return x

    def _suggested_population_size(self):
        """
        Returns a suggested population size based on the dimension of the parameter space.
        """
        return 4 + int(3 * np.log(self._n_parameters))
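
For orientation, the ask/tell interface above can be exercised on its own, outside of PyBOP's `Optimisation` wrapper. The snippet below is a minimal sketch and not part of this commit: the toy `sphere` cost, the starting point and the iteration count are illustrative assumptions.

import numpy as np

from pybop import RandomSearchImpl  # exposed via pybop/__init__.py as shown above


# Hypothetical toy cost: squared distance from the point (0.6, 0.6)
def sphere(x):
    return float(np.sum((np.asarray(x) - 0.6) ** 2))


opt = RandomSearchImpl(x0=np.array([0.5, 0.5]), sigma0=0.05)
# With two parameters, _suggested_population_size() returns 4 + int(3 * ln(2)) = 6

for _ in range(100):
    candidates = opt.ask()                     # (population_size, n_parameters) samples
    opt.tell([sphere(c) for c in candidates])  # keeps the best sample seen so far

print(opt.x_best(), opt.f_best())

Without boundaries, candidates are drawn from a normal distribution around `x0` with spread `sigma0`; with boundaries, they are drawn uniformly within them, as in `ask()` above.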
71 changes: 71 additions & 0 deletions pybop/optimisers/pints_optimisers.py
@@ -10,6 +10,7 @@
    BasePintsOptimiser,
    CuckooSearchImpl,
    GradientDescentImpl,
    RandomSearchImpl,
)


@@ -652,3 +653,73 @@ def __init__(
            parallel,
            **optimiser_kwargs,
        )


class RandomSearch(BasePintsOptimiser):
"""
Adapter for the Random Search optimiser in PyBOP.
Random Search is a simple optimisation algorithm that samples parameter sets randomly
within the given boundaries and identifies the best solution based on fitness.
This optimiser has been implemented for benchmarking and comparisons, convergence will be
better with one of other optimisers in the majority of cases.
Parameters
----------
cost : callable
The cost function to be minimized.
max_iterations : int, optional
Maximum number of iterations for the optimisation.
min_iterations : int, optional (default=2)
Minimum number of iterations before termination.
max_unchanged_iterations : int, optional (default=15)
Maximum number of iterations without improvement before termination.
multistart : int, optional (default=1)
Number of optimiser restarts from randomly sample position. These positions
are sampled from the priors.
parallel : bool, optional (default=False)
Whether to run the optimisation in parallel.
**optimiser_kwargs : optional
Valid PINTS option keys and their values, for example:
x0 : array_like
Initial position from which optimisation will start.
population_size : int
Number of solutions to evaluate per iteration.
bounds : dict
A dictionary with 'lower' and 'upper' keys containing arrays for lower and
upper bounds on the parameters.
absolute_tolerance : float
Absolute tolerance for convergence checking.
relative_tolerance : float
Relative tolerance for convergence checking.
max_evaluations : int
Maximum number of function evaluations.
threshold : float
Threshold value for early termination.
See Also
--------
pybop.RandomSearchImpl : PyBOP implementation of Random Search algorithm.
"""

    def __init__(
        self,
        cost,
        max_iterations: int = None,
        min_iterations: int = 2,
        max_unchanged_iterations: int = 15,
        multistart: int = 1,
        parallel: bool = False,
        **optimiser_kwargs,
    ):
        super().__init__(
            cost,
            RandomSearchImpl,
            max_iterations,
            min_iterations,
            max_unchanged_iterations,
            multistart,
            parallel,
            **optimiser_kwargs,
        )
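
As a usage note, the adapter can also be constructed directly rather than through `pybop.Optimisation` as in the example script earlier in this commit. A minimal sketch, assuming a `cost` object built as in `examples/scripts/comparison_examples/random_search.py` and that `population_size` is forwarded through `**optimiser_kwargs` as described in the docstring above:

# Sketch only: `cost` is assumed to be constructed as in the example script
optim = pybop.RandomSearch(
    cost,
    max_iterations=100,
    max_unchanged_iterations=30,
    population_size=12,  # forwarded via **optimiser_kwargs to the PINTS layer
)
results = optim.run()
print(results.x)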