random_search optimiser added in the pints framework
Dibyendu-IITKGP committed Dec 5, 2024
1 parent f4c62c8 commit fcdee74
Showing 4 changed files with 253 additions and 0 deletions.
75 changes: 75 additions & 0 deletions examples/scripts/comparison_examples/random_search.py
@@ -0,0 +1,75 @@
import numpy as np

import pybop

# Define model
parameter_set = pybop.ParameterSet.pybamm("Chen2020")
parameter_set.update(
    {
        "Negative electrode active material volume fraction": 0.7,
        "Positive electrode active material volume fraction": 0.67,
    }
)
model = pybop.lithium_ion.SPM(parameter_set=parameter_set)

# Fitting parameters
parameters = pybop.Parameters(
    pybop.Parameter(
        "Negative electrode active material volume fraction",
        prior=pybop.Gaussian(0.6, 0.05),
        bounds=[0.4, 0.75],
        initial_value=0.41,
    ),
    pybop.Parameter(
        "Positive electrode active material volume fraction",
        prior=pybop.Gaussian(0.48, 0.05),
        bounds=[0.4, 0.75],
        initial_value=0.41,
    ),
)
experiment = pybop.Experiment(
    [
        (
            "Discharge at 0.5C for 3 minutes (4 second period)",
            "Charge at 0.5C for 3 minutes (4 second period)",
        ),
    ]
)
values = model.predict(initial_state={"Initial SoC": 0.7}, experiment=experiment)

sigma = 0.002
corrupt_values = values["Voltage [V]"].data + np.random.normal(
    0, sigma, len(values["Voltage [V]"].data)
)

# Form dataset
dataset = pybop.Dataset(
    {
        "Time [s]": values["Time [s]"].data,
        "Current function [A]": values["Current [A]"].data,
        "Voltage [V]": corrupt_values,
    }
)

# Generate problem, cost function, and optimisation class
problem = pybop.FittingProblem(model, parameters, dataset)
cost = pybop.GaussianLogLikelihood(problem, sigma0=sigma * 4)
optim = pybop.Optimisation(
    cost,
    optimiser=pybop.RandomSearch,
    max_iterations=100,
)

results = optim.run()

# Plot the timeseries output
pybop.plot.quick(problem, problem_inputs=results.x, title="Optimised Comparison")

# Plot convergence
pybop.plot.convergence(optim)

# Plot the parameter traces
pybop.plot.parameters(optim)

# Plot the cost landscape with optimisation path
pybop.plot.contour(optim, steps=15)
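
Aside: because the script corrupts the model prediction with `np.random.normal`, each run produces a slightly different dataset and optimisation path. If reproducibility is wanted, a seed can be set before the noise is generated; a minimal, optional addition (the seed value is an arbitrary choice, not part of this commit):

np.random.seed(8)  # fix the RNG so the corrupted dataset is reproducible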
2 changes: 2 additions & 0 deletions pybop/__init__.py
@@ -123,6 +123,7 @@
#

from .optimisers._cuckoo import CuckooSearchImpl
from .optimisers._random_search import RandomSearchImpl
from .optimisers._adamw import AdamWImpl
from .optimisers._gradient_descent import GradientDescentImpl
from .optimisers.base_optimiser import BaseOptimiser, OptimisationResult
@@ -142,6 +143,7 @@
    SNES,
    XNES,
    CuckooSearch,
    RandomSearch,
    AdamW,
)
from .optimisers.optimisation import Optimisation
111 changes: 111 additions & 0 deletions pybop/optimisers/_random_search.py
@@ -0,0 +1,111 @@
import numpy as np
from pints import PopulationBasedOptimiser


class RandomSearchImpl(PopulationBasedOptimiser):
    """
    Random Search (RS) optimisation algorithm.

    This algorithm explores the parameter space by randomly sampling points.
    The algorithm is simple:

    1. Initialise a population of solutions.
    2. At each iteration, generate new random solutions within the boundaries.
    3. Evaluate the quality/fitness of the solutions.
    4. Update the best solution found so far.

    Parameters
    ----------
    population_size : int, optional
        Number of solutions to evaluate per iteration.

    References
    ----------
    The Random Search algorithm implemented in this work is based on principles
    outlined in Spall, J. C. (2003), "Introduction to Stochastic Search and
    Optimization: Estimation, Simulation, and Control".

    The implementation leverages the PINTS library framework, which provides
    tools for population-based optimisation methods.
    """

    def __init__(self, x0, sigma0=0.05, boundaries=None, population_size=None):
        # Problem dimensionality; set before the base-class constructor,
        # which may call _suggested_population_size()
        self._dim = len(x0)

        super().__init__(x0, sigma0, boundaries=boundaries)

        # Population size, defaulting to a suggested value
        self._population_size = population_size or self._suggested_population_size()
        self.step_size = self._sigma0

        # Initialise the best solution found so far
        self._x_best = np.copy(x0)
        self._f_best = np.inf

        # Iteration counter
        self._iterations = 0

        # Flags
        self._running = False
        self._ready_for_tell = False

    def ask(self):
        """
        Returns a list of the next points in the parameter space
        to evaluate from the optimiser.
        """
        # Random search samples uniformly within the boundaries, so they
        # must be provided
        if self._boundaries is None:
            raise ValueError("RandomSearchImpl requires boundaries for sampling.")

        self._ready_for_tell = True
        self._running = True

        # Generate random solutions within the boundaries
        self._candidates = np.random.uniform(
            low=self._boundaries.lower(),
            high=self._boundaries.upper(),
            size=(self._population_size, self._dim),
        )
        return self._candidates

    def tell(self, replies):
        """
        Receives a list of function values from the cost function at the points
        previously specified by `self.ask()`, and updates the optimiser state
        accordingly.
        """
        if not self._ready_for_tell:
            raise RuntimeError("Optimiser not ready for tell()")

        self._iterations += 1
        self._ready_for_tell = False

        # Update the best solution
        for i, fitness in enumerate(replies):
            if fitness < self._f_best:
                self._f_best = fitness
                self._x_best = self._candidates[i]

    def running(self):
        """
        Returns ``True`` if the optimisation is in progress.
        """
        return self._running

    def x_best(self):
        """
        Returns the best parameter values found so far.
        """
        return self._x_best

    def f_best(self):
        """
        Returns the best score found so far.
        """
        return self._f_best

    def name(self):
        """
        Returns the name of the optimiser.
        """
        return "Random Search"

    def _suggested_population_size(self):
        """
        Returns a suggested population size based on the dimension of the
        parameter space.
        """
        return 10 + int(2 * np.log(self._dim))
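
The ask/tell interface above follows the standard PINTS pattern: the caller asks for a batch of candidates, evaluates them externally, then tells the scores back to the optimiser. A minimal sketch of driving `RandomSearchImpl` by hand on a toy quadratic cost; the cost function, bounds, and iteration budget here are illustrative assumptions, not part of this commit:

import numpy as np
from pints import RectangularBoundaries

from pybop.optimisers._random_search import RandomSearchImpl


def toy_cost(x):
    # Illustrative cost: squared distance from an assumed optimum
    return float(np.sum((x - np.array([0.5, 0.6])) ** 2))


boundaries = RectangularBoundaries([0.4, 0.4], [0.75, 0.75])
opt = RandomSearchImpl(x0=np.array([0.41, 0.41]), boundaries=boundaries)

for _ in range(50):  # assumed iteration budget
    candidates = opt.ask()  # sample a population uniformly within bounds
    scores = [toy_cost(c) for c in candidates]
    opt.tell(scores)  # update the best-so-far solution

print(opt.x_best(), opt.f_best())

Because each iteration samples independently of the last, no state beyond the best-so-far solution is carried between iterations; this is what distinguishes pure random search from evolutionary strategies such as CMA-ES.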
65 changes: 65 additions & 0 deletions pybop/optimisers/pints_optimisers.py
@@ -13,6 +13,7 @@
    BasePintsOptimiser,
    CuckooSearchImpl,
    GradientDescentImpl,
    RandomSearchImpl,
)


@@ -682,3 +683,67 @@ def __init__(
            parallel,
            **optimiser_kwargs,
        )

class RandomSearch(BasePintsOptimiser):
    """
    Adapter for the Random Search optimiser in PyBOP.

    Random Search is a simple optimisation algorithm that samples parameter
    sets randomly within the given boundaries and identifies the best solution
    based on fitness.

    Parameters
    ----------
    cost : callable
        The cost function to be minimised.
    max_iterations : int, optional
        Maximum number of iterations for the optimisation.
    min_iterations : int, optional (default=20)
        Minimum number of iterations before termination.
    max_unchanged_iterations : int, optional (default=100)
        Maximum number of iterations without improvement before termination.
    population_size : int, optional (default=10)
        Number of solutions to evaluate per iteration.
    parallel : bool, optional (default=False)
        Whether to run the optimisation in parallel.
    **optimiser_kwargs : optional
        Valid PINTS option keys and their values, for example:
        x0 : array_like
            Initial position from which optimisation will start.
        bounds : dict
            A dictionary with 'lower' and 'upper' keys containing arrays for
            lower and upper bounds on the parameters.
        absolute_tolerance : float
            Absolute tolerance for convergence checking.
        relative_tolerance : float
            Relative tolerance for convergence checking.
        max_evaluations : int
            Maximum number of function evaluations.
        threshold : float
            Threshold value for early termination.

    See Also
    --------
    pybop.RandomSearchImpl : PyBOP implementation of the Random Search algorithm.
    """

    def __init__(
        self,
        cost,
        max_iterations: int = None,
        min_iterations: int = 20,
        max_unchanged_iterations: int = 100,
        population_size: int = 10,
        parallel: bool = False,
        **optimiser_kwargs,
    ):
        # Forward the population size through the optimiser options so it is
        # not silently dropped (it is consumed by RandomSearchImpl)
        optimiser_kwargs["population_size"] = population_size
        super().__init__(
            cost,
            RandomSearchImpl,
            max_iterations,
            min_iterations,
            max_unchanged_iterations,
            parallel,
            **optimiser_kwargs,
        )
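
For comparison with the `pybop.Optimisation(cost, optimiser=pybop.RandomSearch, ...)` form used in the example script above, the adapter can also be constructed directly; a short sketch, assuming `cost` was built as in that script:

optim = pybop.RandomSearch(
    cost,
    max_iterations=100,
    population_size=20,  # candidates evaluated per iteration
)
results = optim.run()
print(results.x)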
