Add Nelder-Mead optimiser from PINTS #254

Merged: 7 commits, Mar 27, 2024
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -2,6 +2,7 @@

## Features

+- [#195](https://github.com/pybop-team/PyBOP/issues/195) - Adds the Nelder-Mead optimiser from PINTS as another option.

## Bug Fixes

82 changes: 82 additions & 0 deletions examples/scripts/spm_NelderMead.py
@@ -0,0 +1,82 @@
import numpy as np

import pybop

# Parameter set and model definition
parameter_set = pybop.ParameterSet.pybamm("Chen2020")
model = pybop.lithium_ion.SPM(parameter_set=parameter_set)

# Fitting parameters
parameters = [
    pybop.Parameter(
        "Negative electrode active material volume fraction",
        prior=pybop.Gaussian(0.68, 0.05),
    ),
    pybop.Parameter(
        "Positive electrode active material volume fraction",
        prior=pybop.Gaussian(0.58, 0.05),
    ),
]

# Generate data
init_soc = 0.5
sigma = 0.003
experiment = pybop.Experiment(
    [
        (
            "Discharge at 0.5C for 3 minutes (1 second period)",
            "Charge at 0.5C for 3 minutes (1 second period)",
        ),
    ]
    * 2
)
values = model.predict(init_soc=init_soc, experiment=experiment)


def noise(sigma):
    return np.random.normal(0, sigma, len(values["Voltage [V]"].data))


# Form dataset
dataset = pybop.Dataset(
    {
        "Time [s]": values["Time [s]"].data,
        "Current function [A]": values["Current [A]"].data,
        "Voltage [V]": values["Voltage [V]"].data + noise(sigma),
        "Bulk open-circuit voltage [V]": values["Bulk open-circuit voltage [V]"].data
        + noise(sigma),
    }
)

signal = ["Voltage [V]", "Bulk open-circuit voltage [V]"]
# Generate problem, cost function, and optimisation class
problem = pybop.FittingProblem(
    model, parameters, dataset, signal=signal, init_soc=init_soc
)
cost = pybop.RootMeanSquaredError(problem)
optim = pybop.Optimisation(
    cost,
    optimiser=pybop.NelderMead,
    verbose=True,
    allow_infeasible_solutions=True,
    sigma0=0.05,
)
optim.set_max_iterations(100)
optim.set_max_unchanged_iterations(45)

# Run optimisation
x, final_cost = optim.run()
print("Estimated parameters:", x)

# Plot the timeseries output
pybop.quick_plot(problem, parameter_values=x, title="Optimised Comparison")

# Plot convergence
pybop.plot_convergence(optim)

# Plot the parameter traces
pybop.plot_parameters(optim)

# Plot the cost landscape with optimisation path
bounds = np.array([[0.5, 0.8], [0.4, 0.7]])
pybop.plot2d(optim, bounds=bounds, steps=15)
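As a quick sanity check of the fit (not part of the PR), one can compare the estimated values against the Chen2020 ground truth already stored in `parameter_set`. This sketch assumes `parameter_set` supports dict-style access, as pybamm's `ParameterValues` does:

```python
# Hedged sketch: compare fitted parameters to the Chen2020 reference values.
true_values = np.array(
    [
        parameter_set["Negative electrode active material volume fraction"],
        parameter_set["Positive electrode active material volume fraction"],
    ]
)
print("Absolute error:", np.abs(x - true_values))
```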
1 change: 1 addition & 0 deletions pybop/__init__.py
@@ -88,6 +88,7 @@
Adam,
CMAES,
IRPropMin,
+NelderMead,
PSO,
SNES,
XNES,
58 changes: 48 additions & 10 deletions pybop/optimisers/pints_optimisers.py
@@ -16,7 +16,7 @@ class GradientDescent(pints.GradientDescent):
        Initial position from which optimization will start.
    sigma0 : float, optional
        Initial step size (default is 0.1).
-    bounds : sequence or ``Bounds``, optional
+    bounds : dict, optional
        Ignored by this optimiser, provided for API consistency.

See Also
@@ -46,7 +46,7 @@ class Adam(pints.Adam):
        Initial position from which optimization will start.
    sigma0 : float, optional
        Initial step size (default is 0.1).
-    bounds : sequence or ``Bounds``, optional
+    bounds : dict, optional
        Ignored by this optimiser, provided for API consistency.

See Also
@@ -77,7 +77,8 @@ class IRPropMin(pints.IRPropMin):
    sigma0 : float, optional
        Initial step size (default is 0.1).
    bounds : dict, optional
-        Lower and upper bounds for each optimization parameter.
+        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper
+        bounds on the parameters.

See Also
--------
@@ -109,7 +110,8 @@ class PSO(pints.PSO):
    sigma0 : float, optional
        Spread of the initial particle positions (default is 0.1).
    bounds : dict, optional
-        Lower and upper bounds for each optimization parameter.
+        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper
+        bounds on the parameters.

See Also
--------
@@ -147,7 +149,8 @@ class SNES(pints.SNES):
    sigma0 : float, optional
        Initial standard deviation of the sampling distribution, defaults to 0.1.
    bounds : dict, optional
-        Lower and upper bounds for each optimization parameter.
+        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper
+        bounds on the parameters.

See Also
--------
@@ -168,7 +171,9 @@ class XNES(pints.XNES):
"""
Implements the Exponential Natural Evolution Strategy (XNES) optimiser from PINTS.

XNES is an evolutionary algorithm that samples from a multivariate normal distribution, which is updated iteratively to fit the distribution of successful solutions.
XNES is an evolutionary algorithm that samples from a multivariate normal
distribution, which is updated iteratively to fit the distribution of successful
solutions.

Parameters
----------
@@ -177,7 +182,8 @@ class XNES(pints.XNES):
    sigma0 : float, optional
        Initial standard deviation of the sampling distribution, defaults to 0.1.
    bounds : dict, optional
-        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper bounds on the parameters. If ``None``, no bounds are enforced.
+        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper
+        bounds on the parameters. If ``None``, no bounds are enforced.

See Also
--------
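Several of the adapters above accept the same `bounds` dict. A minimal sketch of the typical conversion into a PINTS boundaries object; `make_boundaries` is an illustrative name, and the exact logic in this file may differ:

```python
import pints

def make_boundaries(bounds):
    # Map the PyBOP-style bounds dict onto a PINTS boundaries object.
    if bounds is None:
        return None
    return pints.RectangularBoundaries(bounds["lower"], bounds["upper"])
```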
@@ -194,12 +200,44 @@ def __init__(self, x0, sigma0=0.1, bounds=None):
        super().__init__(x0, sigma0, self.boundaries)


+class NelderMead(pints.NelderMead):
+    """
+    Implements the Nelder-Mead downhill simplex method from PINTS.
+
+    This is a deterministic local optimiser. In most update steps it performs
+    either one evaluation or two sequential evaluations, so it will not
+    typically benefit from parallelisation.
+
+    Parameters
+    ----------
+    x0 : array_like
+        The initial parameter vector to optimize.
+    sigma0 : float, optional
+        Initial standard deviation of the sampling distribution, defaults to 0.1.
+        Does not appear to be used.
+    bounds : dict, optional
+        Ignored by this optimiser, provided for API consistency.
+
+    See Also
+    --------
+    pints.NelderMead : PINTS implementation of the Nelder-Mead algorithm.
+    """
+
+    def __init__(self, x0, sigma0=0.1, bounds=None):
+        if bounds is not None:
+            print("NOTE: Boundaries ignored by NelderMead")
+
+        self.boundaries = None  # Bounds ignored in pints.NelderMead
+        super().__init__(x0, sigma0, self.boundaries)
+

class CMAES(pints.CMAES):
    """
    Adapter for the Covariance Matrix Adaptation Evolution Strategy (CMA-ES) optimiser in PINTS.

    CMA-ES is an evolutionary algorithm for difficult non-linear non-convex optimization problems.
-    It adapts the covariance matrix of a multivariate normal distribution to capture the shape of the cost landscape.
+    It adapts the covariance matrix of a multivariate normal distribution to capture the shape of
+    the cost landscape.

    Parameters
    ----------
@@ -208,8 +246,8 @@ class CMAES(pints.CMAES):
    sigma0 : float, optional
        Initial standard deviation of the sampling distribution, defaults to 0.1.
    bounds : dict, optional
-        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper bounds on the parameters.
-        If ``None``, no bounds are enforced.
+        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper
+        bounds on the parameters. If ``None``, no bounds are enforced.

See Also
--------
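The `NelderMead` adapter above forwards directly to PINTS, which drives all of its optimisers through an ask-and-tell loop. A standalone sketch on a toy quadratic (illustrative only; PyBOP users would go through `pybop.Optimisation` as in the example script):

```python
import numpy as np
import pints

# Minimise f(x) = x0^2 + x1^2 with PINTS's ask/tell interface.
opt = pints.NelderMead(np.array([1.0, 1.0]), sigma0=0.1)
for _ in range(200):
    xs = opt.ask()  # the initial simplex, then 1-2 candidate points per step
    fs = [float(np.sum(np.asarray(p) ** 2)) for p in xs]
    opt.tell(fs)
print("Best point:", opt.x_best(), "Best cost:", opt.f_best())
```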
1 change: 1 addition & 0 deletions tests/integration/test_parameterisations.py
@@ -84,6 +84,7 @@ def spm_costs(self, model, parameters, cost_class, init_soc):
pybop.CMAES,
pybop.GradientDescent,
pybop.IRPropMin,
+pybop.NelderMead,
pybop.PSO,
pybop.SNES,
pybop.XNES,
1 change: 1 addition & 0 deletions tests/unit/test_optimisation.py
@@ -80,6 +80,7 @@ def two_param_cost(self, model, two_parameters, dataset):
(pybop.XNES, "Exponential Natural Evolution Strategy (xNES)"),
(pybop.PSO, "Particle Swarm Optimisation (PSO)"),
(pybop.IRPropMin, "iRprop-"),
+(pybop.NelderMead, "Nelder-Mead"),
],
)
@pytest.mark.unit
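For reference, a hedged sketch of how a parametrised name check like this is typically consumed; the fixture and attribute names below are assumptions, not the repository's actual test body:

```python
import pytest
import pybop

@pytest.mark.parametrize(
    "optimiser, expected_name",
    [(pybop.NelderMead, "Nelder-Mead")],
)
@pytest.mark.unit
def test_optimiser_name(optimiser, expected_name, cost):
    # `cost` is assumed to be a fixture supplying a pybop cost function.
    optim = pybop.Optimisation(cost, optimiser=optimiser)
    assert optim.optimiser.name() == expected_name
```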