diff --git a/CHANGELOG.md b/CHANGELOG.md
index db681366..cbea9cd6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,7 @@
 
 ## Features
 
+- [#195](https://github.com/pybop-team/PyBOP/issues/195) - Adds the Nelder-Mead optimiser from PINTS as another option.
 
 ## Bug Fixes
 
diff --git a/examples/scripts/spm_NelderMead.py b/examples/scripts/spm_NelderMead.py
new file mode 100644
index 00000000..99abf5a8
--- /dev/null
+++ b/examples/scripts/spm_NelderMead.py
@@ -0,0 +1,82 @@
+import numpy as np
+
+import pybop
+
+# Parameter set and model definition
+parameter_set = pybop.ParameterSet.pybamm("Chen2020")
+model = pybop.lithium_ion.SPM(parameter_set=parameter_set)
+
+# Fitting parameters
+parameters = [
+    pybop.Parameter(
+        "Negative electrode active material volume fraction",
+        prior=pybop.Gaussian(0.68, 0.05),
+    ),
+    pybop.Parameter(
+        "Positive electrode active material volume fraction",
+        prior=pybop.Gaussian(0.58, 0.05),
+    ),
+]
+
+# Generate data
+init_soc = 0.5
+sigma = 0.003
+experiment = pybop.Experiment(
+    [
+        (
+            "Discharge at 0.5C for 3 minutes (1 second period)",
+            "Charge at 0.5C for 3 minutes (1 second period)",
+        ),
+    ]
+    * 2
+)
+values = model.predict(init_soc=init_soc, experiment=experiment)
+
+
+def noise(sigma):
+    return np.random.normal(0, sigma, len(values["Voltage [V]"].data))
+
+
+# Form dataset
+dataset = pybop.Dataset(
+    {
+        "Time [s]": values["Time [s]"].data,
+        "Current function [A]": values["Current [A]"].data,
+        "Voltage [V]": values["Voltage [V]"].data + noise(sigma),
+        "Bulk open-circuit voltage [V]": values["Bulk open-circuit voltage [V]"].data
+        + noise(sigma),
+    }
+)
+
+signal = ["Voltage [V]", "Bulk open-circuit voltage [V]"]
+# Generate problem, cost function, and optimisation class
+problem = pybop.FittingProblem(
+    model, parameters, dataset, signal=signal, init_soc=init_soc
+)
+cost = pybop.RootMeanSquaredError(problem)
+optim = pybop.Optimisation(
+    cost,
+    optimiser=pybop.NelderMead,
+    verbose=True,
+    allow_infeasible_solutions=True,
+    sigma0=0.05,
+)
+optim.set_max_iterations(100)
+optim.set_max_unchanged_iterations(45)
+
+# Run optimisation
+x, final_cost = optim.run()
+print("Estimated parameters:", x)
+
+# Plot the timeseries output
+pybop.quick_plot(problem, parameter_values=x, title="Optimised Comparison")
+
+# Plot convergence
+pybop.plot_convergence(optim)
+
+# Plot the parameter traces
+pybop.plot_parameters(optim)
+
+# Plot the cost landscape with optimisation path
+bounds = np.array([[0.5, 0.8], [0.4, 0.7]])
+pybop.plot2d(optim, bounds=bounds, steps=15)
diff --git a/pybop/__init__.py b/pybop/__init__.py
index 82b7ea6e..5ae5c4d2 100644
--- a/pybop/__init__.py
+++ b/pybop/__init__.py
@@ -88,6 +88,7 @@
     Adam,
     CMAES,
     IRPropMin,
+    NelderMead,
     PSO,
     SNES,
     XNES,
diff --git a/pybop/optimisers/pints_optimisers.py b/pybop/optimisers/pints_optimisers.py
index ddebee90..1ee289f1 100644
--- a/pybop/optimisers/pints_optimisers.py
+++ b/pybop/optimisers/pints_optimisers.py
@@ -16,7 +16,7 @@ class GradientDescent(pints.GradientDescent):
         Initial position from which optimization will start.
     sigma0 : float, optional
         Initial step size (default is 0.1).
-    bounds : sequence or ``Bounds``, optional
+    bounds : dict, optional
         Ignored by this optimiser, provided for API consistency.
 
     See Also
@@ -46,7 +46,7 @@ class Adam(pints.Adam):
         Initial position from which optimization will start.
     sigma0 : float, optional
         Initial step size (default is 0.1).
-    bounds : sequence or ``Bounds``, optional
+    bounds : dict, optional
         Ignored by this optimiser, provided for API consistency.
 
     See Also
@@ -77,7 +77,8 @@ class IRPropMin(pints.IRPropMin):
     sigma0 : float, optional
         Initial step size (default is 0.1).
     bounds : dict, optional
-        Lower and upper bounds for each optimization parameter.
+        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper
+        bounds on the parameters.
 
     See Also
     --------
@@ -109,7 +110,8 @@ class PSO(pints.PSO):
     sigma0 : float, optional
         Spread of the initial particle positions (default is 0.1).
     bounds : dict, optional
-        Lower and upper bounds for each optimization parameter.
+        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper
+        bounds on the parameters.
 
     See Also
     --------
@@ -147,7 +149,8 @@ class SNES(pints.SNES):
     sigma0 : float, optional
         Initial standard deviation of the sampling distribution, defaults to 0.1.
     bounds : dict, optional
-        Lower and upper bounds for each optimization parameter.
+        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper
+        bounds on the parameters.
 
     See Also
     --------
@@ -168,7 +171,9 @@ class XNES(pints.XNES):
     """
     Implements the Exponential Natural Evolution Strategy (XNES) optimiser from PINTS.
 
-    XNES is an evolutionary algorithm that samples from a multivariate normal distribution, which is updated iteratively to fit the distribution of successful solutions.
+    XNES is an evolutionary algorithm that samples from a multivariate normal
+    distribution, which is updated iteratively to fit the distribution of successful
+    solutions.
 
     Parameters
     ----------
@@ -177,7 +182,8 @@ class XNES(pints.XNES):
     sigma0 : float, optional
         Initial standard deviation of the sampling distribution, defaults to 0.1.
     bounds : dict, optional
-        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper bounds on the parameters. If ``None``, no bounds are enforced.
+        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper
+        bounds on the parameters. If ``None``, no bounds are enforced.
 
     See Also
     --------
@@ -194,12 +200,44 @@ def __init__(self, x0, sigma0=0.1, bounds=None):
         super().__init__(x0, sigma0, self.boundaries)
 
 
+class NelderMead(pints.NelderMead):
+    """
+    Implements the Nelder-Mead downhill simplex method from PINTS.
+
+    This is a deterministic local optimiser. In most update steps it performs
+    either one evaluation, or two sequential evaluations, so that it will not
+    typically benefit from parallelisation.
+
+    Parameters
+    ----------
+    x0 : array_like
+        The initial parameter vector to optimize.
+    sigma0 : float, optional
+        Initial standard deviation of the sampling distribution, defaults to 0.1.
+        Does not appear to be used.
+    bounds : dict, optional
+        Ignored by this optimiser, provided for API consistency.
+
+    See Also
+    --------
+    pints.NelderMead : PINTS implementation of Nelder-Mead algorithm.
+    """
+
+    def __init__(self, x0, sigma0=0.1, bounds=None):
+        if bounds is not None:
+            print("NOTE: Boundaries ignored by NelderMead")
+
+        self.boundaries = None  # Bounds ignored in pints.NelderMead
+        super().__init__(x0, sigma0, self.boundaries)
+
+
 class CMAES(pints.CMAES):
     """
     Adapter for the Covariance Matrix Adaptation Evolution Strategy (CMA-ES) optimiser in PINTS.
 
     CMA-ES is an evolutionary algorithm for difficult non-linear non-convex optimization problems.
-    It adapts the covariance matrix of a multivariate normal distribution to capture the shape of the cost landscape.
+    It adapts the covariance matrix of a multivariate normal distribution to capture the shape of
+    the cost landscape.
 
     Parameters
     ----------
@@ -208,8 +246,8 @@ class CMAES(pints.CMAES):
     sigma0 : float, optional
         Initial standard deviation of the sampling distribution, defaults to 0.1.
     bounds : dict, optional
-        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper bounds on the parameters.
-        If ``None``, no bounds are enforced.
+        A dictionary with 'lower' and 'upper' keys containing arrays for lower and upper
+        bounds on the parameters. If ``None``, no bounds are enforced.
 
     See Also
     --------
diff --git a/tests/integration/test_parameterisations.py b/tests/integration/test_parameterisations.py
index af17f6af..4da45fd4 100644
--- a/tests/integration/test_parameterisations.py
+++ b/tests/integration/test_parameterisations.py
@@ -84,6 +84,7 @@ def spm_costs(self, model, parameters, cost_class, init_soc):
             pybop.CMAES,
             pybop.GradientDescent,
             pybop.IRPropMin,
+            pybop.NelderMead,
             pybop.PSO,
             pybop.SNES,
             pybop.XNES,
diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py
index 53d13c5a..864479d8 100644
--- a/tests/unit/test_optimisation.py
+++ b/tests/unit/test_optimisation.py
@@ -80,6 +80,7 @@ def two_param_cost(self, model, two_parameters, dataset):
             (pybop.XNES, "Exponential Natural Evolution Strategy (xNES)"),
             (pybop.PSO, "Particle Swarm Optimisation (PSO)"),
             (pybop.IRPropMin, "iRprop-"),
+            (pybop.NelderMead, "Nelder-Mead"),
         ],
     )
     @pytest.mark.unit
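For reference, a minimal sketch of the bounds format the updated docstrings describe, and of the new optimiser's handling of it. The constructor signatures follow the diff above; the numeric values are purely illustrative:

```python
import numpy as np

import pybop

# Bounds format from the updated docstrings: a dict with 'lower' and
# 'upper' keys holding arrays, one entry per fitted parameter.
bounds = {"lower": np.array([0.5, 0.4]), "upper": np.array([0.8, 0.7])}
x0 = np.array([0.68, 0.58])  # illustrative starting point

# XNES enforces the bounds...
xnes = pybop.XNES(x0, sigma0=0.05, bounds=bounds)

# ...while NelderMead prints a note and ignores them, per the new adapter.
nelder_mead = pybop.NelderMead(x0, sigma0=0.05, bounds=bounds)
```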