From 04d1d1994f1adafc5aef3603a6129c8bcf833fec Mon Sep 17 00:00:00 2001 From: Brady Planden Date: Mon, 9 Sep 2024 11:57:44 +0100 Subject: [PATCH] examples: small fixes to maximum_a_posteriori.ipynb --- examples/notebooks/maximum_a_posteriori.ipynb | 99 ++++++++++++------- 1 file changed, 64 insertions(+), 35 deletions(-) diff --git a/examples/notebooks/maximum_a_posteriori.ipynb b/examples/notebooks/maximum_a_posteriori.ipynb index c25fb8345..299ec9a79 100644 --- a/examples/notebooks/maximum_a_posteriori.ipynb +++ b/examples/notebooks/maximum_a_posteriori.ipynb @@ -6,9 +6,9 @@ "id": "expmkveO04pw" }, "source": [ - "## Maximum a Posteriori Introduction\n", + "## Maximum a Posteriori Parameter Inference\n", "\n", - "In this notebook, we introduce the Maximum a Posteriori (MAP), which extends Maximum Likelihood Estimation (MLE) by inclusion of a prior $p(\\theta)$ into the cost function. This becomes a Bayesian method, and follows the Bayesian Theorem given as,\n", + "In this notebook, we introduce the Maximum a Posteriori (MAP), which extends Maximum Likelihood Estimation (MLE) by inclusion of a prior $p(\\theta)$ into the cost function. 
To include this prior information, we construct a Bayesian Posterior with Bayes' Theorem given as,\n", "\n", "$$\n", "P(\\theta|D) = \\frac{P(D|\\theta)P(\\theta)}{P(D)}\n", "$$\n", @@ -102,7 +102,15 @@ "metadata": { "id": "sBasxv8U04p3" }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of observations in each trajectory: [2, 4, 8, 16, 32, 64, 128, 256]\n" + ] + } + ], "source": [ "n = 8 # Number of time-series trajectories\n", "observations = [\n", @@ -111,7 +119,9 @@ "values = []\n", "for i in observations:\n", " t_eval = np.linspace(0, 10, i)\n", - " values.append(model.predict(t_eval=t_eval)) # predict and store" + " values.append(model.predict(t_eval=t_eval)) # predict and store\n", + "\n", + "print(f\"Number of observations in each trajectory: {observations}\")" ] }, { @@ -141,7 +151,7 @@ "source": [ "### Creating the PyBOP dataset\n", "\n", - "The reference trajectory needs to be included in the optimisation task, which is handed within the `Dataset` class. In this situation, this class is composed of the time, current, and the noisy voltage data; however, if we were completing parameter inference from a different measured signal, such as 'Cell Temperature', that would need to be included." + "The reference trajectory needs to be included in the optimisation task, which is handled within the `Dataset` class. In this situation, this class is composed of the time, current, and the noisy voltage data; however, if we were performing parameter inference from a different measured signal, such as 'Cell Temperature', that would need to be included." ] }, { @@ -168,7 +178,7 @@ }, "source": [ "### Constructing Parameters Class\n", - "Next, we need to select the forward model parameters for inference. The PyBOP parameters class composes as many PyBOP parameter classes as the user wants (whether that is a good idea is left out of this example). 
This class requires the parameter name, which must resolve to a parameter within the `ParameterSet` defined above. Additionally, this class can accept an `initial_value` which will be used by the optimiser, as well as bounds. For this example, we will provide a `prior` to the parameter class, which will be used later by the MAP process." + "Next, we need to select the forward model parameters for inference. The PyBOP parameters class composes as many individual PyBOP parameters as the user wants (whether these parameters can be identified is left out of this example). This class requires the parameter name, which must resolve to a parameter within the `ParameterSet` defined above. Additionally, this class can accept an `initial_value` which will be used by the optimiser, as well as bounds. For this example, we will provide a `prior` to the parameter class, which will be used later by the MAP process." ] }, { @@ -189,8 +199,16 @@ ], "source": [ "parameters = pybop.Parameters(\n", - " pybop.Parameter(\"Negative particle radius [m]\", prior=pybop.Gaussian(4e-6, 1e-6)),\n", - " pybop.Parameter(\"Positive particle radius [m]\", prior=pybop.Gaussian(5e-6, 1e-6)),\n", + " pybop.Parameter(\n", + " \"Negative particle radius [m]\",\n", + " prior=pybop.Gaussian(4e-6, 1e-6),\n", + " true_value=parameter_set[\"Negative particle radius [m]\"],\n", + " ),\n", + " pybop.Parameter(\n", + " \"Positive particle radius [m]\",\n", + " prior=pybop.Gaussian(5e-6, 1e-6),\n", + " true_value=parameter_set[\"Positive particle radius [m]\"],\n", + " ),\n", ")" ] }, @@ -204,7 +222,7 @@ "\n", "With the datasets and parameters defined, we can now construct the `FittingProblem` which composes the model, parameters, and dataset providing a single class with the required information for simulating and assessing the forward model. \n", "\n", - "As described in the introduction to this notebook, the MAP method uses the pre-normalised posterior for optimisation. 
This is defined in PyBOP as the `LogPosterior` class, and has arguments for the likelihood and prior functions. If a prior is not provided, the parameter priors are used as default. In this example, we will use a `GaussianLogLikelihood` likelihood function, and the default priors set above. For numerical reasons, we optimise the log of the posterior; however this doesn't affect the final results." + "As described in the introduction to this notebook, the MAP method uses the non-normalised posterior for optimisation. This is defined in PyBOP as the `LogPosterior` class, and has arguments for the likelihood and prior functions. If a prior is not provided, the parameter priors are used as default. In this example, we will use a `GaussianLogLikelihoodKnownSigma` likelihood function, and the default priors set above. For numerical reasons, we optimise the log of the posterior; however this doesn't affect the final results." ] }, { @@ -265,9 +283,9 @@ { "data": { "text/html": [ - "