tests: adds rhat assertion, updates thevenin sampling for MAP estimate x0.
BradyPlanden committed Oct 23, 2024
1 parent 35ce04b commit 44a03f2
Showing 2 changed files with 30 additions and 22 deletions.
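
Note on the new convergence check: the updated test asserts that `PosteriorSummary.rhat()` stays below 1.05, the conventional threshold for the Gelman-Rubin diagnostic. As background, here is a minimal numpy sketch of the basic (unsplit, non-rank-normalised) R-hat statistic; PyBOP's actual implementation may differ (e.g. split or rank-normalised variants), so treat this as an illustration rather than the library's code.

```python
import numpy as np


def rhat(chains: np.ndarray) -> np.ndarray:
    """Basic Gelman-Rubin R-hat for samples shaped (n_chains, n_draws, n_params).

    Illustrative only; library implementations typically split and/or
    rank-normalise the chains before computing the statistic.
    """
    n = chains.shape[1]
    chain_means = chains.mean(axis=1)                    # per-chain means, (n_chains, n_params)
    within = chains.var(axis=1, ddof=1).mean(axis=0)     # W: mean within-chain variance
    between = n * chain_means.var(axis=0, ddof=1)        # B: between-chain variance
    pooled = (n - 1) / n * within + between / n          # pooled variance estimate
    return np.sqrt(pooled / within)


rng = np.random.default_rng(0)
well_mixed = rng.normal(size=(4, 1000, 2))
print(rhat(well_mixed))  # values close to 1.0, comfortably below the 1.05 threshold
```
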
2 changes: 1 addition & 1 deletion pybop/samplers/mcmc_summary.py
@@ -178,7 +178,7 @@ def autocorrelation(self, x: np.ndarray) -> np.ndarray:
         Computes the autocorrelation (Pearson correlation coefficient)
         of a numpy array representing samples.
         """
-        x = (x - x.mean()) / (x.std() * np.sqrt(len(x)))
+        x = (x - x.mean()) / (x.std() * np.sqrt(len(x)) + np.finfo(float).eps)
         cor = np.correlate(x, x, mode="full")
         return cor[len(x) : -1]

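The one-line change above adds `np.finfo(float).eps` to the denominator so that a degenerate chain with zero standard deviation no longer divides by zero. A quick standalone illustration of the failure mode and the guard (not PyBOP code):

```python
import numpy as np

x = np.full(100, 0.5)                      # constant chain: x.std() == 0
denom = x.std() * np.sqrt(len(x))          # 0.0 -> (x - x.mean()) / denom would be all NaN
safe_denom = denom + np.finfo(float).eps   # smallest float increment keeps it finite
print((x - x.mean()) / safe_denom)         # all zeros instead of NaNs
```
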
50 changes: 29 additions & 21 deletions tests/integration/test_monte_carlo_thevenin.py
@@ -72,7 +72,7 @@ def noise(self, sigma, values):
         return np.random.normal(0, sigma, values)

     @pytest.fixture
-    def likelihood(self, model, parameters, init_soc):
+    def posterior(self, model, parameters, init_soc):
         # Form dataset
         solution = self.get_data(model, init_soc)
         dataset = pybop.Dataset(
@@ -86,7 +86,20 @@ def likelihood(self, model, parameters, init_soc):

         # Define the cost to optimise
         problem = pybop.FittingProblem(model, parameters, dataset)
-        return pybop.GaussianLogLikelihoodKnownSigma(problem, sigma0=0.0075)
+        likelihood = pybop.GaussianLogLikelihoodKnownSigma(problem, sigma0=self.sigma0)
+        return pybop.LogPosterior(likelihood)
+
+    @pytest.fixture
+    def map_estimate(self, posterior):
+        common_args = {
+            "max_iterations": 100,
+            "max_unchanged_iterations": 35,
+            "sigma0": [3e-4, 3e-4],
+        }
+        optim = pybop.CMAES(posterior, **common_args)
+        results = optim.run()
+
+        return results.x

     # Parameterize the samplers
     @pytest.mark.parametrize(
@@ -105,46 +118,41 @@ def likelihood(self, model, parameters, init_soc):
         ],
     )
     @pytest.mark.integration
-    def test_sampling_thevenin(self, sampler, likelihood):
-        posterior = pybop.LogPosterior(likelihood)
-
-        # set common args
+    def test_sampling_thevenin(self, sampler, posterior, map_estimate):
+        x0 = np.clip(map_estimate + np.random.normal(0, 1e-3, size=2), 1e-3, 1e-1)
         common_args = {
             "log_pdf": posterior,
             "chains": 1,
-            "warm_up": 250,
-            "max_iterations": 500,
-            "cov0": [3e-4, 3e-4],
+            "warm_up": 450,
+            "cov0": [1e-3, 1e-3] if sampler in self.fast_samplers else [0.1, 0.1],
+            "max_iterations": 1000,
+            "x0": x0,
         }
-        if sampler in self.fast_samplers:
-            common_args["warm_up"] = 600
-            common_args["max_iterations"] = 1200

         # construct and run
         sampler = sampler(**common_args)
         if isinstance(sampler, SliceRankShrinkingMCMC):
             sampler._samplers[0].set_hyper_parameters([1e-3])
-        results = sampler.run()
+        chains = sampler.run()

         # Test PosteriorSummary
-        summary = pybop.PosteriorSummary(results)
+        summary = pybop.PosteriorSummary(chains)
         ess = summary.effective_sample_size()
         np.testing.assert_array_less(0, ess)
+        if not isinstance(sampler, RelativisticMCMC):
+            np.testing.assert_array_less(summary.rhat(), 1.05)

         # Assert both final sample and posterior mean
-        x = np.mean(results, axis=1)
+        x = np.mean(chains, axis=1)
         for i in range(len(x)):
-            np.testing.assert_allclose(x[i], self.ground_truth, atol=1.5e-2)
-            np.testing.assert_allclose(results[i][-1], self.ground_truth, atol=1e-2)
+            np.testing.assert_allclose(x[i], self.ground_truth, atol=5e-3)
+            np.testing.assert_allclose(chains[i][-1], self.ground_truth, atol=1e-2)

     def get_data(self, model, init_soc):
         initial_state = {"Initial SoC": init_soc}
         experiment = pybop.Experiment(
             [
-                (
-                    "Discharge at 0.5C for 2 minutes (4 second period)",
-                    "Rest for 1 minute (4 second period)",
-                ),
+                ("Discharge at 0.5C for 4 minutes (8 second period)",),
             ]
         )
         sim = model.predict(initial_state=initial_state, experiment=experiment)
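
The rewritten test warm-starts each sampler from a jittered copy of the CMAES MAP estimate (the `map_estimate` fixture) rather than leaving `x0` unset as before: the estimate is perturbed with a small Gaussian jitter and clipped to [1e-3, 1e-1] before being passed to the sampler. Starting chains near the posterior mode is what makes the shorter 450-iteration warm-up and the tightened posterior-mean tolerance (1.5e-2 down to 5e-3) plausible. A rough, self-contained sketch of the same seeding pattern on a toy Gaussian posterior, using scipy in place of `pybop.CMAES` purely for illustration:

```python
import numpy as np
from scipy.optimize import minimize

# Toy log-posterior: independent Gaussians around an assumed "ground truth"
truth = np.array([0.05, 0.05])


def log_posterior(x):
    return -0.5 * np.sum(((x - truth) / 0.01) ** 2)


# 1) Locate the MAP by minimising the negative log-posterior
#    (the test uses pybop.CMAES on a pybop.LogPosterior instead)
map_estimate = minimize(lambda x: -log_posterior(x), x0=np.array([0.02, 0.08])).x

# 2) Jitter and clip the starting point, mirroring the test's x0 construction
rng = np.random.default_rng(0)
x0 = np.clip(map_estimate + rng.normal(0, 1e-3, size=2), 1e-3, 1e-1)
print(x0)  # close to `truth`, so chains need far less warm-up to converge
```
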
