From 7da93b1420e584141685a62df3e958199c02b8ee Mon Sep 17 00:00:00 2001
From: Prateek Bhustali
Date: Mon, 6 Jun 2022 18:24:06 +0200
Subject: [PATCH] Added a basic comparison of sensitivity indices
---
docs/code/sensitivity/comparison/README.rst | 6 +
.../sensitivity/comparison/local_additive.py | 21 +++
.../sensitivity/comparison/local_ishigami.py | 23 +++
.../sensitivity/comparison/plot_additive.py | 147 ++++++++++++++++
.../sensitivity/comparison/plot_ishigami.py | 165 ++++++++++++++++++
docs/source/conf.py | 2 +
docs/source/sensitivity/index.rst | 7 +
7 files changed, 371 insertions(+)
create mode 100644 docs/code/sensitivity/comparison/README.rst
create mode 100644 docs/code/sensitivity/comparison/local_additive.py
create mode 100644 docs/code/sensitivity/comparison/local_ishigami.py
create mode 100644 docs/code/sensitivity/comparison/plot_additive.py
create mode 100644 docs/code/sensitivity/comparison/plot_ishigami.py
diff --git a/docs/code/sensitivity/comparison/README.rst b/docs/code/sensitivity/comparison/README.rst
new file mode 100644
index 00000000..59f928b8
--- /dev/null
+++ b/docs/code/sensitivity/comparison/README.rst
@@ -0,0 +1,6 @@
+Comparison of Sensitivity Indices
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In this section, we compare the sensitivity indices (Sobol, Cramér-von Mises, and Chatterjee) available in the package, using the Ishigami function and an additive model to illustrate their differences.
+
+In both examples, we note that the Cramér-von Mises and Chatterjee indices are almost equal, since the Chatterjee indices converge to the Cramér-von Mises indices in the large-sample limit.
\ No newline at end of file
diff --git a/docs/code/sensitivity/comparison/local_additive.py b/docs/code/sensitivity/comparison/local_additive.py
new file mode 100644
index 00000000..a0893fa1
--- /dev/null
+++ b/docs/code/sensitivity/comparison/local_additive.py
@@ -0,0 +1,21 @@
+"""
+
+Auxiliary file
+==============================================
+
+"""
+
+import numpy as np
+
+
+def evaluate(X, params) -> np.ndarray:
+ r"""A linear function that is used to demonstrate sensitivity indices.
+
+ .. math::
+ f(x) = a \cdot x_1 + b \cdot x_2
+ """
+ a, b = params
+
+ Y = a * X[:, 0] + b * X[:, 1]
+
+ return Y
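+
+
+if __name__ == "__main__":
+    # Minimal standalone sanity check with hypothetical inputs: ``X`` has one
+    # row per sample and one column per input variable.
+    X_demo = np.random.default_rng(0).standard_normal((5, 2))
+    print(evaluate(X_demo, params=[1, 2]))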
diff --git a/docs/code/sensitivity/comparison/local_ishigami.py b/docs/code/sensitivity/comparison/local_ishigami.py
new file mode 100644
index 00000000..e5af649f
--- /dev/null
+++ b/docs/code/sensitivity/comparison/local_ishigami.py
@@ -0,0 +1,23 @@
+"""
+
+Auxiliary file
+==============================================
+
+"""
+
+import numpy as np
+
+
+def evaluate(X, params=(7, 0.1)):
+    """Non-monotonic Ishigami-Homma test function with three input variables."""
+
+ a = params[0]
+ b = params[1]
+
+ Y = (
+ np.sin(X[:, 0])
+ + a * np.power(np.sin(X[:, 1]), 2)
+ + b * np.power(X[:, 2], 4) * np.sin(X[:, 0])
+ )
+
+ return Y
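+
+
+if __name__ == "__main__":
+    # Minimal standalone sanity check with hypothetical inputs: one row per
+    # sample, one column per input variable, each X_i drawn from U(-pi, pi).
+    X_demo = np.random.default_rng(0).uniform(-np.pi, np.pi, size=(5, 3))
+    print(evaluate(X_demo))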
diff --git a/docs/code/sensitivity/comparison/plot_additive.py b/docs/code/sensitivity/comparison/plot_additive.py
new file mode 100644
index 00000000..7c34e28b
--- /dev/null
+++ b/docs/code/sensitivity/comparison/plot_additive.py
@@ -0,0 +1,147 @@
+r"""
+
+Additive function
+==============================================
+
+.. math::
+ f(x) = a \cdot X_1 + b \cdot X_2, \quad X_1, X_2 \sim \mathcal{N}(0, 1), \quad a,b \in \mathbb{R}
+
+"""
+
+# %%
+import numpy as np
+import matplotlib.pyplot as plt
+
+from UQpy.run_model.RunModel import RunModel
+from UQpy.run_model.model_execution.PythonModel import PythonModel
+from UQpy.distributions import Normal
+from UQpy.distributions.collection.JointIndependent import JointIndependent
+from UQpy.sensitivity.Chatterjee import Chatterjee
+from UQpy.sensitivity.CramervonMises import CramervonMises as cvm
+from UQpy.sensitivity.Sobol import Sobol
+from UQpy.sensitivity.PostProcess import *
+
+np.random.seed(123)
+
+# %% [markdown]
+# **Define the model and input distributions**
+
+# %%
+# Create Model object
+a, b = 1, 2
+
+model = PythonModel(
+ model_script="local_additive.py",
+ model_object_name="evaluate",
+ var_names=[
+ "X_1",
+ "X_2",
+ ],
+ delete_files=True,
+ params=[a, b],
+)
+
+runmodel_obj = RunModel(model=model)
+
+# Define distribution object
+dist_object = JointIndependent([Normal(0, 1)] * 2)
+
+# %% [markdown]
+# **Compute Sobol indices**
+
+# %%
+SA_sobol = Sobol(runmodel_obj, dist_object)
+
+computed_indices_sobol = SA_sobol.run(n_samples=50_000)
+
+# %% [markdown]
+# **First order Sobol indices**
+#
+# Expected first order Sobol indices:
+#
+# :math:`\mathrm{S}_1 = \frac{a^2 \cdot \mathbb{V}[X_1]}{a^2 \cdot \mathbb{V}[X_1] + b^2 \cdot \mathbb{V}[X_2]} = \frac{1^2 \cdot 1}{1^2 \cdot 1 + 2^2 \cdot 1} = 0.2`
+#
+# :math:`\mathrm{S}_2 = \frac{b^2 \cdot \mathbb{V}[X_2]}{a^2 \cdot \mathbb{V}[X_1] + b^2 \cdot \mathbb{V}[X_2]} = \frac{2^2 \cdot 1}{1^2 \cdot 1 + 2^2 \cdot 1} = 0.8`
+
+# %%
+computed_indices_sobol["sobol_i"]
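+
+# %%
+# As a quick cross-check of the analytical values quoted above, evaluate them
+# directly from the variance decomposition of the linear model
+# (Var[Y] = a^2 Var[X_1] + b^2 Var[X_2], with unit input variances).
+analytical_s1 = a**2 / (a**2 + b**2)  # expected 0.2
+analytical_s2 = b**2 / (a**2 + b**2)  # expected 0.8
+print(analytical_s1, analytical_s2)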
+
+# %% [markdown]
+# **Compute Chatterjee indices**
+
+# %%
+SA_chatterjee = Chatterjee(runmodel_obj, dist_object)
+
+computed_indices_chatterjee = SA_chatterjee.run(n_samples=50_000)
+
+# %%
+computed_indices_chatterjee["chatterjee_i"]
+
+# %% [markdown]
+# **Compute Cramér-von Mises indices**
+
+# %%
+SA_cvm = cvm(runmodel_obj, dist_object)
+
+# Compute CVM indices using the pick and freeze algorithm
+computed_indices_cvm = SA_cvm.run(n_samples=20_000, estimate_sobol_indices=True)
+
+# %%
+computed_indices_cvm["CVM_i"]
+
+# %% [markdown]
+# **Plot all indices**
+
+# %%
+num_vars = 2
+_idx = np.arange(num_vars)
+variable_names = [r"$X_{}$".format(i + 1) for i in range(num_vars)]
+
+# round to 2 decimal places
+indices_1 = np.around(computed_indices_sobol["sobol_i"][:, 0], decimals=2)
+indices_2 = np.around(computed_indices_chatterjee["chatterjee_i"][:, 0], decimals=2)
+indices_3 = np.around(computed_indices_cvm["CVM_i"][:, 0], decimals=2)
+
+fig, ax = plt.subplots()
+width = 0.3
+ax.spines["top"].set_visible(False)
+ax.spines["right"].set_visible(False)
+
+bar_indices_1 = ax.bar(
+ _idx - width, # x-axis
+ indices_1, # y-axis
+ width=width, # bar width
+ color="C0", # bar color
+ # alpha=0.5, # bar transparency
+ label="Sobol", # bar label
+ ecolor="k", # error bar color
+ capsize=5, # error bar cap size in pt
+)
+
+bar_indices_2 = ax.bar(
+ _idx, # x-axis
+ indices_2, # y-axis
+ width=width, # bar width
+ color="C2", # bar color
+ # alpha=0.5, # bar transparency
+ label="Chatterjee", # bar label
+ ecolor="k", # error bar color
+ capsize=5, # error bar cap size in pt
+)
+
+bar_indices_3 = ax.bar(
+ _idx + width, # x-axis
+ indices_3, # y-axis
+ width=width, # bar width
+ color="C3", # bar color
+ # alpha=0.5, # bar transparency
+ label="Cramér-von Mises", # bar label
+ ecolor="k", # error bar color
+ capsize=5, # error bar cap size in pt
+)
+
+ax.bar_label(bar_indices_1, label_type="edge", fontsize=10)
+ax.bar_label(bar_indices_2, label_type="edge", fontsize=10)
+ax.bar_label(bar_indices_3, label_type="edge", fontsize=10)
+ax.set_xticks(_idx, variable_names)
+ax.set_xlabel("Model inputs")
+ax.set_title("Comparison of sensitivity indices")
+ax.set_ylim(top=1) # set only upper limit of y to 1
+ax.legend()
+
+plt.show()
diff --git a/docs/code/sensitivity/comparison/plot_ishigami.py b/docs/code/sensitivity/comparison/plot_ishigami.py
new file mode 100644
index 00000000..11624573
--- /dev/null
+++ b/docs/code/sensitivity/comparison/plot_ishigami.py
@@ -0,0 +1,165 @@
+r"""
+
+Ishigami function
+==============================================
+
+The Ishigami function is a non-linear, non-monotonic function that is commonly used to
+benchmark uncertainty and sensitivity analysis methods.
+
+.. math::
+    f(x_1, x_2, x_3) = \sin(x_1) + a \cdot \sin^2(x_2) + b \cdot x_3^4 \sin(x_1)
+
+.. math::
+ x_1, x_2, x_3 \sim \mathcal{U}(-\pi, \pi), \quad a, b\in \mathbb{R}
+
+"""
+
+# %%
+import numpy as np
+import matplotlib.pyplot as plt
+
+from UQpy.run_model.RunModel import RunModel
+from UQpy.run_model.model_execution.PythonModel import PythonModel
+from UQpy.distributions import Uniform
+from UQpy.distributions.collection.JointIndependent import JointIndependent
+from UQpy.sensitivity.Chatterjee import Chatterjee
+from UQpy.sensitivity.CramervonMises import CramervonMises as cvm
+from UQpy.sensitivity.Sobol import Sobol
+from UQpy.sensitivity.PostProcess import *
+
+np.random.seed(123)
+
+# %% [markdown]
+# **Define the model and input distributions**
+
+# %%
+# Create Model object
+model = PythonModel(
+ model_script="local_ishigami.py",
+ model_object_name="evaluate",
+ var_names=[r"$X_1$", "$X_2$", "$X_3$"],
+ delete_files=True,
+ params=[7, 0.1],
+)
+
+runmodel_obj = RunModel(model=model)
+
+# Define distribution object
+# Uniform(loc, scale) has support [loc, loc + scale], i.e. U(-pi, pi) here
+dist_object = JointIndependent([Uniform(-np.pi, 2 * np.pi)] * 3)
+
+# %% [markdown]
+# **Compute Sobol indices**
+
+# %%
+SA_sobol = Sobol(runmodel_obj, dist_object)
+
+computed_indices_sobol = SA_sobol.run(n_samples=100_000)
+
+# %% [markdown]
+# **First order Sobol indices**
+#
+# Expected first order Sobol indices:
+#
+# :math:`S_1` = 0.3139
+#
+# :math:`S_2` = 0.4424
+#
+# :math:`S_3` = 0.0
+
+# %%
+computed_indices_sobol["sobol_i"]
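+
+# %%
+# Cross-check of the values quoted above: the closed-form variance
+# decomposition of the Ishigami function for X_i ~ U(-pi, pi) (a standard
+# result); a and b mirror the params passed to the model above.
+a, b = 7, 0.1
+total_variance = a**2 / 8 + b * np.pi**4 / 5 + b**2 * np.pi**8 / 18 + 0.5
+D_1 = 0.5 * (1 + b * np.pi**4 / 5) ** 2
+D_2 = a**2 / 8
+D_3 = 0.0
+print(D_1 / total_variance, D_2 / total_variance, D_3 / total_variance)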
+
+# %% [markdown]
+# **Total order Sobol indices**
+#
+# Expected total order Sobol indices:
+#
+# :math:`S_{T_1}` = 0.55758886
+#
+# :math:`S_{T_2}` = 0.44241114
+#
+# :math:`S_{T_3}` = 0.24368366
+
+# %%
+computed_indices_sobol["sobol_total_i"]
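+
+# %%
+# Total order cross-check: add the X_1/X_3 interaction variance D_13 to the
+# partial variances computed in the cell above.
+D_13 = 8 * b**2 * np.pi**8 / 225
+print(
+    (D_1 + D_13) / total_variance,
+    D_2 / total_variance,
+    (D_3 + D_13) / total_variance,
+)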
+
+# %% [markdown]
+# **Compute Chatterjee indices**
+
+# %%
+SA_chatterjee = Chatterjee(runmodel_obj, dist_object)
+
+computed_indices_chatterjee = SA_chatterjee.run(n_samples=50_000)
+
+# %%
+computed_indices_chatterjee["chatterjee_i"]
+
+# %% [markdown]
+# **Compute Cramér-von Mises indices**
+
+# %%
+SA_cvm = cvm(runmodel_obj, dist_object)
+
+# Compute CVM indices using the pick and freeze algorithm
+computed_indices_cvm = SA_cvm.run(n_samples=20_000, estimate_sobol_indices=True)
+
+# %%
+computed_indices_cvm["CVM_i"]
+
+# %% [markdown]
+# **Plot all indices**
+
+# %%
+num_vars = 3
+_idx = np.arange(num_vars)
+variable_names = [r"$X_{}$".format(i + 1) for i in range(num_vars)]
+
+# round to 2 decimal places
+indices_1 = np.around(computed_indices_sobol["sobol_i"][:, 0], decimals=2)
+indices_2 = np.around(computed_indices_chatterjee["chatterjee_i"][:, 0], decimals=2)
+indices_3 = np.around(computed_indices_cvm["CVM_i"][:, 0], decimals=2)
+
+fig, ax = plt.subplots()
+width = 0.3
+ax.spines["top"].set_visible(False)
+ax.spines["right"].set_visible(False)
+
+bar_indices_1 = ax.bar(
+ _idx - width, # x-axis
+ indices_1, # y-axis
+ width=width, # bar width
+ color="C0", # bar color
+ # alpha=0.5, # bar transparency
+ label="Sobol", # bar label
+ ecolor="k", # error bar color
+ capsize=5, # error bar cap size in pt
+)
+
+bar_indices_2 = ax.bar(
+ _idx, # x-axis
+ indices_2, # y-axis
+ width=width, # bar width
+ color="C2", # bar color
+ # alpha=0.5, # bar transparency
+ label="Chatterjee", # bar label
+ ecolor="k", # error bar color
+ capsize=5, # error bar cap size in pt
+)
+
+bar_indices_3 = ax.bar(
+ _idx + width, # x-axis
+ indices_3, # y-axis
+ width=width, # bar width
+ color="C3", # bar color
+ # alpha=0.5, # bar transparency
+ label="Cramér-von Mises", # bar label
+ ecolor="k", # error bar color
+ capsize=5, # error bar cap size in pt
+)
+
+ax.bar_label(bar_indices_1, label_type="edge", fontsize=10)
+ax.bar_label(bar_indices_2, label_type="edge", fontsize=10)
+ax.bar_label(bar_indices_3, label_type="edge", fontsize=10)
+ax.set_xticks(_idx, variable_names)
+ax.set_xlabel("Model inputs")
+ax.set_title("Comparison of sensitivity indices")
+ax.set_ylim(top=1) # set only upper limit of y to 1
+ax.legend()
+
+plt.show()
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 68538001..3c7c1031 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -95,6 +95,7 @@
"../code/sensitivity/cramer_von_mises",
"../code/sensitivity/chatterjee",
"../code/sensitivity/generalised_sobol",
+ "../code/sensitivity/comparison",
"../code/stochastic_processes/bispectral",
"../code/stochastic_processes/karhunen_loeve",
"../code/stochastic_processes/spectral",
@@ -133,6 +134,7 @@
"auto_examples/sensitivity/cramer_von_mises",
"auto_examples/sensitivity/chatterjee",
"auto_examples/sensitivity/generalised_sobol",
+ "auto_examples/sensitivity/comparison",
"auto_examples/stochastic_processes/bispectral",
"auto_examples/stochastic_processes/karhunen_loeve",
"auto_examples/stochastic_processes/spectral",
diff --git a/docs/source/sensitivity/index.rst b/docs/source/sensitivity/index.rst
index 1b2a8367..161cfd3b 100644
--- a/docs/source/sensitivity/index.rst
+++ b/docs/source/sensitivity/index.rst
@@ -26,3 +26,10 @@ Sensitivity analysis comprises techniques focused on determining how the variati
Morris Sensitivity
Polynomial Chaos Sensitivity
Sobol Sensitivity
+
+Examples
+""""""""""
+
+.. toctree::
+
+ Comparison of indices <../auto_examples/sensitivity/comparison/index>