Commit

Fixes moment estimation error

dimtsap committed May 17, 2022
1 parent 8a2c2f3 commit 6cf455c
Showing 4 changed files with 183 additions and 82 deletions.
2 changes: 1 addition & 1 deletion README.rst
@@ -61,7 +61,7 @@ Dependencies

* ::
-Python >= 3.6
+Python >= 3.9
Git >= 2.13.1

License
2 changes: 1 addition & 1 deletion docs/requirements.txt
@@ -1,4 +1,4 @@
-test-uqpy
+UQpy
sphinx_autodoc_typehints
sphinx_rtd_theme
sphinx_gallery
167 changes: 87 additions & 80 deletions src/UQpy/surrogates/polynomial_chaos/PolynomialChaosExpansion.py
@@ -8,7 +8,8 @@
from UQpy.surrogates.polynomial_chaos.regressions.baseclass.Regression import Regression
from UQpy.surrogates.polynomial_chaos.polynomials.TotalDegreeBasis import PolynomialBasis
from UQpy.distributions import Uniform, Normal
-from UQpy.surrogates.polynomial_chaos.polynomials import Legendre,Hermite
+from UQpy.surrogates.polynomial_chaos.polynomials import Legendre, Hermite


class PolynomialChaosExpansion(Surrogate):

@@ -97,29 +98,29 @@ def leaveoneout_error(self):
        :return: Cross validation error of experimental design.
        """
-        x=self.experimental_design_input
-        y=self.experimental_design_output
+        x = self.experimental_design_input
+        y = self.experimental_design_output

        if y.ndim == 1 or y.shape[1] == 1:
            y = y.reshape(-1, 1)

        n_samples = x.shape[0]
        mu_yval = (1 / n_samples) * np.sum(y, axis=0)
-        y_val = self.predict(x, )
-        polynomialbasis= self.design_matrix
+        y_val = self.predict(x, )
+        polynomialbasis = self.design_matrix

        H = np.dot(polynomialbasis, np.linalg.pinv(np.dot(polynomialbasis.T, polynomialbasis)))
        H *= polynomialbasis
-        Hdiag = np.sum(H, axis=1).reshape(-1,1)
+        Hdiag = np.sum(H, axis=1).reshape(-1, 1)

-        eps_val=((n_samples - 1) / n_samples *np.sum(((y - y_val)/(1 - Hdiag))**2,axis=0)) / (np.sum((y - mu_yval) ** 2, axis=0))
+        eps_val = ((n_samples - 1) / n_samples * np.sum(((y - y_val) / (1 - Hdiag)) ** 2, axis=0)) / (
+            np.sum((y - mu_yval) ** 2, axis=0))
        if y.ndim == 1 or y.shape[1] == 1:
            eps_val = float(eps_val)

        return np.round(eps_val, 7)

def validation_error(self, x: np.ndarray, y: np.ndarray):
"""
Returns the validation error.
@@ -180,7 +181,7 @@ def get_moments(self, higher: bool = False):
        :param higher: True corresponds to calculation of skewness and kurtosis (computationally expensive for large basis set).
:return: Returns the mean and variance.
"""

if self.bias is not None:
mean = self.coefficients[0, :] + np.squeeze(self.bias)
else:
@@ -192,77 +193,83 @@
            variance = float(variance)
            mean = float(mean)

-        if higher==False:
-            return np.round(mean, 4), np.round(variance, 4)
+        if not higher:
+            return mean, variance

        else:
-            multindex=self.multi_index_set
-            P,inputs_number=multindex.shape
-            if inputs_number==1:
-                marginals=[self.polynomial_basis.distributions]
+            multindex = self.multi_index_set
+            P, inputs_number = multindex.shape
+
+            if inputs_number == 1:
+                marginals = [self.polynomial_basis.distributions]

            else:
-                marginals=self.polynomial_basis.distributions.marginals
-
-
-            skewness=np.zeros(self.outputs_number)
-            kurtosis=np.zeros(self.outputs_number)
-
-            for ii in range (0,self.outputs_number):
-
-                Beta=self.coefficients[:, ii]
-                third_moment=0
-                fourth_moment=0
-
-                indices=np.array(np.meshgrid(range(1,P),range(1,P),range(1,P),range(1,P))).T.reshape(-1,4)
-                i=0
+                marginals = self.polynomial_basis.distributions.marginals
+
+            skewness = np.zeros(self.outputs_number)
+            kurtosis = np.zeros(self.outputs_number)
+
+            for ii in range(0, self.outputs_number):
+
+                Beta = self.coefficients[:, ii]
+                third_moment = 0
+                fourth_moment = 0
+
+                indices = np.array(np.meshgrid(range(1, P), range(1, P), range(1, P), range(1, P))).T.reshape(-1, 4)
+                i = 0
                for index in indices:
-                    tripleproduct_ND=1
-                    quadproduct_ND=1
-
-
-                    for m in range (0,inputs_number):
-
-                        if i<(P-1)**3:
-
-                            if type(marginals[m])==Normal:
-                                tripleproduct_1D=Hermite.hermite_triple_product(multindex[index[0],m],multindex[index[1],m],multindex[index[2],m])
-
-                            if type(marginals[m])==Uniform:
-                                tripleproduct_1D=Legendre.legendre_triple_product(multindex[index[0],m],multindex[index[1],m],multindex[index[2],m])
-
-                            tripleproduct_ND=tripleproduct_ND*tripleproduct_1D
-
+                    tripleproduct_ND = 1
+                    quadproduct_ND = 1
+
+                    for m in range(0, inputs_number):
+
+                        if i < (P - 1) ** 3:
+
+                            if type(marginals[m]) == Normal:
+                                tripleproduct_1D = Hermite.hermite_triple_product(multindex[index[0], m],
+                                                                                  multindex[index[1], m],
+                                                                                  multindex[index[2], m])
+
+                            if type(marginals[m]) == Uniform:
+                                tripleproduct_1D = Legendre.legendre_triple_product(multindex[index[0], m],
+                                                                                    multindex[index[1], m],
+                                                                                    multindex[index[2], m])
+
+                            tripleproduct_ND = tripleproduct_ND * tripleproduct_1D
+
                        else:
-                            tripleproduct_ND=0
-
-                        quadproduct_1D=0
-
-                        for n in range (0,multindex[index[0],m]+multindex[index[1],m]+1):
-
-                            if type(marginals[m])==Normal:
-                                tripproduct1=Hermite.hermite_triple_product(multindex[index[0],m],multindex[index[1],m],n)
-                                tripproduct2=Hermite.hermite_triple_product(multindex[index[2],m],multindex[index[3],m],n)
-
-                            if type(marginals[m])==Uniform:
-                                tripproduct1=Legendre.legendre_triple_product(multindex[index[0],m],multindex[index[1],m],n)
-                                tripproduct2=Legendre.legendre_triple_product(multindex[index[2],m],multindex[index[3],m],n)
-
-                            quadproduct_1D=quadproduct_1D+tripproduct1*tripproduct2
-
-                        quadproduct_ND=quadproduct_ND*quadproduct_1D
-
-                    third_moment+=tripleproduct_ND*Beta[index[0]]*Beta[index[1]]*Beta[index[2]]
-                    fourth_moment+=quadproduct_ND*Beta[index[0]]*Beta[index[1]]*Beta[index[2]]*Beta[index[3]]
-
-                    i+=1
-
-                skewness[ii]=1/(np.sqrt(variance)**3)*third_moment
-                kurtosis[ii]=1/(variance**2)*fourth_moment
-
+                            tripleproduct_ND = 0
+
+                        quadproduct_1D = 0
+
+                        for n in range(0, multindex[index[0], m] + multindex[index[1], m] + 1):
+
+                            if type(marginals[m]) == Normal:
+                                tripproduct1 = Hermite.hermite_triple_product(multindex[index[0], m],
+                                                                              multindex[index[1], m], n)
+                                tripproduct2 = Hermite.hermite_triple_product(multindex[index[2], m],
+                                                                              multindex[index[3], m], n)
+
+                            if type(marginals[m]) == Uniform:
+                                tripproduct1 = Legendre.legendre_triple_product(multindex[index[0], m],
+                                                                                multindex[index[1], m], n)
+                                tripproduct2 = Legendre.legendre_triple_product(multindex[index[2], m],
+                                                                                multindex[index[3], m], n)
+
+                            quadproduct_1D = quadproduct_1D + tripproduct1 * tripproduct2
+
+                        quadproduct_ND = quadproduct_ND * quadproduct_1D
+
+                    third_moment += tripleproduct_ND * Beta[index[0]] * Beta[index[1]] * Beta[index[2]]
+                    fourth_moment += quadproduct_ND * Beta[index[0]] * Beta[index[1]] * Beta[index[2]] * Beta[index[3]]
+
+                    i += 1
+
+                skewness[ii] = 1 / (np.sqrt(variance) ** 3) * third_moment
+                kurtosis[ii] = 1 / (variance ** 2) * fourth_moment
+
            if self.coefficients.ndim == 1 or self.coefficients.shape[1] == 1:
                skewness = float(skewness[0])
                kurtosis = float(kurtosis[0])

-            return mean,variance,skewness,kurtosis
+            return mean, variance, skewness, kurtosis
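
Editorial note (not part of the commit): for an orthonormal polynomial chaos basis the mean of the surrogate is the zero-degree coefficient (plus any bias term) and the variance is the sum of squares of the remaining coefficients, while skewness and kurtosis additionally require triple and quadruple products of the basis polynomials, which is why higher=True is the computationally expensive path. Below is a minimal usage sketch of the updated get_moments API; it assumes only the constructor and fit/get_moments signatures that appear in this diff and in the test below, and the toy model, degree, and sample size are illustrative.

# Minimal usage sketch (assumed toy example, mirrors the imports used in the test below).
from UQpy.distributions import Uniform, JointIndependent
from UQpy.surrogates.polynomial_chaos import TotalDegreeBasis, LeastSquareRegression, PolynomialChaosExpansion

# Two independent uniform inputs and a toy model y = x1 + x2**2.
joint = JointIndependent(marginals=[Uniform(loc=0, scale=1), Uniform(loc=0, scale=1)])
basis = TotalDegreeBasis(joint, 3)
pce = PolynomialChaosExpansion(polynomial_basis=basis, regression_method=LeastSquareRegression())

x = joint.rvs(200)
y = (x[:, 0] + x[:, 1] ** 2).reshape(-1, 1)
pce.fit(x, y)

mean, variance = pce.get_moments()                                 # mean and variance only
mean, variance, skewness, kurtosis = pce.get_moments(higher=True)  # also skewness and kurtosis
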
94 changes: 94 additions & 0 deletions tests/unit_tests/surrogates/test_pce.py
@@ -222,6 +222,100 @@ def test_17():
assert round(generalized_first_sobol[0], 4) == 0.0137


def test_lotka_volterra_generalized_sobol():
import numpy as np
import math
from scipy import integrate
from UQpy.distributions import Uniform, JointIndependent
from UQpy.surrogates.polynomial_chaos import TotalDegreeBasis, \
LeastSquareRegression, \
PolynomialChaosExpansion
from UQpy.sensitivity.PceSensitivity import PceSensitivity

### function to be approximated
def LV(a, b, c, d, t):

# X_f0 = np.array([ 0. , 0.])
X_f1 = np.array([c / (d * b), a / b])

def dX_dt(X, t=0):
""" Return the growth rate of fox and rabbit populations. """
return np.array([a * X[0] - b * X[0] * X[1],
-c * X[1] + d * b * X[0] * X[1]])

        X0 = np.array([10, 5])  # initial conditions: 10 rabbits and 5 foxes

X, infodict = integrate.odeint(dX_dt, X0, t, full_output=True)

return X, X_f1

# set random seed for reproducibility
np.random.seed(1)

### simulation parameters
n = 512
t = np.linspace(0, 25, n)

### Probability distributions of input parameters
pdf1 = Uniform(loc=0.9, scale=0.1) # a
pdf2 = Uniform(loc=0.1, scale=0.05) # b
# pdf2 = Uniform(loc=8, scale=10) # c
# pdf2 = Uniform(loc=8, scale=10) # d
c = 1.5
d = 0.75
margs = [pdf1, pdf2]
joint = JointIndependent(marginals=margs)

print('Total degree: ', max_degree)
polynomial_basis = TotalDegreeBasis(joint, max_degree)

print('Size of basis:', polynomial_basis.polynomials_number)
# training data
sampling_coeff = 5
print('Sampling coefficient: ', sampling_coeff)
np.random.seed(42)
n_samples = math.ceil(sampling_coeff * polynomial_basis.polynomials_number)
print('Training data: ', n_samples)
x_train = joint.rvs(n_samples)
y_train = []
for i in range(x_train.shape[0]):
out, X_f1 = LV(x_train[i, 0], x_train[i, 1], c, d, t)
y_train.append(out.flatten())
print('Training sample size:', n_samples)

# fit model
least_squares = LeastSquareRegression()
pce_metamodel = PolynomialChaosExpansion(polynomial_basis=polynomial_basis,
regression_method=least_squares)
pce_metamodel.fit(x_train, y_train)

# approximation errors
np.random.seed(43)
n_samples_test = 5000
x_test = joint.rvs(n_samples_test)
y_test = []
for i in range(x_test.shape[0]):
out, X_f1 = LV(x_test[i, 0], x_test[i, 1], c, d, t)
y_test.append(out.flatten())
print('Test sample size:', n_samples_test)

y_test_pce = pce_metamodel.predict(x_test)
errors = np.abs(y_test_pce - y_test)
l2_rel_err = np.linalg.norm(errors, axis=1) / np.linalg.norm(y_test, axis=1)

l2_rel_err_mean = np.mean(l2_rel_err)
print('Mean L2 relative error:', l2_rel_err_mean)

# Sobol sensitivity analysis
pce_sa = PceSensitivity(pce_metamodel)
GS1 = pce_sa.calculate_generalized_first_order_indices()
assert round(GS1[0], 4) == 0.2148
assert round(GS1[1], 4) == 0.7426
GST = pce_sa.calculate_generalized_total_order_indices()
assert round(GST[0], 4) == 0.2574
assert round(GST[1], 4) == 0.7852
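
Editorial aside on the asserted values (not part of the test): with only two random inputs, the total-order index of one input equals one minus the first-order index of the other, and the asserted generalized indices satisfy this identity, e.g.:

# Consistency check on the asserted values above (illustrative only):
# for a two-input model, ST_1 = 1 - S1_2 and ST_2 = 1 - S1_1.
assert abs(0.2574 - (1 - 0.7426)) < 1e-12
assert abs(0.7852 - (1 - 0.2148)) < 1e-12
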


def test_18():
"""
Test Sobol indices for vector-valued quantity of interest on the random inputs
