diff --git a/.gitignore b/.gitignore
index d2471dff7..77104e773 100644
--- a/.gitignore
+++ b/.gitignore
@@ -53,4 +53,5 @@ ogcore/tests/OUTPUT_BASELINE/*
 ogcore/tests/OUTPUT_REFORM/*
 regression/OUTPUT_BASELINE/*
 regression/OUTPUT_REFORM*
+.vscode/
 *default.profraw
diff --git a/environment.yml b/environment.yml
index 83d7bb243..80397f15a 100644
--- a/environment.yml
+++ b/environment.yml
@@ -20,3 +20,7 @@ dependencies:
 - requests
 - openpyxl
 - black
+- pip
+- pip:
+  - pygam
+
diff --git a/ogcore/__init__.py b/ogcore/__init__.py
index 6b1322859..cc94f5a10 100644
--- a/ogcore/__init__.py
+++ b/ogcore/__init__.py
@@ -19,4 +19,4 @@
 from ogcore.txfunc import *
 from ogcore.utils import *
 
-__version__ = "0.10.7"
+__version__ = "0.10.8"
diff --git a/ogcore/default_parameters.json b/ogcore/default_parameters.json
index 1686224f9..5a764bd52 100644
--- a/ogcore/default_parameters.json
+++ b/ogcore/default_parameters.json
@@ -3149,7 +3149,8 @@
                     "DEP_totalinc",
                     "GS",
                     "linear",
-                    "mono"
+                    "mono",
+                    "mono2D"
                 ]
             }
         }
diff --git a/ogcore/txfunc.py b/ogcore/txfunc.py
index 69441d401..c58553773 100644
--- a/ogcore/txfunc.py
+++ b/ogcore/txfunc.py
@@ -23,6 +23,9 @@
 from ogcore.constants import DEFAULT_START_YEAR, SHOW_RUNTIME
 from ogcore import utils
 import warnings
+from pygam import LinearGAM, s, te
+from matplotlib import cm
+import random
 
 
 if not SHOW_RUNTIME:
@@ -303,6 +306,69 @@ def get_tax_rates(
                     for t in range(income.shape[0])
                 ]
             txrates = np.array(txrates)
+    elif tax_func_type == "mono2D":
+        if for_estimation:
+            mono_interp = params[0]
+            txrates = mono_interp([[X, Y]])
+        else:
+            if np.isscalar(X) and np.isscalar(Y):
+                txrates = params[0]([[X, Y]])
+            elif X.ndim == 1 and np.isscalar(Y):
+                if (X.shape[0] == len(params)) and (
+                    len(params) > 1
+                ):  # for case where loops over S
+                    txrates = [
+                        params[s][0]([[X[s], Y]])
+                        for s in range(income.shape[0])
+                    ]
+                else:
+                    txrates = [
+                        params[0](income[i]) for i in range(income.shape[0])
+                    ]
+            elif np.isscalar(X) and Y.ndim == 1:
+                if (Y.shape[0] == len(params)) and (
+                    len(params) > 1
+                ):  # for case where loops over S
+                    txrates = [
+                        params[s][0]([[X, Y[s]]])
+                        for s in range(income.shape[0])
+                    ]
+                else:
+                    txrates = [
+                        params[0](income[i]) for i in range(income.shape[0])
+                    ]
+            elif X.ndim == 1 and Y.ndim == 1:
+                if (X.shape[0] == Y.shape[0] == len(params)) and (
+                    len(params) > 1
+                ):
+                    txrates = [
+                        params[s][0]([[X[s], Y[s]]]) for s in range(X.shape[0])
+                    ]
+                else:
+                    txrates = [
+                        params[0]([[X[i], Y[i]]]) for i in range(X.shape[0])
+                    ]
+            elif X.ndim == 2 and Y.ndim == 2:
+                txrates = [
+                    [
+                        params[s][j][0]([[X[s, j], Y[s, j]]])
+                        for j in range(X.shape[1])
+                    ]
+                    for s in range(X.shape[0])
+                ]
+            else:  # to catch 3D arrays, looping over T, S, J
+                txrates = [
+                    [
+                        [
+                            params[t][s][j][0]([[X[t, s, j], Y[t, s, j]]])
+                            for j in range(income.shape[2])
+                        ]
+                        for s in range(income.shape[1])
+                    ]
+                    for t in range(income.shape[0])
+                ]
+            txrates = np.squeeze(np.array(txrates))
+
 
     return txrates
 
@@ -740,11 +806,28 @@ def txfunc_est(
         wsse = wsse_cstr
         params = [mono_interp]
        params_to_plot = params
+    elif tax_func_type == "mono2D":
+        obs = df.shape[0]
+        mono_interp, _, wsse_cstr, _, _ = monotone_spline(
+            # df[["total_labinc", "total_capinc"]].values,
+            # df["etr"].values,
+            # df["weight"].values,
+            np.vstack((X, Y)).T,
+            # X, Y,
+            txrates,
+            wgts,
+            bins=[100, 100],
+            method="pygam",
+            splines=[100, 100],
+        )
+        wsse = wsse_cstr
+        params = [mono_interp]
+        params_to_plot = params
     else:
         raise RuntimeError(
             "Choice of tax function is not in the set of"
             + " possible tax functions. Please select"
-            + " from: DEP, DEP_totalinc, GS, linear."
+            + " from: DEP, DEP_totalinc, GS, linear, mono, mono2D."
         )
     if graph:
         pp.txfunc_graph(
@@ -1363,6 +1446,7 @@ def tax_func_estimate(
         "GS": 3,
         "linear": 1,
         "mono": 1,
+        "mono2D": 1,
     }
     numparams = int(tax_func_type_num_params_dict[tax_func_type])
     years_list = np.arange(start_year, end_yr + 1)
@@ -1686,6 +1770,59 @@ def tax_func_estimate(
     return dict_params
 
 
+def avg_by_bin_multd(x, y, bins, weights=None):
+    """
+    Args:
+        x (numpy array): 2d with dimensions n by m used for binning
+        y (numpy array): 1d with length n
+        bins (numpy array): 1d with length m, each entry must divide n
+            and is the number of bins for the corresponding column in x
+        weights (None or numpy array): 1d with length n specifying the
+            weight of each observation; if None then an array of ones
+
+    Returns:
+        xNew (numpy array): 2d with second dimension m, first
+            dimension is product of elements in bins, with each entry
+            representative of bin across all the features
+        yNew (numpy array): 1d with length same as first dimension
+            of xNew, weighted average of y's corresponding to each
+            entry of xNew
+        weightsNew (numpy array): 1d with length same as yNew, weight
+            corresponding to each xNew, yNew row
+    """
+    if x.shape[1] != len(bins):
+        message = "Dimensions of x and bins don't match: {} != {}".format(
+            x.shape[1], len(bins)
+        )
+        raise ValueError(message)
+    else:
+        size = np.prod(bins)
+        tupleBins = tuple(bins)
+        xNew = np.zeros((size, x.shape[1]), dtype=float)
+        yNew = np.zeros(size, dtype=float)
+        weightsNew = np.zeros(size, dtype=float)
+
+        # iterate through each of the final bins, which consists of bins for each feature
+        # for each, only retain entries falling in that bin
+        for i in range(size):
+            index = list(np.unravel_index(i, tupleBins))
+            valid = np.ones(x.shape[0], dtype=bool)
+            for j, v in enumerate(index):
+                valid &= (
+                    x[:, j] >= np.percentile(x[:, j], v * 100 / bins[j])
+                ) & (x[:, j] < np.percentile(x[:, j], (v + 1) * 100 / bins[j]))
+            if np.sum(valid) != 0:
+                xNew[i, :] = np.average(
+                    x[valid], axis=0, weights=weights[valid]
+                )
+                yNew[i] = np.average(y[valid], axis=0, weights=weights[valid])
+                weightsNew[i] = np.sum(weights[valid])
+        xNew = xNew[~(weightsNew == 0)]
+        yNew = yNew[~(weightsNew == 0)]
+        weightsNew = weightsNew[~(weightsNew == 0)]
+    return xNew, yNew, weightsNew
+
+
 def monotone_spline(
     x,
     y,
@@ -1695,140 +1832,307 @@ def monotone_spline(
     weights,
     bins=None,
     lam=12,
     kap=1e7,
     incl_uncstr=False,
     show_plot=False,
+    method="eilers",
+    splines=None,
+    plot_start=0,
+    plot_end=100,
 ):
-    # create binned and weighted x and y data
-    if bins:
-        if not np.isscalar(bins):
-            err_msg = "monotone_spline2 ERROR: bins value is not type scalar"
-            raise ValueError(err_msg)
-        N = int(bins)
-        x_binned, y_binned, weights_binned = utils.avg_by_bin(x, y, weights, N)
-
-    elif not bins:
-        N = len(x)
-        x_binned = x
-        y_binned = y
-        weights_binned = weights
-
-    # Prepare bases (Imat) and penalty
-    dd = 3
-    E = np.eye(N)
-    D3 = np.diff(E, n=dd, axis=0)
-    D1 = np.diff(E, n=1, axis=0)
-
-    # Monotone smoothing
-    ws = np.zeros(N - 1)
-    weights_binned = weights_binned.reshape(len(weights_binned), 1)
-    weights1 = 0.5 * weights_binned[1:, :] + 0.5 * weights_binned[:-1, :]
-    weights3 = (
-        0.25 * weights_binned[3:, :]
-        + 0.25 * weights_binned[2:-1, :]
-        + 0.25 * weights_binned[1:-2, :]
-        + 0.25 * weights_binned[:-3, :]
-    ).flatten()
-
-    for it in range(30):
-        Ws = np.diag(ws * kap)
-        mon_cof = np.linalg.solve(
-            E
-            + lam * D3.T @ np.diag(weights3) @ D3
-            + D1.T @ (Ws * weights1) @ D1,
-            y_binned,
-        )
-        ws_new = (D1 @ mon_cof < 0.0) * 1
-        dw = np.sum(ws != ws_new)
-        ws = ws_new
-        if dw == 0:
-            break
-
-    # Monotonic and non monotonic fits
-    y_cstr = mon_cof
-    wsse_cstr = (weights_binned * ((y_cstr - y_binned) ** 2)).sum()
-    if incl_uncstr:
-        y_uncstr = np.linalg.solve(
-            E + lam * D3.T @ np.diag(weights3) @ D3, y_binned
-        )
-        wsse_uncstr = (weights_binned * ((y_uncstr - y_binned) ** 2)).sum()
-    else:
-        y_uncstr = None
-        wsse_uncstr = None
-
-    def mono_interp(x_vec):
-        # replace last point in data with two copies further out to make smooth
-        # extrapolation
-        x_new = np.append(
-            x_binned[:-1], [1.005 * x_binned[-1], 1.01 * x_binned[-1]]
-        )
-        y_cstr_new = np.append(y_cstr[:-1], [y_cstr[-1], y_cstr[-1]])
-        # Create interpolating cubic spline for interior points
-        inter_interpl = intp(x_new, y_cstr_new, kind="cubic")
-        y_pred = np.zeros_like(x_vec)
-        x_lt_min = x_vec < x_binned.min()
-        x_gt_max = x_vec > x_new.max()
-        x_inter = (x_vec >= x_binned.min()) & (x_vec <= x_new.max())
-        y_pred[x_inter] = inter_interpl(x_vec[x_inter])
-        # extrapolate the maximum for values above the maximum
-        y_pred[x_gt_max] = y_cstr[-1]
-        # linear extrapolation of last two points for values below the min
-        slope = (y_cstr[1] - y_cstr[0]) / (x_binned[1] - x_binned[0])
-        intercept = y_cstr[0] - slope * x_binned[0]
-        y_pred[x_lt_min] = slope * x_vec[x_lt_min] + intercept
-
-        return y_pred
-
-    if show_plot:
-        plt.scatter(
-            x,
-            y,
-            linestyle="None",
-            color="gray",
-            s=0.8,
-            alpha=0.7,
-            label="All data",
-        )
-        if not bins:
-            plt.plot(
-                x,
-                y_cstr,
-                color="red",
-                alpha=1.0,
-                label="Monotonic smooth spline",
-            )
-            if incl_uncstr:
-                plt.plot(
-                    x,
-                    y_uncstr,
-                    color="blue",
-                    alpha=1.0,
-                    label="Unconstrained smooth spline",
-                )
-        else:
-            plt.scatter(
-                x_binned,
-                y_binned,
-                linestyle="None",
-                color="black",
-                s=0.8,
-                alpha=0.7,
-                label="Binned data averages",
-            )
-            plt.plot(
-                x,
-                mono_interp(x),
-                color="red",
-                alpha=1.0,
-                label="Monotonic smooth spline",
-            )
-            if incl_uncstr:
-                plt.plot(
-                    x_binned,
-                    y_uncstr,
-                    color="blue",
-                    alpha=1.0,
-                    label="Unconstrained smooth spline",
-                )
-        plt.legend(loc="lower right")
-        plt.show()
-        plt.close()
-
-    return mono_interp, y_cstr, wsse_cstr, y_uncstr, wsse_uncstr
+    """
+    Args:
+        method (string): 'eilers' or 'pygam'
+        splines (None or array-like): for 'pygam' only (otherwise set None),
+            number of splines used for each feature; if None, use the default
+        plot_start/plot_end (number between 0, 100): for 'pygam' only if show_plot = True,
+            start and end percentiles of the data used in the plot; can give
+            better visualizations if the original data has strong outliers
+
+    Returns:
+        interp (function): interpolating function that returns the
+            constrained (monotone) fit at the requested x values
+        y_cstr (numpy array): constrained fit at the binned data
+        wsse_cstr (scalar): weighted sum of squared errors of the
+            constrained fit
+        y_uncstr (numpy array or None): unconstrained fit at the binned data
+        wsse_uncstr (scalar or None): weighted sum of squared errors of
+            the unconstrained fit
+    """
+
+    if method == "pygam":
+        if len(x.shape) == 1:
+            x = np.expand_dims(x, axis=1)
+        if splines != None and len(splines) != x.shape[1]:
+            err_msg = (
+                " pygam method requires splines to be None or "
+                + " same length as # of columns in x, "
+                + str(len(splines))
+                + " != "
+                + str(x.shape[1])
+            )
+            raise ValueError(err_msg)
+
+        # bin data
+        if bins == None:
+            x_binned, y_binned, weights_binned = x, y, weights
+        else:
+            x_binned, y_binned, weights_binned = avg_by_bin_multd(
+                x, y, bins, weights
+            )
+
+        # Set up pygam terms: in addition to the 's' spline terms used here,
+        # pygam also supports 't' tensor terms (interactions between two
+        # variables); keeping those monotonic requires even stronger constraints.
't' terms also need monotonic constraints + # to satisfy previous constraints, they actually impose stronger restriction + + if splines == None: + tempCstr = s(0, constraints="monotonic_inc") + for i in range(1, x_binned.shape[1]): + tempCstr += s(i, constraints="monotonic_inc") + tempUncstr = s(0) + for i in range(1, x_binned.shape[1]): + tempUncstr += s(i) + else: + tempCstr = s(0, constraints="monotonic_inc", n_splines=splines[0]) + for i in range(1, x_binned.shape[1]): + tempCstr += s( + i, constraints="monotonic_inc", n_splines=splines[i] + ) + tempUncstr = s(0, n_splines=splines[0]) + for i in range(1, x_binned.shape[1]): + tempUncstr += s(i, n_splines=splines[i]) + + # fit data + gamCstr = LinearGAM(tempCstr).fit(x_binned, y_binned, weights_binned) + y_cstr = gamCstr.predict(x_binned) + wsse_cstr = (weights_binned * ((y_cstr - y_binned) ** 2)).sum() + if incl_uncstr: + gamUncstr = LinearGAM(tempUncstr).fit( + x_binned, y_binned, weights_binned + ) + y_uncstr = gamUncstr.predict(x_binned) + wsse_uncstr = (weights_binned * ((y_uncstr - y_binned) ** 2)).sum() + else: + y_uncstr = None + wsse_uncstr = None + + if show_plot: + if x.shape[1] == 2: + fig, ax = plt.subplots(subplot_kw={"projection": "3d"}) + # select data in [plot_start, end] percentile across both features + # this can be rewritten to generalize for n-dimensions, but didn't know how to plot that + xactPlot = x[ + (x[:, 0] >= np.percentile(x[:, 0], plot_start)) + & (x[:, 0] <= np.percentile(x[:, 0], plot_end)) + & (x[:, 1] >= np.percentile(x[:, 1], plot_start)) + & (x[:, 1] <= np.percentile(x[:, 1], plot_end)) + ] + yactPlot = y[ + (x[:, 0] >= np.percentile(x[:, 0], plot_start)) + & (x[:, 0] <= np.percentile(x[:, 0], plot_end)) + & (x[:, 1] >= np.percentile(x[:, 1], plot_start)) + & (x[:, 1] <= np.percentile(x[:, 1], plot_end)) + ] + ax.scatter( + xactPlot[:, 0], + xactPlot[:, 1], + yactPlot, + color="black", + s=0.8, + alpha=0.25, + ) + + x0 = np.linspace( + np.percentile(x[:, 0], plot_start), + np.percentile(x[:, 0], plot_end), + 1000, + ) + x1 = np.linspace( + np.percentile(x[:, 1], plot_start), + np.percentile(x[:, 1], plot_end), + 1000, + ) + X0, X1 = np.meshgrid(x0, x1) + yPred = gamCstr.predict( + np.array([X0.flatten(), X1.flatten()]).T + ) + ax.plot_surface( + X0, X1, yPred.reshape(x0.shape[0], -1), color="red" + ) + ax.set_label("Monotonic GAM spline with all data") + + if x.shape[1] == 1: + plt.scatter( + x, + y, + linestyle="None", + color="gray", + s=0.8, + alpha=0.7, + label="All data", + ) plt.plot( x, - y_uncstr, - color="blue", + y_cstr, + color="red", alpha=1.0, - label="Unconstrained smooth spline", + label="Monotonic GAM spline", ) + if incl_uncstr: + plt.plot( + x, + y_uncstr, + color="blue", + alpha=1.0, + label="Unconstrained GAM spline", + ) + plt.show() + plt.close() + + def interp(x): + return gamCstr.predict(x) + + return interp, y_cstr, wsse_cstr, y_uncstr, wsse_uncstr + + if method == "eilers": + # create binned and weighted x and y data + if bins: + if not np.isscalar(bins): + err_msg = ( + "monotone_spline2 ERROR: bins value is not type scalar" + ) + raise ValueError(err_msg) + N = int(bins) + x_binned, y_binned, weights_binned = utils.avg_by_bin( + x, y, weights, N + ) + + elif not bins: + N = len(x) + x_binned = x + y_binned = y + weights_binned = weights + + # Prepare bases (Imat) and penalty + dd = 3 + E = np.eye(N) + D3 = np.diff(E, n=dd, axis=0) + D1 = np.diff(E, n=1, axis=0) + + # Monotone smoothing + ws = np.zeros(N - 1) + weights_binned = weights_binned.reshape(len(weights_binned), 1) 
+        weights1 = 0.5 * weights_binned[1:, :] + 0.5 * weights_binned[:-1, :]
+        weights3 = (
+            0.25 * weights_binned[3:, :]
+            + 0.25 * weights_binned[2:-1, :]
+            + 0.25 * weights_binned[1:-2, :]
+            + 0.25 * weights_binned[:-3, :]
+        ).flatten()
+
+        for it in range(30):
+            Ws = np.diag(ws * kap)
+            mon_cof = np.linalg.solve(
+                E
+                + lam * D3.T @ np.diag(weights3) @ D3
+                + D1.T @ (Ws * weights1) @ D1,
+                y_binned,
+            )
+            ws_new = (D1 @ mon_cof < 0.0) * 1
+            dw = np.sum(ws != ws_new)
+            ws = ws_new
+            if dw == 0:
+                break
+
+        # Monotonic and non monotonic fits
+        y_cstr = mon_cof
+        wsse_cstr = (weights_binned * ((y_cstr - y_binned) ** 2)).sum()
+        if incl_uncstr:
+            y_uncstr = np.linalg.solve(
+                E + lam * D3.T @ np.diag(weights3) @ D3, y_binned
+            )
+            wsse_uncstr = (weights_binned * ((y_uncstr - y_binned) ** 2)).sum()
+        else:
+            y_uncstr = None
+            wsse_uncstr = None
+
+        def mono_interp(x_vec):
+            # replace last point in data with two copies further out to make
+            # smooth extrapolation
+            x_new = np.append(
+                x_binned[:-1], [1.005 * x_binned[-1], 1.01 * x_binned[-1]]
+            )
+            y_cstr_new = np.append(y_cstr[:-1], [y_cstr[-1], y_cstr[-1]])
+            # Create interpolating cubic spline for interior points
+            inter_interpl = intp(x_new, y_cstr_new, kind="cubic")
+            y_pred = np.zeros_like(x_vec)
+            x_lt_min = x_vec < x_binned.min()
+            x_gt_max = x_vec > x_new.max()
+            x_inter = (x_vec >= x_binned.min()) & (x_vec <= x_new.max())
+            y_pred[x_inter] = inter_interpl(x_vec[x_inter])
+            # extrapolate the maximum for values above the maximum
+            y_pred[x_gt_max] = y_cstr[-1]
+            # linear extrapolation of last two points for values below the min
+            slope = (y_cstr[1] - y_cstr[0]) / (x_binned[1] - x_binned[0])
+            intercept = y_cstr[0] - slope * x_binned[0]
+            y_pred[x_lt_min] = slope * x_vec[x_lt_min] + intercept
+
+            return y_pred
+
+        if show_plot:
+            plt.scatter(
+                x,
+                y,
+                linestyle="None",
+                color="gray",
+                s=0.8,
+                alpha=0.7,
+                label="All data",
+            )
+            if not bins:
+                plt.plot(
+                    x,
+                    y_cstr,
+                    color="red",
+                    alpha=1.0,
+                    label="Monotonic smooth spline",
+                )
+                if incl_uncstr:
+                    plt.plot(
+                        x,
+                        y_uncstr,
+                        color="blue",
+                        alpha=1.0,
+                        label="Unconstrained smooth spline",
+                    )
+            else:
+                plt.scatter(
+                    x_binned,
+                    y_binned,
+                    linestyle="None",
+                    color="black",
+                    s=0.8,
+                    alpha=0.7,
+                    label="Binned data averages",
+                )
+                plt.plot(
+                    x,
+                    mono_interp(x),
+                    color="red",
+                    alpha=1.0,
+                    label="Monotonic smooth spline",
+                )
+                if incl_uncstr:
+                    plt.plot(
+                        x_binned,
+                        y_uncstr,
+                        color="blue",
+                        alpha=1.0,
+                        label="Unconstrained smooth spline",
+                    )
+            plt.legend(loc="lower right")
+            plt.show()
+            plt.close()
+
+        return mono_interp, y_cstr, wsse_cstr, y_uncstr, wsse_uncstr
+
+    err_msg = method + " method not supported, must be 'eilers' or 'pygam'"
+    raise ValueError(err_msg)
diff --git a/setup.py b/setup.py
index a3ce34122..1f625de4c 100755
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@
 
 setuptools.setup(
     name="ogcore",
-    version="0.10.7",
+    version="0.10.8",
     author="Jason DeBacker and Richard W. Evans",
     license="CC0 1.0 Universal (CC0 1.0) Public Domain Dedication",
     description="A general equilibribum overlapping generations model for fiscal policy analysis",
@@ -33,6 +33,8 @@
         "distributed>=2.30.1",
         "paramtools>=0.15.0",
         "requests",
+        "pip",
+        "pygam",
     ],
     classifiers=[
         "Development Status :: 2 - Pre-Alpha",
diff --git a/tests/test_parameters.py b/tests/test_parameters.py
index 741721fd0..b708638e7 100644
--- a/tests/test_parameters.py
+++ b/tests/test_parameters.py
@@ -157,7 +157,7 @@ def test_implement_bad_reform2():
     assert len(specs.errors) > 0
     assert specs.errors["tax_func_type"][0] == (
         'tax_func_type "not_a_functional_form" must be in list of '
-        + "choices DEP, DEP_totalinc, GS, linear, mono."
+        + "choices DEP, DEP_totalinc, GS, linear, mono, mono2D."
     )
 
 
diff --git a/tests/test_txfunc.py b/tests/test_txfunc.py
index d80cf81aa..f4d76e966 100644
--- a/tests/test_txfunc.py
+++ b/tests/test_txfunc.py
@@ -1,3 +1,5 @@
+import sys
+
 from ogcore import txfunc
 from distributed import Client, LocalCluster
 import pytest
@@ -731,6 +733,66 @@ def test_monotone_spline():
     """
     # Simulate some data
     np.random.seed(10)
+
+    # another test case for pygam method:
+
+    # N = 100
+    # xlo = 0.001
+    # xhi = 2 * np.pi
+    # x1 = np.arange(xlo, xhi, step=(xhi - xlo) / N)
+    # x2 = np.arange(xlo, xhi, step=(xhi - xlo) / N)
+    # x = np.array([x1, x2]).T
+    # X1, X2 = np.meshgrid(x1, x2)
+    # X1, X2 = X1.flatten(), X2.flatten()
+    # X = np.zeros((X1.shape[0], 2))
+    # X[:, 0] = X1
+    # X[:, 1] = X2
+    # y0 = np.exp(np.sin(X1)) * np.exp(np.cos(X2))
+    # y0 = np.cos(X1) / (1.01 + np.sin(X2))
+    # y0 = 1 / (1 + np.exp(-(X1 + X2)))
+    # y = y0 + np.random.random(y0.shape) * 0.2
+    # weights = np.ones(10000)
+    # (
+    #     mono_interp,
+    #     y_cstr,
+    #     wsse_cstr,
+    #     y_uncstr,
+    #     wsse_uncstr,
+    # ) = txfunc.monotone_spline(
+    #     X, y, weights, lam=100, incl_uncstr=True, show_plot=True, method='pygam'
+    # )
+
+    data = utils.safe_read_pickle(
+        os.path.join(CUR_PATH, "test_io_data", "micro_data_dict_for_tests.pkl")
+    )["2030"]
+    df = data[["total_labinc", "total_capinc", "etr", "weight"]].copy()
+    df.replace([np.inf, -np.inf], np.nan, inplace=True)
+    df.dropna(inplace=True)
+
+    # Estimate monotonically increasing function on the data
+    (
+        mono_interp1,
+        y_cstr1,
+        wsse_cstr1,
+        y_uncstr1,
+        wsse_uncstr1,
+    ) = txfunc.monotone_spline(
+        df[["total_labinc", "total_capinc"]].values,
+        df["etr"].values,
+        df["weight"].values,
+        bins=[100, 100],
+        incl_uncstr=True,
+        show_plot=False,
+        method="pygam",
+        splines=[100, 100],
+        plot_start=25,
+        plot_end=75,
+    )
+    # Test whether mono_interp is a function
+    assert hasattr(mono_interp1, "__call__")
+    # Not sure what baseline should be to add tests here for "correctness" of values
+
+    # Simulate some data
     N = 100
     xlo = 0.001
     xhi = 2 * np.pi
@@ -739,23 +801,31 @@ def test_monotone_spline():
     y = y0 + np.random.randn(N) * 0.5
     weights = np.ones(N)
 
-    # Estimate monotonically increasing function on the data
     (
-        mono_interp,
-        y_cstr,
-        wsse_cstr,
-        y_uncstr,
-        wsse_uncstr,
+        mono_interp2,
+        y_cstr2,
+        wsse_cstr2,
+        y_uncstr2,
+        wsse_uncstr2,
     ) = txfunc.monotone_spline(
-        x, y, weights, bins=10, lam=100, incl_uncstr=True, show_plot=False
+        x,
+        y,
+        weights,
+        bins=10,
+        lam=100,
+        incl_uncstr=True,
+        show_plot=False,
+        method="eilers",
     )
     # Test whether mono_interp is a function
-    assert hasattr(mono_interp, "__call__")
+    assert hasattr(mono_interp2, "__call__")
+    # Test whether mono_interp is a function
+    assert hasattr(mono_interp2, "__call__")
 
     # Test whether mono_interp gives the correct output
     x_vec_test = np.array([2.0, 5.0])
     y_vec_expected = np.array([0.69512331, 1.23822669])
-    y_monointerp = mono_interp(x_vec_test)
+    y_monointerp = mono_interp2(x_vec_test)
     assert np.allclose(y_monointerp, y_vec_expected)
 
     # Test that y_cstr, wsse_cstr, y_uncstr, wsse_uncstr are correct
@@ -789,7 +859,7 @@ def test_monotone_spline():
         ]
     )
     wsse_uncstr_expected = 468.4894930349361
-    assert np.allclose(y_cstr, y_cstr_expected)
-    assert np.allclose(wsse_cstr, wsse_cstr_expected)
-    assert np.allclose(y_uncstr, y_uncstr_expected)
-    assert np.allclose(wsse_uncstr, wsse_uncstr_expected)
+    assert np.allclose(y_cstr2, y_cstr_expected)
+    assert np.allclose(wsse_cstr2, wsse_cstr_expected)
+    assert np.allclose(y_uncstr2, y_uncstr_expected)
+    assert np.allclose(wsse_uncstr2, wsse_uncstr_expected)
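
Usage note (not part of the diff): a minimal sketch of how the new method="pygam" path of txfunc.monotone_spline() is exercised, adapted from the call pattern in the new test in tests/test_txfunc.py. The data pickle path, column names, and the income values passed to the fitted interpolator below are illustrative assumptions, not part of this change set.

# Illustrative sketch only; file path and example income values are assumptions.
import os
import numpy as np
from ogcore import txfunc, utils

CUR_PATH = os.path.abspath(os.path.dirname(__file__))
data = utils.safe_read_pickle(
    os.path.join(CUR_PATH, "test_io_data", "micro_data_dict_for_tests.pkl")
)["2030"]
df = data[["total_labinc", "total_capinc", "etr", "weight"]].copy()
df.replace([np.inf, -np.inf], np.nan, inplace=True)
df.dropna(inplace=True)

# Fit a monotonically increasing GAM of the ETR over labor and capital income
# and recover the callable interpolator plus the constrained-fit diagnostics.
mono_interp, y_cstr, wsse_cstr, y_uncstr, wsse_uncstr = txfunc.monotone_spline(
    df[["total_labinc", "total_capinc"]].values,
    df["etr"].values,
    df["weight"].values,
    bins=[100, 100],
    incl_uncstr=True,
    show_plot=False,
    method="pygam",
    splines=[100, 100],
)

# The returned interpolator takes a 2-D array of (labinc, capinc) pairs,
# which is how get_tax_rates() calls it when tax_func_type == "mono2D".
etr_pred = mono_interp(np.array([[50000.0, 10000.0]]))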