Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

paddle frontend cholesky_solve added #21291

Merged
merged 9 commits into from
Aug 17, 2023
10 changes: 10 additions & 0 deletions ivy/functional/frontends/paddle/tensor/linalg.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,16 @@ def solve(x1, x2, name=None):
return ivy.solve(x1, x2)


# cholesky_solve
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
@to_ivy_arrays_and_back
def cholesky_solve(x, y, upper=False, name=None):
    """
    Solve ``A @ out = x`` given the Cholesky factor ``y`` of ``A``.

    Mirrors ``paddle.linalg.cholesky_solve(x, y, upper=False, name=None)``:
    ``x`` is the right-hand side, ``y`` is the triangular Cholesky factor,
    with ``A = y @ y.T`` when ``y`` is lower triangular (default) or
    ``A = y.T @ y`` when ``upper=True``.
    """
    # Normalize to a lower-triangular factor L so that A = L @ L.T.
    if upper:
        y = ivy.matrix_transpose(y)
    # Two triangular solves: L @ z = x, then L.T @ out = z.
    z = ivy.solve(y, x)
    return ivy.solve(ivy.matrix_transpose(y), z)


# cholesky
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
@to_ivy_arrays_and_back
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,13 @@
import ivy
from hypothesis import strategies as st, assume
import numpy as np
import sys

# local
import ivy_tests.test_ivy.helpers as helpers
from ivy_tests.test_ivy.helpers import assert_all_close
from ivy_tests.test_ivy.helpers import handle_frontend_test, matrix_is_stable


# Helpers #
# ------ #

Expand Down Expand Up @@ -528,6 +528,87 @@ def test_paddle_solve(
)


# cholesky_solve
@st.composite
def _get_cholesky_matrix(draw):
    """Strategy yielding ``(dtype, factor)`` where ``factor`` is the Cholesky
    factor of a random well-conditioned SPD matrix, randomly transposed so it
    is either lower- or upper-triangular. Shares dtype/size with
    ``_get_second_matrix`` via the ``shared_dtype`` / ``shared_size`` keys."""
    dtype = draw(
        st.shared(
            st.sampled_from(draw(helpers.get_dtypes("float"))),
            key="shared_dtype",
        )
    )
    size = draw(
        st.shared(helpers.ints(min_value=2, max_value=4), key="shared_size")
    )
    # Reject ill-conditioned draws so the solve stays numerically stable.
    base = draw(
        helpers.array_values(
            dtype=dtype,
            shape=(size, size),
            min_value=2,
            max_value=5,
        ).filter(lambda m: np.linalg.cond(m.tolist()) < 1 / sys.float_info.epsilon)
    )
    # base.T @ base is PSD; adding the identity makes it strictly SPD.
    spd = np.matmul(base.T, base) + np.identity(base.shape[0])
    factor = np.linalg.cholesky(spd)
    # Coin flip: hand back either the lower factor or its upper transpose.
    if draw(st.floats(min_value=0, max_value=1)) > 0.5:
        factor = factor.T
    return dtype, factor


@st.composite
def _get_second_matrix(draw):
    """Strategy yielding ``(dtype, rhs)`` where ``rhs`` is a random ``(n, 1)``
    column vector; dtype and ``n`` are shared with ``_get_cholesky_matrix``
    through the ``shared_dtype`` / ``shared_size`` keys."""
    dtype = draw(
        st.shared(
            st.sampled_from(draw(helpers.get_dtypes("float"))),
            key="shared_dtype",
        )
    )
    size = draw(
        st.shared(helpers.ints(min_value=2, max_value=4), key="shared_size")
    )
    rhs = draw(
        helpers.array_values(
            dtype=dtype,
            shape=(size, 1),
            min_value=2,
            max_value=5,
        )
    )
    return dtype, rhs
AnnaTz marked this conversation as resolved.
Show resolved Hide resolved


@handle_frontend_test(
    fn_tree="paddle.tensor.linalg.cholesky_solve",
    x=_get_cholesky_matrix(),
    y=_get_second_matrix(),
    test_with_out=st.just(False),
)
def test_paddle_cholesky_solve(
    *,
    x,
    y,
    frontend,
    backend_fw,
    test_flags,
    fn_tree,
    on_device,
):
    # Frontend test for paddle.tensor.linalg.cholesky_solve.
    # x1 is the triangular Cholesky factor (from _get_cholesky_matrix);
    # x2 is the (n, 1) right-hand-side column (from _get_second_matrix).
    input_dtype1, x1 = x
    input_dtype2, x2 = y
    # NOTE(review): the decorator's fixture names and the frontend kwargs are
    # swapped here — the fixture `x` holds the factor but is forwarded as
    # `y=x1`, and fixture `y` (the rhs) is forwarded as `x=x2`. Verify this
    # mapping matches the frontend's parameter order.
    helpers.test_frontend_function(
        input_dtypes=[input_dtype1, input_dtype2],
        frontend=frontend,
        backend_to_test=backend_fw,
        test_flags=test_flags,
        fn_tree=fn_tree,
        on_device=on_device,
        # Loose float tolerances: two chained triangular solves accumulate
        # rounding error, especially in float32.
        rtol=1e-3,
        atol=1e-3,
        x=x2,
        y=x1,
        upper=np.array_equal(x1, np.triu(x1)),  # check whether the matrix is upper
    )


# cholesky
@handle_frontend_test(
fn_tree="paddle.tensor.linalg.cholesky",
Expand Down