Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Add] test atleast_xd pir backward #59365

Merged
merged 4 commits into from
Dec 5, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 39 additions & 6 deletions python/paddle/tensor/manipulation.py
Original file line number Diff line number Diff line change
Expand Up @@ -4082,8 +4082,19 @@ def atleast_1d(*inputs, name=None):
[[1.23000002]])]
"""
out = []
for tensor in inputs:
tensor = paddle.to_tensor(tensor)
for input in inputs:
if not isinstance(
input,
(
paddle.Tensor,
paddle.base.framework.Variable,
paddle.base.libpaddle.pir.OpResult,
),
):
tensor = paddle.to_tensor(input)
else:
tensor = input

if tensor.dim() == 0:
result = tensor.reshape((1,))
else:
Expand Down Expand Up @@ -4139,8 +4150,19 @@ def atleast_2d(*inputs, name=None):
[[[1.23000002]]])]
"""
out = []
for tensor in inputs:
tensor = paddle.to_tensor(tensor)
for input in inputs:
if not isinstance(
input,
(
paddle.Tensor,
paddle.base.framework.Variable,
paddle.base.libpaddle.pir.OpResult,
),
):
tensor = paddle.to_tensor(input)
else:
tensor = input

if tensor.dim() == 0:
result = tensor.reshape((1, 1))
elif tensor.dim() == 1:
Expand Down Expand Up @@ -4198,8 +4220,19 @@ def atleast_3d(*inputs, name=None):
[[[[1.23000002]]]])]
"""
out = []
for tensor in inputs:
tensor = paddle.to_tensor(tensor)
for input in inputs:
if not isinstance(
input,
(
paddle.Tensor,
paddle.base.framework.Variable,
paddle.base.libpaddle.pir.OpResult,
),
):
tensor = paddle.to_tensor(input)
else:
tensor = input

if tensor.dim() == 0:
result = tensor.reshape((1, 1, 1))
elif tensor.dim() == 1:
Expand Down
45 changes: 39 additions & 6 deletions test/legacy_test/test_atleast_xd.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,6 @@ def generate_data(ndim, count=1, max_size=4, mix=False, dtype='int32'):
a list of data like:
[[data, dtype, shape, name], [data, dtype, shape, name] ... ]
"""

rtn = []
for d in range(ndim):
data = [
Expand Down Expand Up @@ -172,7 +171,11 @@ def _test_static_api(
shape = shapes[i]
dtype = dtypes[i]
name = names[i]
x.append(paddle.static.data(name, shape, dtype))

_x = paddle.static.data(name, shape, dtype)
_x.stop_gradient = False
x.append(_x)

# the data fed should NOT be a Tensor
feed[name] = (
input.numpy()
Expand All @@ -181,12 +184,39 @@ def _test_static_api(
)

out = func(*x)
exe = paddle.static.Executor(place)
res = exe.run(feed=feed, fetch_list=[out])

# unwrap inputs when length is 1
if len(inputs) == 1:
res = res[0]
out.stop_gradient = False
y = x[0]
_out = out
else:
for o in out:
o.stop_gradient = False
y = x[0]
_out = out[0]

z = _out * 123

fetch_list = [out]
if paddle.framework.in_pir_mode():
grads = paddle.autograd.ir_backward.grad(z, y)
out_grad = grads[0]
fetch_list.append(out_grad)
else:
paddle.static.append_backward(z)
out_grad = y.grad_name
fetch_list.append(out_grad)

exe = paddle.static.Executor(place)
*res, res_grad = exe.run(feed=feed, fetch_list=fetch_list)

# not check old ir
if paddle.framework.in_pir_mode():
# convert grad value to bool if dtype is bool
grad_value = 123.0 if dtypes[0] != 'bool' else True
np.testing.assert_allclose(
res_grad, np.ones_like(y) * grad_value
)

out_ref = func_ref(
func_type,
Expand All @@ -198,6 +228,9 @@ def _test_static_api(
]
)

if len(inputs) == 1:
out_ref = [out_ref]

for n, p in zip(out_ref, res):
np.testing.assert_allclose(n, p, rtol=RTOL, atol=ATOL)

Expand Down