Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Lang] [refactor] Deprecate x.shape() and x.dim(), use x.shape instead #1318

Merged
merged 5 commits into from
Jun 26, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 19 additions & 1 deletion docs/vector.rst
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ As global tensors of vectors
++++++++++++++++++++++++++++
.. attribute:: a[p, q, ...][i]

:parameter a: (Vector) the vector
:parameter a: (tensor of Vector) the vector
:parameter p: (scalar) index of the first tensor dimension
:parameter q: (scalar) index of the second tensor dimension
:parameter i: (scalar) index of the vector component
Expand Down Expand Up @@ -191,4 +191,22 @@ Methods
Vectors are special matrices with only 1 column. In fact, ``ti.Vector`` is just an alias of ``ti.Matrix``.


.. attribute:: a.n

:parameter a: (Vector or tensor of Vector)
:return: (scalar) return the dimensionality of vector ``a``

E.g.,
::

# Taichi-scope
a = ti.Vector([1, 2, 3])
a.n # 3

::

# Python-scope
a = ti.Vector(3, dt=ti.f32, shape=())
a.n # 3


TODO: add element wise operations docs
26 changes: 13 additions & 13 deletions python/taichi/lang/expr.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ def __setitem__(self, key, value):
key = ()
if not isinstance(key, (tuple, list)):
key = (key, )
assert len(key) == self.dim()
assert len(key) == len(self.shape)
key = key + ((0, ) *
(taichi_lang_core.get_max_num_indices() - len(key)))
self.setter(value, *key)
Expand Down Expand Up @@ -127,13 +127,13 @@ def snode(self):
def __hash__(self):
return self.ptr.get_raw_address()

def dim(self):
impl.get_runtime().try_materialize()
return self.snode().dim()

@property
def shape(self):
impl.get_runtime().try_materialize()
return self.snode().shape()
return self.snode().shape

@deprecated('x.dim()', 'len(x.shape)')
def dim(self):
return len(self.shape)

def data_type(self):
return self.snode().data_type()
Expand All @@ -142,7 +142,7 @@ def data_type(self):
def to_numpy(self):
from .meta import tensor_to_ext_arr
import numpy as np
arr = np.zeros(shape=self.shape(),
arr = np.zeros(shape=self.shape,
dtype=to_numpy_type(self.snode().data_type()))
tensor_to_ext_arr(self, arr)
import taichi as ti
Expand All @@ -153,7 +153,7 @@ def to_numpy(self):
def to_torch(self, device=None):
from .meta import tensor_to_ext_arr
import torch
arr = torch.zeros(size=self.shape(),
arr = torch.zeros(size=self.shape,
dtype=to_pytorch_type(self.snode().data_type()),
device=device)
tensor_to_ext_arr(self, arr)
Expand All @@ -163,9 +163,9 @@ def to_torch(self, device=None):

@python_scope
def from_numpy(self, arr):
assert self.dim() == len(arr.shape)
s = self.shape()
for i in range(self.dim()):
assert len(self.shape) == len(arr.shape)
s = self.shape
for i in range(len(self.shape)):
assert s[i] == arr.shape[i]
from .meta import ext_arr_to_tensor
if hasattr(arr, 'contiguous'):
Expand All @@ -182,7 +182,7 @@ def from_torch(self, arr):
def copy_from(self, other):
assert isinstance(other, Expr)
from .meta import tensor_to_tensor
assert self.dim() == other.dim()
assert len(self.shape) == len(other.shape)
tensor_to_tensor(self, other)


Expand Down
16 changes: 9 additions & 7 deletions python/taichi/lang/matrix.py
Original file line number Diff line number Diff line change
Expand Up @@ -512,13 +512,15 @@ def diag(dim, val):
def loop_range(self):
return self.entries[0]

@property
def shape(self):
# Took `self.entries[0]` as a representation of this tensor-of-matrices.
# https://github.com/taichi-dev/taichi/issues/1069#issuecomment-635712140
return self.loop_range().shape()
return self.loop_range().shape

@deprecated('x.dim()', 'len(x.shape)')
def dim(self):
return self.loop_range().dim()
return len(self.shape)

def data_type(self):
return self.loop_range().data_type()
Expand Down Expand Up @@ -622,7 +624,7 @@ def to_numpy(self, keep_dims=False, as_vector=None):
if not self.is_global():
return np.array(self.entries).reshape(shape_ext)

ret = np.empty(self.loop_range().shape() + shape_ext,
ret = np.empty(self.loop_range().shape + shape_ext,
dtype=to_numpy_type(
self.loop_range().snode().data_type()))
from .meta import matrix_to_ext_arr
Expand All @@ -636,7 +638,7 @@ def to_torch(self, device=None, keep_dims=False):
import torch
as_vector = self.m == 1 and not keep_dims
shape_ext = (self.n, ) if as_vector else (self.n, self.m)
ret = torch.empty(self.loop_range().shape() + shape_ext,
ret = torch.empty(self.loop_range().shape + shape_ext,
dtype=to_pytorch_type(
self.loop_range().snode().data_type()),
device=device)
Expand All @@ -648,14 +650,14 @@ def to_torch(self, device=None, keep_dims=False):

@python_scope
def from_numpy(self, ndarray):
if len(ndarray.shape) == self.loop_range().dim() + 1:
if len(ndarray.shape) == len(self.loop_range().shape) + 1:
as_vector = True
assert self.m == 1, "This matrix is not a vector"
else:
as_vector = False
assert len(ndarray.shape) == self.loop_range().dim() + 2
assert len(ndarray.shape) == len(self.loop_range().shape) + 2
dim_ext = 1 if as_vector else 2
assert len(ndarray.shape) == self.loop_range().dim() + dim_ext
assert len(ndarray.shape) == len(self.loop_range().shape) + dim_ext
from .meta import ext_arr_to_matrix
ext_arr_to_matrix(ndarray, self, as_vector)
import taichi as ti
Expand Down
24 changes: 17 additions & 7 deletions python/taichi/lang/snode.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,18 +65,28 @@ def parent(self, n=1):
def data_type(self):
return self.ptr.data_type()

@deprecated('x.dim()', 'len(x.shape)')
def dim(self):
impl.get_runtime().try_materialize()
return self.ptr.num_active_indices()
return len(self.shape)

@property
def shape(self):
impl.get_runtime().try_materialize()
return tuple(
self.ptr.get_num_elements_along_axis(i) for i in range(self.dim()))
dim = self.ptr.num_active_indices()
ret = [
self.ptr.get_num_elements_along_axis(i) for i in range(dim)]

class callable_tuple(tuple):
@deprecated('x.shape()', 'x.shape')
def __call__(self):
return self

ret = callable_tuple(ret)
return ret

@deprecated('snode.get_shape(i)', 'snode.shape()[i]')
@deprecated('x.get_shape(i)', 'x.shape[i]')
def get_shape(self, i):
return self.shape()[i]
return self.shape[i]

def loop_range(self):
import taichi as ti
Expand Down Expand Up @@ -104,7 +114,7 @@ def __repr__(self):
# ti.root.dense(ti.i, 3).dense(ti.jk, (4, 5)).place(x)
# ti.root => dense [3] => dense [3, 4, 5] => place [3, 4, 5]
type = repr(self.ptr.type)[len('SNodeType.'):]
shape = repr(list(self.shape()))
shape = repr(list(self.shape))
parent = repr(self.parent())
return f'{parent} => {type} {shape}'

Expand Down
2 changes: 1 addition & 1 deletion python/taichi/lang/transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -435,7 +435,7 @@ def visit_struct_for(self, node, is_grouped):
template = '''
if 1:
___loop_var = 0
{} = ti.make_var_vector(size=___loop_var.loop_range().dim())
{} = ti.make_var_vector(size=len(___loop_var.loop_range().shape))
___expr_group = ti.make_expr_group({})
ti.core.begin_frontend_struct_for(___expr_group, ___loop_var.loop_range().ptr)
ti.core.end_frontend_range_for()
Expand Down
2 changes: 1 addition & 1 deletion python/taichi/misc/gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ def set_image(self, img):
import taichi as ti

if isinstance(img, ti.Expr):
if ti.core.is_integral(img.data_type()) or len(img.shape()) != 2:
if ti.core.is_integral(img.data_type()) or len(img.shape) != 2:
# Images of uint is not optimized by xxx_to_image
self.img = self.cook_image(img.to_numpy())
else:
Expand Down
2 changes: 1 addition & 1 deletion tests/python/test_linalg.py
Original file line number Diff line number Diff line change
Expand Up @@ -305,7 +305,7 @@ def fill():
assert m4[0][j, i] == int(i + 3 * j + 1)


# Remove this once the apis are fully deprecated in incoming version.
# TODO: Remove this once the apis are fully deprecated in incoming version.
@pytest.mark.filterwarnings('ignore')
@ti.host_arch_only
def test_init_matrix_from_vectors_deprecated():
Expand Down
36 changes: 28 additions & 8 deletions tests/python/test_tensor_reflection.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import taichi as ti
import pytest


@ti.all_archs
Expand All @@ -11,8 +12,7 @@ def test_POT():

ti.root.dense(ti.i, n).dense(ti.j, m).dense(ti.k, p).place(val)

assert val.shape() == (n, m, p)
assert val.dim() == 3
assert val.shape == (n, m, p)
assert val.data_type() == ti.i32


Expand All @@ -29,8 +29,7 @@ def test_non_POT():
blk3 = blk2.dense(ti.k, p)
blk3.place(val)

assert val.shape() == (n, m, p)
assert val.dim() == 3
assert val.shape == (n, m, p)
assert val.data_type() == ti.i32


Expand All @@ -48,8 +47,7 @@ def test_unordered():
blk3.place(val)

assert val.data_type() == ti.i32
assert val.shape() == (n, m, p)
assert val.dim() == 3
assert val.shape == (n, m, p)
assert val.snode().parent(0) == val.snode()
assert val.snode().parent() == blk3
assert val.snode().parent(1) == blk3
Expand Down Expand Up @@ -80,12 +78,34 @@ def test_unordered_matrix():
blk3 = blk2.dense(ti.j, p)
blk3.place(val)

assert val.dim() == 3
assert val.shape() == (n, m, p)
assert val.shape == (n, m, p)
assert val.data_type() == ti.i32
assert val.loop_range().snode().parent(0) == val.loop_range().snode()
assert val.loop_range().snode().parent() == blk3
assert val.loop_range().snode().parent(1) == blk3
assert val.loop_range().snode().parent(2) == blk2
assert val.loop_range().snode().parent(3) == blk1
assert val.loop_range().snode().parent(4) == ti.root


@pytest.mark.filterwarnings('ignore')
@ti.host_arch_only
def test_deprecated():
val = ti.var(ti.f32)
mat = ti.Matrix(3, 2, ti.i32)

n = 3
m = 7
p = 11

blk1 = ti.root.dense(ti.k, n)
blk2 = blk1.dense(ti.i, m)
blk3 = blk2.dense(ti.j, p)
blk3.place(val, mat)

assert val.dim() == 3
assert val.shape() == (n, m, p)
assert mat.dim() == 3
assert mat.shape() == (n, m, p)
assert blk3.dim() == 3
assert blk3.shape() == (n, m, p)
2 changes: 1 addition & 1 deletion tests/python/test_tuple_assign.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,7 @@ def test_unpack_from_shape():

@ti.kernel
def func():
a[None], b[None], c[None] = d.shape()
a[None], b[None], c[None] = d.shape

func()
assert a[None] == 2
Expand Down