[Lang] Deprecate ext_arr/any_arr in favor of types.ndarray #4598

Merged · 6 commits · Mar 23, 2022
4 changes: 3 additions & 1 deletion python/taichi/__init__.py
@@ -38,7 +38,9 @@
'imshow': 'tools.imshow',
'imwrite': 'tools.imwrite',
'quant': 'types.quantized_types.quant',
'type_factory': 'types.quantized_types.type_factory'
'type_factory': 'types.quantized_types.type_factory',
'ext_arr': 'types.ndarray',
'any_arr': 'types.ndarray'
}

__customized_deprecations__ = {
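For context, this table is what keeps the old spellings importable during the deprecation window. A minimal sketch of the migration from the user's side, adapted from the `ext_arr` docstring removed later in this PR (the exact warning text is an assumption; it comes from the deprecation handler, not from this hunk):

```python
import taichi as ti

ti.init(arch=ti.cpu)

x = ti.field(ti.f32, shape=8)

# Old spelling: still resolves through the table above, but is expected
# to emit a DeprecationWarning pointing at ti.types.ndarray.
@ti.kernel
def to_numpy_old(arr: ti.ext_arr()):
    for i in x:
        arr[i] = x[i]

# New spelling: the annotation now lives under ti.types.
@ti.kernel
def to_numpy_new(arr: ti.types.ndarray()):
    for i in x:
        arr[i] = x[i]
```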
38 changes: 23 additions & 15 deletions python/taichi/_kernels.py
@@ -7,7 +7,8 @@
from taichi.lang.kernel_impl import kernel
from taichi.lang.runtime_ops import sync
from taichi.lang.snode import deactivate
from taichi.types.annotations import any_arr, ext_arr, template
from taichi.types import ndarray_type
from taichi.types.annotations import template
from taichi.types.primitive_types import f16, f32, f64, u8


@@ -19,31 +20,33 @@ def fill_tensor(tensor: template(), val: template()):


@kernel
def fill_ndarray(ndarray: any_arr(), val: template()):
def fill_ndarray(ndarray: ndarray_type.ndarray(), val: template()):
for I in grouped(ndarray):
ndarray[I] = val


@kernel
def fill_ndarray_matrix(ndarray: any_arr(), val: template()):
def fill_ndarray_matrix(ndarray: ndarray_type.ndarray(), val: template()):
for I in grouped(ndarray):
ndarray[I].fill(val)


@kernel
def tensor_to_ext_arr(tensor: template(), arr: ext_arr()):
def tensor_to_ext_arr(tensor: template(), arr: ndarray_type.ndarray()):
for I in grouped(tensor):
arr[I] = tensor[I]


@kernel
def ndarray_to_ext_arr(ndarray: any_arr(), arr: ext_arr()):
def ndarray_to_ext_arr(ndarray: ndarray_type.ndarray(),
arr: ndarray_type.ndarray()):
for I in grouped(ndarray):
arr[I] = ndarray[I]


@kernel
def ndarray_matrix_to_ext_arr(ndarray: any_arr(), arr: ext_arr(),
def ndarray_matrix_to_ext_arr(ndarray: ndarray_type.ndarray(),
arr: ndarray_type.ndarray(),
layout_is_aos: template(),
as_vector: template()):
for I in grouped(ndarray):
@@ -62,7 +65,7 @@ def ndarray_matrix_to_ext_arr(ndarray: any_arr(), arr: ext_arr(),


@kernel
def vector_to_fast_image(img: template(), out: ext_arr()):
def vector_to_fast_image(img: template(), out: ndarray_type.ndarray()):
# FIXME: Why is ``for i, j in img:`` slower than:
for i, j in ndrange(*img.shape):
r, g, b = 0, 0, 0
@@ -88,7 +91,7 @@ def vector_to_fast_image(img: template(), out: ext_arr()):


@kernel
def tensor_to_image(tensor: template(), arr: ext_arr()):
def tensor_to_image(tensor: template(), arr: ndarray_type.ndarray()):
for I in grouped(tensor):
t = ops.cast(tensor[I], f32)
arr[I, 0] = t
@@ -97,7 +100,7 @@ def tensor_to_image(tensor: template(), arr: ext_arr()):


@kernel
def vector_to_image(mat: template(), arr: ext_arr()):
def vector_to_image(mat: template(), arr: ndarray_type.ndarray()):
for I in grouped(mat):
for p in static(range(mat.n)):
arr[I, p] = ops.cast(mat[I][p], f32)
@@ -112,25 +115,28 @@ def tensor_to_tensor(tensor: template(), other: template()):


@kernel
def ext_arr_to_tensor(arr: ext_arr(), tensor: template()):
def ext_arr_to_tensor(arr: ndarray_type.ndarray(), tensor: template()):
for I in grouped(tensor):
tensor[I] = arr[I]


@kernel
def ndarray_to_ndarray(ndarray: any_arr(), other: any_arr()):
def ndarray_to_ndarray(ndarray: ndarray_type.ndarray(),
other: ndarray_type.ndarray()):
for I in grouped(ndarray):
ndarray[I] = other[I]


@kernel
def ext_arr_to_ndarray(arr: ext_arr(), ndarray: any_arr()):
def ext_arr_to_ndarray(arr: ndarray_type.ndarray(),
ndarray: ndarray_type.ndarray()):
for I in grouped(ndarray):
ndarray[I] = arr[I]


@kernel
def ext_arr_to_ndarray_matrix(arr: ext_arr(), ndarray: any_arr(),
def ext_arr_to_ndarray_matrix(arr: ndarray_type.ndarray(),
ndarray: ndarray_type.ndarray(),
layout_is_aos: template(),
as_vector: template()):
for I in grouped(ndarray):
@@ -149,7 +155,8 @@ def ext_arr_to_ndarray_matrix(arr: ext_arr(), ndarray: any_arr(),


@kernel
def matrix_to_ext_arr(mat: template(), arr: ext_arr(), as_vector: template()):
def matrix_to_ext_arr(mat: template(), arr: ndarray_type.ndarray(),
as_vector: template()):
for I in grouped(mat):
for p in static(range(mat.n)):
for q in static(range(mat.m)):
@@ -160,7 +167,8 @@ def matrix_to_ext_arr(mat: template(), arr: ext_arr(), as_vector: template()):


@kernel
def ext_arr_to_matrix(arr: ext_arr(), mat: template(), as_vector: template()):
def ext_arr_to_matrix(arr: ndarray_type.ndarray(), mat: template(),
as_vector: template()):
for I in grouped(mat):
for p in static(range(mat.n)):
for q in static(range(mat.m)):
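The helpers above lean on the fact that a single `ndarray_type.ndarray()` annotation accepts both external arrays (NumPy/PyTorch) and Taichi ndarrays. A hedged sketch of that dual use from user code (the kernel below is illustrative and not part of this diff):

```python
import numpy as np
import taichi as ti

ti.init(arch=ti.cpu)

@ti.kernel
def scale(arr: ti.types.ndarray(), k: ti.f32):
    for I in ti.grouped(arr):
        arr[I] *= k

a_np = np.ones(16, dtype=np.float32)  # external array (NumPy)
a_ti = ti.ndarray(ti.f32, shape=16)   # Taichi ndarray
a_ti.fill(1.0)

scale(a_np, 2.0)  # one annotation serves both argument kinds
scale(a_ti, 2.0)
```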
11 changes: 6 additions & 5 deletions python/taichi/aot/module.py
@@ -6,7 +6,8 @@
from taichi.lang.enums import Layout
from taichi.lang.field import ScalarField
from taichi.lang.matrix import MatrixField, MatrixNdarray, VectorNdarray
from taichi.types.annotations import ArgAnyArray, template
from taichi.types.annotations import template
from taichi.types.ndarray_type import NdarrayType
from taichi.types.primitive_types import f32


@@ -126,7 +127,7 @@ def add_kernel(self, kernel_fn, example_any_arrays=None, name=None):

Args:
kernel_fn (Function): the function decorated by taichi `kernel`.
example_any_arrays (Dict[int, ti.ndarray]): a dict where key is arg_id and key is example any_arr input.
example_any_arrays (Dict[int, ti.ndarray]): a dict where the key is arg_id and the value is an example ndarray input.
name (str): Name to identify this kernel in the module. If not
provided, uses the built-in ``__name__`` attribute of `kernel_fn`.

@@ -137,14 +138,14 @@ def add_kernel(self, kernel_fn, example_any_arrays=None, name=None):
injected_args = []
num_arr = len([
anno for anno in kernel.argument_annotations
if isinstance(anno, ArgAnyArray)
if isinstance(anno, NdarrayType)
])
assert example_any_arrays is None or num_arr == len(
example_any_arrays
), f'Need {num_arr} example any_arr inputs but got {len(example_any_arrays)}'
), f'Need {num_arr} example ndarray inputs but got {len(example_any_arrays)}'
i = 0
for anno in kernel.argument_annotations:
if isinstance(anno, ArgAnyArray):
if isinstance(anno, NdarrayType):
if example_any_arrays:
injected_args.append(example_any_arrays[i])
else:
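A hedged sketch of how `add_kernel` with `example_any_arrays` is meant to be called after this rename; only the `add_kernel` signature comes from this diff, so the module construction and arch below are assumptions:

```python
import taichi as ti

ti.init(arch=ti.vulkan)

@ti.kernel
def fill(arr: ti.types.ndarray(), val: ti.f32):
    for I in ti.grouped(arr):
        arr[I] = val

m = ti.aot.Module(ti.vulkan)  # assumed constructor, not shown in this PR
# Keys are argument indices; values are example ndarrays used to
# materialize the NdarrayType parameters when the kernel is compiled.
m.add_kernel(fill, example_any_arrays={0: ti.ndarray(ti.f32, shape=(8, 8))})
```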
7 changes: 0 additions & 7 deletions python/taichi/lang/__init__.py
@@ -1,6 +1,3 @@
import platform

from taichi._lib import core as _ti_core
from taichi.lang import impl
from taichi.lang._ndarray import *
from taichi.lang._ndrange import ndrange
@@ -17,10 +14,6 @@
from taichi.lang.snode import *
from taichi.lang.source_builder import *
from taichi.lang.struct import *
from taichi.types.annotations import any_arr, ext_arr, template
from taichi.types.primitive_types import f16, f32, f64, i32, i64, u32, u64

from taichi import _logging, _snode

__all__ = [
s for s in dir() if not s.startswith('_') and s not in [
6 changes: 3 additions & 3 deletions python/taichi/lang/ast/ast_transformer.py
@@ -15,7 +15,7 @@
from taichi.lang.exception import TaichiSyntaxError
from taichi.lang.matrix import MatrixType
from taichi.lang.util import is_taichi_class, to_taichi_type
from taichi.types import annotations, primitive_types
from taichi.types import annotations, ndarray_type, primitive_types

if version_info < (3, 9):
from astunparse import unparse
@@ -461,10 +461,10 @@ def transform_as_kernel():
kernel_arguments.decl_sparse_matrix(
to_taichi_type(ctx.arg_features[i])))
elif isinstance(ctx.func.argument_annotations[i],
annotations.any_arr):
ndarray_type.NdarrayType):
ctx.create_variable(
arg.arg,
kernel_arguments.decl_any_arr_arg(
kernel_arguments.decl_ndarray_arg(
to_taichi_type(ctx.arg_features[i][0]),
ctx.arg_features[i][1], ctx.arg_features[i][2],
ctx.arg_features[i][3]))
2 changes: 1 addition & 1 deletion python/taichi/lang/kernel_arguments.py
@@ -58,7 +58,7 @@ def decl_sparse_matrix(dtype):
value_type)


def decl_any_arr_arg(dtype, dim, element_shape, layout):
def decl_ndarray_arg(dtype, dim, element_shape, layout):
dtype = cook_dtype(dtype)
element_dim = len(element_shape)
arg_id = impl.get_runtime().prog.decl_arr_arg(dtype, dim, element_shape)
18 changes: 11 additions & 7 deletions python/taichi/lang/kernel_impl.py
@@ -18,7 +18,7 @@
from taichi.lang.matrix import Matrix, MatrixType
from taichi.lang.shell import _shell_pop_print, oinspect
from taichi.lang.util import has_pytorch, to_taichi_type
from taichi.types import (any_arr, primitive_types, sparse_matrix_builder,
from taichi.types import (ndarray_type, primitive_types, sparse_matrix_builder,
template)

from taichi import _logging
@@ -292,7 +292,7 @@ def extract_arg(arg, anno):
TaichiCallableTemplateMapper.extract_arg(item, anno)
for item in arg)
return arg
if isinstance(anno, any_arr):
if isinstance(anno, ndarray_type.NdarrayType):
if isinstance(arg, taichi.lang._ndarray.ScalarNdarray):
anno._check_element_dim(arg, 0)
anno._check_element_shape(())
@@ -317,7 +317,7 @@ def extract_arg(arg, anno):
shape = tuple(arg.shape)
if len(shape) < element_dim:
raise ValueError(
f"Invalid argument into ti.any_arr() - required element_dim={element_dim}, "
f"Invalid argument into ti.types.ndarray() - required element_dim={element_dim}, "
f"but the argument has only {len(shape)} dimensions")
element_shape = (
) if element_dim == 0 else shape[:
@@ -428,7 +428,8 @@ def extract_arguments(self):
raise TaichiSyntaxError(
'Taichi kernels parameters must be type annotated')
else:
if isinstance(annotation, (template, any_arr)):
if isinstance(annotation,
(template, ndarray_type.NdarrayType)):
pass
elif id(annotation) in primitive_types.type_ids:
pass
@@ -568,12 +569,15 @@ def func__(*args):
elif isinstance(needed, sparse_matrix_builder):
# Pass only the base pointer of the ti.types.sparse_matrix_builder() argument
launch_ctx.set_arg_int(actual_argument_slot, v._get_addr())
elif isinstance(needed, any_arr) and isinstance(
v, taichi.lang._ndarray.Ndarray):
elif isinstance(needed,
ndarray_type.NdarrayType) and isinstance(
v, taichi.lang._ndarray.Ndarray):
has_external_arrays = True
v = v.arr
launch_ctx.set_arg_ndarray(actual_argument_slot, v)
elif isinstance(needed, any_arr) and (self.match_ext_arr(v)):
elif isinstance(
needed,
ndarray_type.NdarrayType) and (self.match_ext_arr(v)):
has_external_arrays = True
is_numpy = isinstance(v, np.ndarray)
if is_numpy:
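The `_check_element_dim` path above is what turns a mismatched argument into an early error. A minimal sketch of the behavior implied by the message in this hunk (the second call is left commented out because it would raise):

```python
import numpy as np
import taichi as ti

ti.init(arch=ti.cpu)

@ti.kernel
def double(arr: ti.types.ndarray()):
    for I in ti.grouped(arr):
        arr[I] *= 2.0

double(np.ones((4, 4), dtype=np.float32))  # unconstrained annotation: fine

@ti.kernel
def needs_matrices(arr: ti.types.ndarray(element_dim=2)):
    for I in ti.grouped(arr):
        arr[I] = arr[I] * 2.0

# needs_matrices(np.ones(4, dtype=np.float32))
# ValueError: Invalid argument into ti.types.ndarray() - required
# element_dim=2, but the argument has only 1 dimensions
```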
1 change: 1 addition & 0 deletions python/taichi/types/__init__.py
@@ -1,5 +1,6 @@
from taichi.types.annotations import *
from taichi.types.compound_types import *
from taichi.types.ndarray_type import *
from taichi.types.primitive_types import *
from taichi.types.quantized_types import *
from taichi.types.utils import *
95 changes: 1 addition & 94 deletions python/taichi/types/annotations.py
@@ -1,96 +1,3 @@
class ArgAnyArray:
"""Type annotation for arbitrary arrays, including external arrays and Taichi ndarrays.

For external arrays, we can treat it as a Taichi field with Vector or Matrix elements by specifying element dim and layout.
For Taichi vector/matrix ndarrays, we will automatically identify element dim and layout. If they are explicitly specified, we will check compatibility between the actual arguments and the annotation.

Args:
element_dim (Union[Int, NoneType], optional): None if not specified (will be treated as 0 for external arrays), 0 if scalar elements, 1 if vector elements, and 2 if matrix elements.
element_shape (Union[Tuple[Int], NoneType]): None if not specified, shapes of each element. For example, element_shape must be 1d for vector and 2d tuple for matrix. This argument is ignored for external arrays for now.
field_dim (Union[Int, NoneType]): None if not specified, number of field dimensions. This argument is ignored for external arrays for now.
layout (Union[Layout, NoneType], optional): None if not specified (will be treated as Layout.AOS for external arrays), Layout.AOS or Layout.SOA.
"""
def __init__(self,
element_dim=None,
element_shape=None,
field_dim=None,
layout=None):
if element_dim is not None and (element_dim < 0 or element_dim > 2):
raise ValueError(
"Only scalars, vectors, and matrices are allowed as elements of ti.any_arr()"
)
if element_dim is not None and element_shape is not None and len(
element_shape) != element_dim:
raise ValueError(
f"Both element_shape and element_dim are specified, but shape doesn't match specified dim: {len(element_shape)}!={element_dim}"
)
self.element_shape = element_shape
self.element_dim = len(
element_shape) if element_shape is not None else element_dim
self.field_dim = field_dim
self.layout = layout

def _check_element_dim(self, arg, arg_dim):
if self.element_dim is not None and self.element_dim != arg_dim:
raise ValueError(
f"Invalid argument into ti.any_arr() - required element_dim={self.element_dim}, but {arg} is provided"
)

def _check_layout(self, arg):
if self.layout is not None and self.layout != arg.layout:
raise ValueError(
f"Invalid argument into ti.any_arr() - required layout={self.layout}, but {arg} is provided"
)

def _check_element_shape(self, shapes):
if self.element_shape is not None and shapes != self.element_shape:
raise ValueError(
f"Invalid argument into ti.any_arr() - required element_shape={self.element_shape}, but {shapes} is provided"
)

def _check_field_dim(self, field_dim):
if self.field_dim is not None and field_dim != self.field_dim:
raise ValueError(
f"Invalid argument into ti.any_arr() - required field_dim={self.field_dim}, but {field_dim} is provided"
)


def ext_arr():
"""Type annotation for external arrays.

External arrays are formally defined as the data from other Python frameworks.
For now, Taichi supports numpy and pytorch.

Example::

>>> @ti.kernel
>>> def to_numpy(arr: ti.ext_arr()):
>>> for i in x:
>>> arr[i] = x[i]
>>>
>>> arr = numpy.zeros(...)
>>> to_numpy(arr) # `arr` will be filled with `x`'s data.
"""
return ArgAnyArray()


any_arr = ArgAnyArray
"""Alias for :class:`~taichi.types.annotations.ArgAnyArray`.

Example::

>>> @ti.kernel
>>> def to_numpy(x: ti.any_arr(), y: ti.any_arr()):
>>> for i in range(n):
>>> x[i] = y[i]
>>>
>>> y = ti.ndarray(ti.f64, shape=n)
>>> ... # calculate y
>>> x = numpy.zeros(n)
>>> to_numpy(x, y) # `x` will be filled with `y`'s data.
"""


class Template:
"""Type annotation for template kernel parameter.

@@ -114,4 +21,4 @@ class sparse_matrix_builder:
pass


__all__ = ['ext_arr', 'any_arr', 'template', 'sparse_matrix_builder']
__all__ = ['template', 'sparse_matrix_builder']
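For reference, the constructor parameters documented in the removed `ArgAnyArray` docstring carry over to its replacement; a hedged sketch of the equivalent constrained annotation under `ti.types.ndarray`, assuming `NdarrayType` keeps the same keyword names as the class it replaces:

```python
import taichi as ti

ti.init(arch=ti.cpu)

# Unconstrained: scalar elements, any layout, any number of field dims.
plain = ti.types.ndarray()

# Vector elements of length 3, 2-D field, SOA layout.
constrained = ti.types.ndarray(element_dim=1,
                               element_shape=(3, ),
                               field_dim=2,
                               layout=ti.Layout.SOA)

@ti.kernel
def norms(vecs: ti.types.ndarray(element_dim=1), out: ti.types.ndarray()):
    for I in ti.grouped(vecs):
        out[I] = vecs[I].norm()
```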