Skip to content

Commit

Permalink
[skip ci] Merge branch 'master' into static-assert
Browse files Browse the repository at this point in the history
  • Loading branch information
archibate committed Jun 29, 2020
2 parents 6590d0f + c83a7e3 commit ca33871
Show file tree
Hide file tree
Showing 11 changed files with 91 additions and 6 deletions.
2 changes: 1 addition & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ project(taichi)

SET(TI_VERSION_MAJOR 0)
SET(TI_VERSION_MINOR 6)
SET(TI_VERSION_PATCH 13)
SET(TI_VERSION_PATCH 14)

execute_process(
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
Expand Down
2 changes: 1 addition & 1 deletion docs/version
Original file line number Diff line number Diff line change
@@ -1 +1 @@
0.6.13
0.6.14
4 changes: 2 additions & 2 deletions misc/ci_setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@
) * 8 == 64, "Only 64-bit platforms are supported. Current platform: {}".format(
struct.calcsize('P') * 8)

if sys.version_info[0] < 3 or sys.version_info[1] < 5:
print("\nPlease restart with python3. \n(Taichi supports Python 3.5+)\n")
if sys.version_info[0] < 3 or sys.version_info[1] < 6:
print("\nPlease restart with python3. \n(Taichi supports Python 3.6+)\n")
print("Current version:", sys.version_info)
exit(-1)

Expand Down
4 changes: 4 additions & 0 deletions python/taichi/lang/ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -443,6 +443,10 @@ def is_active(l, indices):
ti_core.insert_is_active(l.snode().ptr, make_expr_group(indices)))


def activate(l, indices):
    """Insert an SNode 'activate' operation for the cell of `l` at `indices`."""
    snode_ptr = l.snode().ptr
    index_group = make_expr_group(indices)
    ti_core.insert_activate(snode_ptr, index_group)


def deactivate(l, indices):
    """Insert an SNode 'deactivate' operation for the cell of `l` at `indices`."""
    snode_ptr = l.snode().ptr
    index_group = make_expr_group(indices)
    ti_core.insert_deactivate(snode_ptr, index_group)

Expand Down
3 changes: 3 additions & 0 deletions taichi/backends/metal/codegen_metal.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -252,6 +252,9 @@ class KernelCodegen : public IRVisitor {
// is_active(device byte *addr, SNodeMeta meta, int i);
emit("{} = is_active({}, {}, {});", result_var, ch_addr,
kSNodeMetaVarName, ch_id);
} else if (opty == SNodeOpType::activate) {
// activate(device byte *addr, SNodeMeta meta, int i);
emit("activate({}, {}, {});", ch_addr, kSNodeMetaVarName, ch_id);
} else if (opty == SNodeOpType::deactivate) {
// deactivate(device byte *addr, SNodeMeta meta, int i);
emit("deactivate({}, {}, {});", ch_addr, kSNodeMetaVarName, ch_id);
Expand Down
3 changes: 3 additions & 0 deletions taichi/codegen/codegen_llvm.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -918,6 +918,9 @@ void CodeGenLLVM::visit(SNodeOpStmt *stmt) {
} else if (stmt->op_type == SNodeOpType::is_active) {
llvm_val[stmt] =
call(snode, llvm_val[stmt->ptr], "is_active", {llvm_val[stmt->val]});
} else if (stmt->op_type == SNodeOpType::activate) {
llvm_val[stmt] =
call(snode, llvm_val[stmt->ptr], "activate", {llvm_val[stmt->val]});
} else if (stmt->op_type == SNodeOpType::deactivate) {
if (snode->type == SNodeType::pointer || snode->type == SNodeType::hash ||
snode->type == SNodeType::bitmasked) {
Expand Down
3 changes: 2 additions & 1 deletion taichi/ir/ir.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -915,7 +915,8 @@ SNodeOpStmt::SNodeOpStmt(SNodeOpType op_type,
ptr = nullptr;
val = nullptr;
TI_ASSERT(op_type == SNodeOpType::is_active ||
op_type == SNodeOpType::deactivate);
op_type == SNodeOpType::deactivate ||
op_type == SNodeOpType::activate);
width() = 1;
element_type() = DataType::i32;
TI_STMT_REG_FIELDS;
Expand Down
4 changes: 4 additions & 0 deletions taichi/python/export_lang.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -238,6 +238,10 @@ void export_lang(py::module &m) {
return Deactivate(snode, indices);
});

m.def("insert_activate", [](SNode *snode, const ExprGroup &indices) {
return Activate(snode, indices);
});

m.def("insert_append",
[](SNode *snode, const ExprGroup &indices, const Expr &val) {
return Append(snode, indices, val);
Expand Down
5 changes: 4 additions & 1 deletion taichi/transforms/lower_ast.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -377,11 +377,14 @@ class LowerAST : public IRVisitor {
fctx.push_back<SNodeOpStmt>(stmt->op_type, stmt->snode, ptr, val_stmt);
} else if (stmt->snode->type == SNodeType::pointer ||
stmt->snode->type == SNodeType::hash ||
stmt->snode->type == SNodeType::dynamic ||
stmt->snode->type == SNodeType::dense ||
stmt->snode->type == SNodeType::bitmasked) {
TI_ASSERT(SNodeOpStmt::activation_related(stmt->op_type));
fctx.push_back<SNodeOpStmt>(stmt->op_type, stmt->snode, indices_stmt);
} else {
TI_ERROR("The {} operation is not supported on {} SNode",
snode_op_type_name(stmt->op_type),
snode_type_name(stmt->snode->type));
TI_NOT_IMPLEMENTED
}

Expand Down
32 changes: 32 additions & 0 deletions tests/python/test_sparse_activate.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import taichi as ti


@ti.archs_support_sparse
def test_pointer():
    # Verify that ti.activate() on a pointer SNode makes the whole
    # corresponding dense block visible to a struct-for loop.
    x = ti.var(ti.f32)
    s = ti.var(ti.i32)

    n = 16

    # Layout: 16 pointer cells, each holding a dense block of 16 x's
    # (256 leaf elements total); `s` is a scalar counter at the root.
    ptr = ti.root.pointer(ti.i, n)
    ptr.dense(ti.i, n).place(x)
    ti.root.place(s)

    s[None] = 0

    @ti.kernel
    def activate():
        # Indices appear to be in leaf (global) coordinates: 1 falls in
        # pointer cell 0 and 32 in pointer cell 2, so two blocks become
        # active -- consistent with the expected count of 32 below.
        ti.activate(ptr, 1)
        ti.activate(ptr, 32)

    @ti.kernel
    def func():
        # Struct-for visits only leaves under active pointer cells.
        for i in x:
            s[None] += 1

    activate()
    func()
    # Two active blocks of 16 dense cells each -> 32 iterations.
    assert s[None] == 32


test_pointer()
35 changes: 35 additions & 0 deletions tests/python/test_sparse_deactivate.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,41 @@ def deactivate():
assert s[None] == 16


@ti.archs_support_sparse
def test_pointer1():
    # Verify that ti.deactivate() on a pointer SNode hides an entire
    # dense block from subsequent struct-for loops.
    x = ti.var(ti.f32)
    s = ti.var(ti.i32)

    n = 16

    # Layout: 16 pointer cells x 16 dense cells = 256 leaf elements;
    # `s` is a scalar counter placed at the root.
    ptr = ti.root.pointer(ti.i, n)
    ptr.dense(ti.i, n).place(x)
    ti.root.place(s)

    s[None] = 0

    @ti.kernel
    def func():
        # Count every leaf element under an active pointer cell.
        for i in x:
            s[None] += 1

    # Writing x activates blocks on demand: leaf indices 0, 19, 20, 45
    # touch pointer cells 0, 1 and 2 -> 3 blocks x 16 = 48 active leaves.
    x[0] = 1
    x[19] = 1
    x[20] = 1
    x[45] = 1
    func()
    assert s[None] == 48

    @ti.kernel
    def deactivate():
        # Presumably leaf-coordinate addressing: index 4 lies in pointer
        # cell 0, so one 16-element block is removed (48 -> 32 matches).
        ti.deactivate(ptr, 4)

    deactivate()
    s[None] = 0
    func()
    assert s[None] == 32


@ti.archs_support_sparse
def test_pointer2():
x = ti.var(ti.f32)
Expand Down

0 comments on commit ca33871

Please sign in to comment.