Commit

Re-enable custom op support
nithinsubbiah committed Aug 9, 2022
1 parent b696362 · commit 1fb97ac
Showing 10 changed files with 41 additions and 37 deletions.
build_tools/update_shape_lib.sh: 12 additions & 11 deletions
@@ -9,26 +9,27 @@
 # For more information on supporting custom operators, see:
 # ${TORCH_MLIR}/python/torch_mlir/_torch_mlir_custom_op_example/README.md

-set -euo pipefail
+set -eo pipefail

 src_dir="$(realpath "$(dirname "$0")"/..)"
 build_dir="$(realpath "${TORCH_MLIR_BUILD_DIR:-$src_dir/build}")"
 torch_transforms_cpp_dir="${src_dir}/lib/Dialect/Torch/Transforms"
 python_packages_dir="${build_dir}/tools/torch-mlir/python_packages"

 pypath="${python_packages_dir}/torch_mlir"
-# TODO: Re-enable once custom op support is back.
-#if [ ! -z ${TORCH_MLIR_EXT_PYTHONPATH} ]; then
-#  pypath="${pypath}:${TORCH_MLIR_EXT_PYTHONPATH}"
-#fi
-#ext_module="torch_mlir._torch_mlir_custom_op_example"
-#if [ ! -z ${TORCH_MLIR_EXT_MODULES} ]; then
+if [ ! -z ${TORCH_MLIR_EXT_PYTHONPATH} ]; then
+  pypath="${pypath}:${TORCH_MLIR_EXT_PYTHONPATH}"
+fi
+# TODO: reenable custom op example
+# ext_module="torch_mlir._torch_mlir_custom_op_example"
+# if [ ! -z ${TORCH_MLIR_EXT_MODULES} ]; then
 #  ext_module="${ext_module},${TORCH_MLIR_EXT_MODULES} "
-#fi
+# fi
+if [ ! -z ${TORCH_MLIR_EXT_MODULES} ]; then
+  ext_module="${TORCH_MLIR_EXT_MODULES} "
+fi

 PYTHONPATH="${pypath}" python \
   -m torch_mlir.dialects.torch.importer.jit_ir.build_tools.shape_lib_gen \
+  --pytorch_op_extensions=${ext_module} \
   --torch_transforms_cpp_dir="${torch_transforms_cpp_dir}"
-
-# TODO: Add back to shape_lib_gen invocation once custom op support is back.
-# --pytorch_op_extensions=${ext_module} \
build_tools/update_torch_ods.sh: 12 additions & 11 deletions
@@ -9,27 +9,28 @@
 # For more information on supporting custom operators, see:
 # ${TORCH_MLIR}/python/torch_mlir/_torch_mlir_custom_op_example/README.md

-set -euo pipefail
+set -eo pipefail

 src_dir="$(realpath "$(dirname "$0")"/..)"
 build_dir="$(realpath "${TORCH_MLIR_BUILD_DIR:-$src_dir/build}")"
 torch_ir_include_dir="${src_dir}/include/torch-mlir/Dialect/Torch/IR"
 python_packages_dir="${build_dir}/tools/torch-mlir/python_packages"

 pypath="${python_packages_dir}/torch_mlir"
-# TODO: Re-enable once custom op support is back.
-#if [ ! -z ${TORCH_MLIR_EXT_PYTHONPATH} ]; then
-#  pypath="${pypath}:${TORCH_MLIR_EXT_PYTHONPATH}"
-#fi
-#ext_module="torch_mlir._torch_mlir_custom_op_example"
-#if [ ! -z ${TORCH_MLIR_EXT_MODULES} ]; then
+if [ ! -z ${TORCH_MLIR_EXT_PYTHONPATH} ]; then
+  pypath="${pypath}:${TORCH_MLIR_EXT_PYTHONPATH}"
+fi
+# TODO: reenable custom op example
+# ext_module="torch_mlir._torch_mlir_custom_op_example"
+# if [ ! -z ${TORCH_MLIR_EXT_MODULES} ]; then
 #  ext_module="${ext_module},${TORCH_MLIR_EXT_MODULES}"
-#fi
+# fi
+if [ ! -z ${TORCH_MLIR_EXT_MODULES} ]; then
+  ext_module="${TORCH_MLIR_EXT_MODULES}"
+fi

 PYTHONPATH="${pypath}" python \
   -m torch_mlir.dialects.torch.importer.jit_ir.build_tools.torch_ods_gen \
   --torch_ir_include_dir="${torch_ir_include_dir}" \
+  --pytorch_op_extensions="${ext_module}" \
   --debug_registry_dump="${torch_ir_include_dir}/JITOperatorRegistryDump.txt"
-
-# TODO: Add back to torch_ods_gen invocation once custom op support is back.
-# --pytorch_op_extensions="${ext_module}" \
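For orientation (not part of the commit): a minimal usage sketch of the extension hooks these two scripts re-enable. It assumes a hypothetical out-of-tree package my_torch_ext importable from /path/to/my_ext/python; both names are placeholders, and the scripts simply forward whatever TORCH_MLIR_EXT_PYTHONPATH and TORCH_MLIR_EXT_MODULES contain to the generators.

  # Hypothetical out-of-tree extension that registers extra ops with the
  # PyTorch JIT when imported; the path and module name are illustrative only.
  export TORCH_MLIR_EXT_PYTHONPATH="/path/to/my_ext/python"
  export TORCH_MLIR_EXT_MODULES="my_torch_ext"

  # Regenerate the Torch dialect ODS and the shape library with the extra ops.
  ./build_tools/update_torch_ods.sh
  ./build_tools/update_shape_lib.sh

  # With the variables unset, the scripts fall back to the in-tree ops only;
  # presumably this is why `set -u` was dropped, since the unguarded
  # ${TORCH_MLIR_EXT_*} expansions must be allowed to be empty.
  unset TORCH_MLIR_EXT_PYTHONPATH TORCH_MLIR_EXT_MODULES
  ./build_tools/update_torch_ods.sh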
lib/Conversion/TorchToLinalg/CMakeLists.txt: 2 additions & 2 deletions
@@ -1,6 +1,6 @@
 add_mlir_conversion_library(TorchMLIRTorchToLinalg
-  # TODO: Re-enable after MacOS support is fixed for the custom op extension.
-  # CustomOpExample.cpp
+  # TODO: reenable custom op example
+  # CustomOpExample.cpp
   DataMovement.cpp
   IndirectDataMovement.cpp
   Linear.cpp
lib/Conversion/TorchToLinalg/PopulatePatterns.h: 2 additions & 1 deletion
@@ -63,7 +63,8 @@ void populateIndirectDataMovementPatternsAndLegality(
 void populateTensorConstructorsPatternsAndLegality(TypeConverter &typeConverter,
                                                    RewritePatternSet &patterns,
                                                    ConversionTarget &target);
-//void populateCustomOpExamplePatternsAndLegality(TypeConverter &typeConverter,
+// TODO: reenable custom op example
+// void populateCustomOpExamplePatternsAndLegality(TypeConverter &typeConverter,
 //                                                 RewritePatternSet &patterns,
 //                                                 ConversionTarget &target);

lib/Conversion/TorchToLinalg/TorchToLinalg.cpp: 2 additions & 1 deletion
@@ -62,7 +62,8 @@ class ConvertTorchToLinalg

     RewritePatternSet patterns(context);

-    //torch_to_linalg::populateCustomOpExamplePatternsAndLegality(
+    // TODO: reenable custom op example
+    // torch_to_linalg::populateCustomOpExamplePatternsAndLegality(
     //     typeConverter, patterns, target);
     torch_to_linalg::populateTensorScalarInteropPatternsAndLegality(
         typeConverter, patterns, target);
python/CMakeLists.txt: 4 additions & 4 deletions
@@ -111,8 +111,8 @@ add_subdirectory(torch_mlir/eager_mode)
 # Required for running the update_torch_ods.sh and update_shape_lib.sh scripts.
 ################################################################################

-# TODO: renable once it build on macOS Intel / M1
-#add_subdirectory(torch_mlir/_torch_mlir_custom_op_example)
+#TODO: reenable custom op example
+# add_subdirectory(torch_mlir/_torch_mlir_custom_op_example)

 ################################################################################
 # Generate packages and shared library
@@ -160,8 +160,8 @@ if(TORCH_MLIR_ENABLE_JIT_IR_IMPORTER)
   add_dependencies(TorchMLIRPythonModules TorchMLIRE2ETestPythonModules)
 endif()

-# TODO: Add after macOS builds are fixed
-#add_dependencies(TorchMLIRPythonModules torch_mlir_custom_op_example)
+#TODO: reenable custom op example
+# add_dependencies(TorchMLIRPythonModules torch_mlir_custom_op_example)

 if(TORCH_MLIR_ENABLE_LTC)
   # Add Torch-MLIR LTC backend as dependency
(changed file; filename not captured in this copy)
@@ -1,5 +1,5 @@
 # Setup PyTorch
-list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../dialects/torch/importer/jit_ir/cmake/modules")
+list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../cmake/modules")
 include(TorchMLIRPyTorch)
 TorchMLIRProbeForPyTorchInstall()
 find_package(Torch 1.8 REQUIRED)
(changed file; filename not captured in this copy)
@@ -1164,8 +1164,8 @@ def aten〇linalg_vector_norm(self: List[int], ord: float = 2, dim: Optional[Lis
         dim = list(range(len(self)))
     return upstream_shape_functions.mean_dim(self, dim, keepdim, dtype)

-# TODO: Re-enable after MacOS support is fixed for the extension.
-#def _torch_mlir_custom_op_example〇identity(t: List[int]) -> List[int]:
+#TODO: reenable custom op example
+# def _torch_mlir_custom_op_example〇identity(t: List[int]) -> List[int]:
 #     return upstream_shape_functions.unary(t)

 # ==============================================================================
(changed file; filename not captured in this copy)
@@ -636,8 +636,8 @@ def emit_with_mutating_variants(key, **kwargs):
     # extension.
     # ==========================================================================

-    # TODO: Re-enable after MacOS support is fixed for the extension.
-    #emit("_torch_mlir_custom_op_example::identity : (Tensor) -> (Tensor)")
+    #TODO: reenable custom op example
+    # emit("_torch_mlir_custom_op_example::identity : (Tensor) -> (Tensor)")


 def dump_registered_ops(outfile: TextIO, registry: Registry):
python/torch_mlir_e2e_test/test_suite/__init__.py: 2 additions & 2 deletions
@@ -51,5 +51,5 @@ def register_all_tests():
     from . import return_types
     from . import control_flow
     from . import stats
-    # TODO: Re-enable after MacOS support is fixed for the extension.
-    #from . import custom_op_example
+    #TODO: reenable custom op example
+    # from . import custom_op_example
