diff --git a/cmake/onnxruntime_unittests.cmake b/cmake/onnxruntime_unittests.cmake
index 40775c0dd028b..8693dd5e01e45 100644
--- a/cmake/onnxruntime_unittests.cmake
+++ b/cmake/onnxruntime_unittests.cmake
@@ -1288,11 +1288,11 @@ if (NOT onnxruntime_MINIMAL_BUILD AND NOT onnxruntime_EXTENDED_MINIMAL_BUILD
endif()
if (onnxruntime_USE_TVM)
- # find_library(STVM_LIBS NAMES libtvm.so PATHS ${onnxruntime_TVM_HOME}/lib)
- # link_directories(onnxruntime_test_all ${STVM_LIBS})
+ # find_library(TVM_LIBS NAMES libtvm.so PATHS ${onnxruntime_TVM_HOME}/lib)
+ # link_directories(onnxruntime_test_all ${TVM_LIBS})
find_library(PYTHON_LIBS NAMES libpython PATHS /usr/local/lib)
#target_link_libraries(onnxruntime_test_all PRIVATE ${PYTHON_LIBRARIES} -lutil)
- # set(CMAKE_SHARED_LINKER_FLAGS "-Wl,-rpath,${STVM_LIBS}")
+ # set(CMAKE_SHARED_LINKER_FLAGS "-Wl,-rpath,${TVM_LIBS}")
endif()
include(onnxruntime_fuzz_test.cmake)
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.shared.cs
index cb8f7e602115f..1a076dc6864c4 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.shared.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.shared.cs
@@ -150,15 +150,15 @@ public static SessionOptions MakeSessionOptionWithNupharProvider(String settings
}
///
- /// A helper method to construct a SessionOptions object for Stvm execution.
+ /// A helper method to construct a SessionOptions object for TVM execution.
/// Use only if you have the onnxruntime package specific to this Execution Provider.
///
/// settings string, comprises of comma separated key:value pairs. default is empty
- /// A SessionsOptions() object configured for execution with Stvm
- public static SessionOptions MakeSessionOptionWithStvmProvider(String settings = "")
+ /// A SessionsOptions() object configured for execution with TVM
+ public static SessionOptions MakeSessionOptionWithTvmProvider(String settings = "")
{
SessionOptions options = new SessionOptions();
- options.AppendExecutionProvider_Stvm(settings);
+ options.AppendExecutionProvider_Tvm(settings);
return options;
}
@@ -360,11 +360,11 @@ public void AppendExecutionProvider_Nuphar(string settings = "")
///
/// Use only if you have the onnxruntime package specific to this Execution Provider.
///
- /// string with Stvm specific settings
- public void AppendExecutionProvider_Stvm(string settings = "")
+ /// string with TVM specific settings
+ public void AppendExecutionProvider_Tvm(string settings = "")
{
#if __MOBILE__
- throw new NotSupportedException("The Stvm Execution Provider is not supported in this build");
+ throw new NotSupportedException("The TVM Execution Provider is not supported in this build");
#else
var settingsPinned = GCHandle.Alloc(NativeOnnxValueHelper.StringToZeroTerminatedUtf8(settings), GCHandleType.Pinned);
using (var pinnedSettingsName = new PinnedGCHandle(settingsPinned))
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs
index b292fe989d14f..202025df7bb11 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs
@@ -119,7 +119,7 @@ public void TestSessionOptions()
#endif
#if USE_TVM
- opt.AppendExecutionProvider_Stvm("Vulkan -device=amd_apu");
+ opt.AppendExecutionProvider_Tvm("Vulkan -device=amd_apu");
#endif
#if USE_NUPHAR
diff --git a/docs/python/inference/notebooks/onnxruntime-stvm-tutorial.ipynb b/docs/python/inference/notebooks/onnxruntime-stvm-tutorial.ipynb
index fec3f8abcdfde..287192c57e333 100644
--- a/docs/python/inference/notebooks/onnxruntime-stvm-tutorial.ipynb
+++ b/docs/python/inference/notebooks/onnxruntime-stvm-tutorial.ipynb
@@ -5,14 +5,14 @@
"id": "72476497",
"metadata": {},
"source": [
- "# ONNX Runtime: Tutorial for STVM execution provider\n",
+ "# ONNX Runtime: Tutorial for TVM execution provider\n",
"\n",
- "This notebook shows a simple example for model inference with STVM EP.\n",
+ "This notebook shows a simple example for model inference with TVM EP.\n",
"\n",
"\n",
"#### Tutorial Roadmap:\n",
"1. Prerequistes\n",
- "2. Accuracy check for STVM EP\n",
+ "2. Accuracy check for TVM EP\n",
"3. Configuration options"
]
},
@@ -72,7 +72,7 @@
"source": [
"import tvm.testing\n",
"from tvm.contrib.download import download_testdata\n",
- "import onnxruntime.providers.stvm # nessesary to register tvm_onnx_import_and_compile and others"
+    "import onnxruntime.providers.tvm # necessary to register tvm_onnx_import_and_compile and others"
]
},
{
@@ -254,7 +254,7 @@
"id": "90fb7c5c",
"metadata": {},
"source": [
- "## 2. Accuracy check for STVM EP \n",
+ "## 2. Accuracy check for TVM EP \n",
"\n",
"This section will check the accuracy. The check will be to compare the output tensors for `CPUExecutionProvider` and `TvmExecutionProvider`. See the description of `verify_with_ort_with_inputs` function used above.\n",
"\n",
@@ -424,7 +424,7 @@
"source": [
"## 3. Configuration options\n",
"\n",
- "This section shows how you can configure STVM EP using custom options. For more details on the options used, see the corresponding section of the documentation."
+ "This section shows how you can configure TVM EP using custom options. For more details on the options used, see the corresponding section of the documentation."
]
},
{
diff --git a/java/src/main/java/ai/onnxruntime/OrtSession.java b/java/src/main/java/ai/onnxruntime/OrtSession.java
index 2f2b39dc8d09d..34b707f3de9f2 100644
--- a/java/src/main/java/ai/onnxruntime/OrtSession.java
+++ b/java/src/main/java/ai/onnxruntime/OrtSession.java
@@ -890,14 +890,14 @@ public void addNuphar(boolean allowUnalignedBuffers, String settings) throws Ort
}
/**
- * Adds Stvm as an execution backend.
+ * Adds TVM as an execution backend.
*
* @param settings See the documentation for valid settings strings.
* @throws OrtException If there was an error in native code.
*/
- public void addStvm(String settings) throws OrtException {
+ public void addTvm(String settings) throws OrtException {
checkClosed();
- addStvm(OnnxRuntime.ortApiHandle, nativeHandle, settings);
+ addTvm(OnnxRuntime.ortApiHandle, nativeHandle, settings);
}
/**
@@ -1045,7 +1045,7 @@ private native void addNuphar(
long apiHandle, long nativeHandle, int allowUnalignedBuffers, String settings)
throws OrtException;
- private native void addStvm(
+ private native void addTvm(
long apiHandle, long nativeHandle, String settings)
throws OrtException;
diff --git a/java/src/main/native/ai_onnxruntime_OrtSession_SessionOptions.c b/java/src/main/native/ai_onnxruntime_OrtSession_SessionOptions.c
index 69a904b7e1edc..e7f4c15fea712 100644
--- a/java/src/main/native/ai_onnxruntime_OrtSession_SessionOptions.c
+++ b/java/src/main/native/ai_onnxruntime_OrtSession_SessionOptions.c
@@ -466,10 +466,10 @@ JNIEXPORT void JNICALL Java_ai_onnxruntime_OrtSession_00024SessionOptions_addNup
/*
* Class:: ai_onnxruntime_OrtSession_SessionOptions
- * Method: addStvm
+ * Method: addTvm
* Signature: (JILjava/lang/String)V
*/
-JNIEXPORT void JNICALL Java_ai_onnxruntime_OrtSession_00024SessionOptions_addStvm
+JNIEXPORT void JNICALL Java_ai_onnxruntime_OrtSession_00024SessionOptions_addTvm
(JNIEnv * jniEnv, jobject jobj, jlong apiHandle, jlong handle, jstring settingsString) {
(void)jobj;
#ifdef USE_TVM
@@ -477,8 +477,8 @@ JNIEXPORT void JNICALL Java_ai_onnxruntime_OrtSession_00024SessionOptions_addStv
checkOrtStatus(jniEnv,(const OrtApi*)apiHandle,OrtSessionOptionsAppendExecutionProvider_Tvm((OrtSessionOptions*) handle, settings));
(*jniEnv)->ReleaseStringUTFChars(jniEnv,settingsString,settings);
#else
- (void)apiHandle;(void)handle;(void)settingsString; // Parameters used when Stvm is defined.
- throwOrtException(jniEnv,convertErrorCode(ORT_INVALID_ARGUMENT),"This binary was not compiled with Stvm support.");
+ (void)apiHandle;(void)handle;(void)settingsString; // Parameters used when TVM is defined.
+ throwOrtException(jniEnv,convertErrorCode(ORT_INVALID_ARGUMENT),"This binary was not compiled with TVM support.");
#endif
}
diff --git a/onnxruntime/core/session/provider_stubs.cc b/onnxruntime/core/session/provider_stubs.cc
index dedab38cdd29d..067f8a600e2aa 100644
--- a/onnxruntime/core/session/provider_stubs.cc
+++ b/onnxruntime/core/session/provider_stubs.cc
@@ -63,7 +63,7 @@ ORT_API_STATUS_IMPL(OrtSessionOptionsAppendExecutionProvider_Tvm,
_In_ OrtSessionOptions* options, _In_ const char* settings) {
ORT_UNUSED_PARAMETER(options);
ORT_UNUSED_PARAMETER(settings);
- return CreateNotEnabledStatus("Stvm");
+ return CreateNotEnabledStatus("Tvm");
}
#endif
diff --git a/onnxruntime/python/onnxruntime_pybind_state_common.h b/onnxruntime/python/onnxruntime_pybind_state_common.h
index 5089c098396fc..093fced91c3ba 100644
--- a/onnxruntime/python/onnxruntime_pybind_state_common.h
+++ b/onnxruntime/python/onnxruntime_pybind_state_common.h
@@ -23,7 +23,7 @@ struct OrtStatus {
char msg[1]; // a null-terminated string
};
-#define BACKEND_DEVICE BACKEND_PROC BACKEND_DNNL BACKEND_OPENVINO BACKEND_NUPHAR BACKEND_STVM BACKEND_OPENBLAS BACKEND_MIGRAPHX BACKEND_ACL BACKEND_ARMNN BACKEND_DML
+#define BACKEND_DEVICE BACKEND_PROC BACKEND_DNNL BACKEND_OPENVINO BACKEND_NUPHAR BACKEND_TVM BACKEND_OPENBLAS BACKEND_MIGRAPHX BACKEND_ACL BACKEND_ARMNN BACKEND_DML
#include "core/session/onnxruntime_cxx_api.h"
#include "core/providers/providers.h"
#include "core/providers/cpu/cpu_execution_provider.h"
@@ -87,9 +87,9 @@ struct OrtStatus {
#endif
#ifdef USE_TVM
-#define BACKEND_STVM "-STVM"
+#define BACKEND_TVM "-TVM"
#else
-#define BACKEND_STVM ""
+#define BACKEND_TVM ""
#endif
#if USE_VITISAI