diff --git a/qa/L0_dlpack_multi_gpu/test.sh b/qa/L0_dlpack_multi_gpu/test.sh
index 8bc72ed37bd..39b22a95d77 100644
--- a/qa/L0_dlpack_multi_gpu/test.sh
+++ b/qa/L0_dlpack_multi_gpu/test.sh
@@ -27,7 +27,7 @@ SERVER=/opt/tritonserver/bin/tritonserver
 SERVER_ARGS="--model-repository=`pwd`/models --log-verbose=1"
-CLIENT_PY=../L0_backend_python/python_unittest.py
+CLIENT_PY=./python_unittest.py
 CLIENT_LOG="./client.log"
 EXPECTED_NUM_TESTS="1"
 TEST_RESULT_FILE='test_results.txt'
@@ -40,7 +40,7 @@ rm -fr *.log ./models
 source ../common/util.sh
 
 # Uninstall the non CUDA version of PyTorch
-pip3 uninstall -y torch
+# pip3 uninstall -y torch
 pip3 install torch==1.13.0+cu117 -f https://download.pytorch.org/whl/torch_stable.html
 pip3 install tensorflow
 
@@ -52,6 +52,8 @@ rm -fr *.log ./models
 mkdir -p models/dlpack_test/1/
 cp ../python_models/dlpack_test/model.py models/dlpack_test/1/
 cp ../python_models/dlpack_test/config.pbtxt models/dlpack_test
+cp ../L0_backend_python/python_unittest.py .
+sed -i 's#sys.path.append("../../common")#sys.path.append("../common")#g' python_unittest.py
 
 run_server
 if [ "$SERVER_PID" == "0" ]; then