From 78b67af6cb7a919a5400c9679169c366221cf283 Mon Sep 17 00:00:00 2001
From: Mark Harris
Date: Fri, 6 Aug 2021 08:01:50 +1000
Subject: [PATCH] Fix more warnings (#311)

Warnings missed in #299...

Authors:
  - Mark Harris (https://github.com/harrism)

Approvers:
  - Divye Gala (https://github.com/divyegala)
  - Dante Gama Dessavre (https://github.com/dantegd)

URL: https://github.com/rapidsai/raft/pull/311
---
 cpp/include/raft/distance/distance.cuh    | 34 +++++++++++------------
 cpp/include/raft/linalg/cublas_wrappers.h |  5 +---
 2 files changed, 18 insertions(+), 21 deletions(-)

diff --git a/cpp/include/raft/distance/distance.cuh b/cpp/include/raft/distance/distance.cuh
index 1b39a6ec18..fc0d07773f 100644
--- a/cpp/include/raft/distance/distance.cuh
+++ b/cpp/include/raft/distance/distance.cuh
@@ -47,7 +47,7 @@ struct DistanceImpl {
   void run(const InType *x, const InType *y, OutType *dist, Index_ m, Index_ n,
            Index_ k, void *workspace, size_t worksize, FinalLambda fin_op,
-           cudaStream_t stream, bool isRowMajor, InType metric_arg) {
+           cudaStream_t stream, bool isRowMajor, InType) {
     raft::distance::euclideanAlgo1(m, n, k, x, y, dist, false,
                                    (AccType *)workspace, worksize,
@@ -61,7 +61,7 @@ struct DistanceImpl {
   void run(const InType *x, const InType *y, OutType *dist, Index_ m, Index_ n,
            Index_ k, void *workspace, size_t worksize, FinalLambda fin_op,
-           cudaStream_t stream, bool isRowMajor, InType metric_arg) {
+           cudaStream_t stream, bool isRowMajor, InType) {
     raft::distance::euclideanAlgo1(m, n, k, x, y, dist, true,
                                    (AccType *)workspace, worksize,
@@ -75,7 +75,7 @@ struct DistanceImpl {
   void run(const InType *x, const InType *y, OutType *dist, Index_ m, Index_ n,
            Index_ k, void *workspace, size_t worksize, FinalLambda fin_op,
-           cudaStream_t stream, bool isRowMajor, InType metric_arg) {
+           cudaStream_t stream, bool isRowMajor, InType) {
     raft::distance::cosineAlgo1(
       m, n, k, x, y, dist, (AccType *)workspace, worksize, fin_op, stream,
       isRowMajor);
@@ -87,8 +87,8 @@ template {
   void run(const InType *x, const InType *y, OutType *dist, Index_ m, Index_ n,
-           Index_ k, void *workspace, size_t worksize, FinalLambda fin_op,
-           cudaStream_t stream, bool isRowMajor, InType metric_arg) {
+           Index_ k, void *, size_t, FinalLambda fin_op, cudaStream_t stream,
+           bool isRowMajor, InType) {
     raft::distance::euclideanAlgo2(m, n, k, x, y, dist, false, fin_op, stream, isRowMajor);
@@ -100,8 +100,8 @@ template {
   void run(const InType *x, const InType *y, OutType *dist, Index_ m, Index_ n,
-           Index_ k, void *workspace, size_t worksize, FinalLambda fin_op,
-           cudaStream_t stream, bool isRowMajor, InType metric_arg) {
+           Index_ k, void *, size_t, FinalLambda fin_op, cudaStream_t stream,
+           bool isRowMajor, InType) {
     raft::distance::euclideanAlgo2(m, n, k, x, y, dist, true, fin_op, stream, isRowMajor);
@@ -113,8 +113,8 @@ template {
   void run(const InType *x, const InType *y, OutType *dist, Index_ m, Index_ n,
-           Index_ k, void *workspace, size_t worksize, FinalLambda fin_op,
-           cudaStream_t stream, bool isRowMajor, InType metric_arg) {
+           Index_ k, void *, size_t, FinalLambda fin_op, cudaStream_t stream,
+           bool isRowMajor, InType) {
     raft::distance::l1Impl(
       m, n, k, x, y, dist, fin_op, stream, isRowMajor);
   }
@@ -125,8 +125,8 @@ template {
   void run(const InType *x, const InType *y, OutType *dist, Index_ m, Index_ n,
-           Index_ k, void *workspace, size_t worksize, FinalLambda fin_op,
-           cudaStream_t stream, bool isRowMajor, InType metric_arg) {
+           Index_ k, void *, size_t, FinalLambda fin_op, cudaStream_t stream,
+           bool isRowMajor, InType) {
     raft::distance::chebyshevImpl(m, n, k, x, y, dist, fin_op, stream, isRowMajor);
@@ -138,8 +138,8 @@ template {
   void run(const InType *x, const InType *y, OutType *dist, Index_ m, Index_ n,
-           Index_ k, void *workspace, size_t worksize, FinalLambda fin_op,
-           cudaStream_t stream, bool isRowMajor, InType metric_arg) {
+           Index_ k, void *, size_t, FinalLambda fin_op, cudaStream_t stream,
+           bool isRowMajor, InType) {
     raft::distance::hellingerImpl(m, n, k, x, y, dist, fin_op, stream, isRowMajor);
@@ -151,8 +151,8 @@ template {
   void run(const InType *x, const InType *y, OutType *dist, Index_ m, Index_ n,
-           Index_ k, void *workspace, size_t worksize, FinalLambda fin_op,
-           cudaStream_t stream, bool isRowMajor, InType metric_arg) {
+           Index_ k, void *, size_t, FinalLambda fin_op, cudaStream_t stream,
+           bool isRowMajor, InType metric_arg) {
     raft::distance::minkowskiImpl(m, n, k, x, y, dist, fin_op, stream, isRowMajor, metric_arg);
@@ -164,8 +164,8 @@ template {
   void run(const InType *x, const InType *y, OutType *dist, Index_ m, Index_ n,
-           Index_ k, void *workspace, size_t worksize, FinalLambda fin_op,
-           cudaStream_t stream, bool isRowMajor, InType metric_arg) {
+           Index_ k, void *, size_t, FinalLambda fin_op, cudaStream_t stream,
+           bool isRowMajor, InType) {
     raft::distance::canberraImpl(
       m, n, k, x, y, dist, fin_op, stream, isRowMajor);
   }
diff --git a/cpp/include/raft/linalg/cublas_wrappers.h b/cpp/include/raft/linalg/cublas_wrappers.h
index 7c79e6c91d..1be14a550d 100644
--- a/cpp/include/raft/linalg/cublas_wrappers.h
+++ b/cpp/include/raft/linalg/cublas_wrappers.h
@@ -86,10 +86,8 @@ inline const char *cublas_error_to_string(cublasStatus_t err) {
 /** FIXME: temporary alias for cuML compatibility */
 #define CUBLAS_CHECK(call) CUBLAS_TRY(call)
 
-///@todo: enable this once we have logging enabled
-#if 0
 /** check for cublas runtime API errors but do not assert */
-define CUBLAS_CHECK_NO_THROW(call) \
+#define CUBLAS_CHECK_NO_THROW(call) \
   do { \
     cublasStatus_t err = call; \
     if (err != CUBLAS_STATUS_SUCCESS) { \
@@ -97,7 +95,6 @@ define CUBLAS_CHECK_NO_THROW(call) \
       raft::linalg::detail::cublas_error_to_string(err)); \
     } \
   } while (0)
-#endif
 
 namespace raft {
 namespace linalg {
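Note on the technique: the distance.cuh hunks silence unused-parameter warnings by leaving the unused parameters unnamed (void *, size_t, InType) rather than deleting them, so every DistanceImpl specialization keeps the same run() signature; the cublas_wrappers.h hunks re-enable CUBLAS_CHECK_NO_THROW by removing the surrounding #if 0 / #endif and restoring the missing '#' on the define line. A minimal, self-contained sketch of the unnamed-parameter idiom (illustration only, not part of the patch; the function and variable names below are hypothetical):

    #include <cstdio>

    // This implementation ignores the scale argument that callers still pass.
    // Naming the parameter would trigger -Wunused-parameter under -Wextra;
    // leaving it unnamed keeps the call signature and silences the warning.
    void fill_ones(float *out, int n, float /* scale, intentionally unused */) {
      for (int i = 0; i < n; ++i) out[i] = 1.0f;
    }

    int main() {
      float v[3];
      fill_ones(v, 3, 2.0f);  // third argument is accepted and ignored
      std::printf("%g %g %g\n", v[0], v[1], v[2]);
      return 0;
    }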