From 3f61b64cd9c626c1519509a0fd30e590131b67ab Mon Sep 17 00:00:00 2001
From: Malte Foerster
Date: Thu, 30 Mar 2023 20:47:29 +0000
Subject: [PATCH] changed order of arguments according to best practice

---
 .../distance/detail/kernels/gram_matrix.cuh  | 72 +++++++++---------
 .../detail/kernels/kernel_matrices.cuh       | 74 +++++++++----------
 cpp/test/distance/gram.cu                    |  2 +-
 cpp/test/sparse/gram.cu                      |  6 +-
 4 files changed, 77 insertions(+), 77 deletions(-)

diff --git a/cpp/include/raft/distance/detail/kernels/gram_matrix.cuh b/cpp/include/raft/distance/detail/kernels/gram_matrix.cuh
index 31feb75e05..f03f746161 100644
--- a/cpp/include/raft/distance/detail/kernels/gram_matrix.cuh
+++ b/cpp/include/raft/distance/detail/kernels/gram_matrix.cuh
@@ -64,118 +64,118 @@ class GramMatrixBase {
   /** Convenience function to evaluate the Gram matrix for two vector sets.
    * Vector sets are provided in Matrix format
    *
+   * @param [in] handle raft handle
    * @param [in] x1 dense device matrix view, size [n1*n_cols]
    * @param [in] x2 dense device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 optional L2-norm of x1's rows for computation within RBF.
    * @param norm_x2 optional L2-norm of x2's rows for computation within RBF.
    */
-  void operator()(dense_input_matrix_view_t x1,
+  void operator()(raft::device_resources const& handle,
+                  dense_input_matrix_view_t x1,
                   dense_input_matrix_view_t x2,
                   dense_output_matrix_view_t out,
-                  raft::device_resources const& handle,
                   math_t* norm_x1 = nullptr,
                   math_t* norm_x2 = nullptr)
   {
-    evaluate(x1, x2, out, handle, norm_x1, norm_x2);
+    evaluate(handle, x1, x2, out, norm_x1, norm_x2);
   }

   /** Convenience function to evaluate the Gram matrix for two vector sets.
    * Vector sets are provided in Matrix format
    *
+   * @param [in] handle raft handle
    * @param [in] x1 csr device matrix view, size [n1*n_cols]
    * @param [in] x2 dense device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 optional L2-norm of x1's rows for computation within RBF.
    * @param norm_x2 optional L2-norm of x2's rows for computation within RBF.
    */
-  void operator()(csr_input_matrix_view_t x1,
+  void operator()(raft::device_resources const& handle,
+                  csr_input_matrix_view_t x1,
                   dense_input_matrix_view_t x2,
                   dense_output_matrix_view_t out,
-                  raft::device_resources const& handle,
                   math_t* norm_x1 = nullptr,
                   math_t* norm_x2 = nullptr)
   {
-    evaluate(x1, x2, out, handle, norm_x1, norm_x2);
+    evaluate(handle, x1, x2, out, norm_x1, norm_x2);
   }

   /** Convenience function to evaluate the Gram matrix for two vector sets.
    * Vector sets are provided in Matrix format
    *
+   * @param [in] handle raft handle
    * @param [in] x1 csr device matrix view, size [n1*n_cols]
    * @param [in] x2 csr device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 optional L2-norm of x1's rows for computation within RBF.
    * @param norm_x2 optional L2-norm of x2's rows for computation within RBF.
    */
-  void operator()(csr_input_matrix_view_t x1,
+  void operator()(raft::device_resources const& handle,
+                  csr_input_matrix_view_t x1,
                   csr_input_matrix_view_t x2,
                   dense_output_matrix_view_t out,
-                  raft::device_resources const& handle,
                   math_t* norm_x1 = nullptr,
                   math_t* norm_x2 = nullptr)
   {
-    evaluate(x1, x2, out, handle, norm_x1, norm_x2);
+    evaluate(handle, x1, x2, out, norm_x1, norm_x2);
   }

   // unfortunately, 'evaluate' cannot be templatized as it needs to be virtual

   /** Evaluate the Gram matrix for two vector sets using simple dot product.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 dense device matrix view, size [n1*n_cols]
    * @param [in] x2 dense device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 unused.
    * @param norm_x2 unused.
    */
-  virtual void evaluate(dense_input_matrix_view_t x1,
+  virtual void evaluate(raft::device_resources const& handle,
+                        dense_input_matrix_view_t x1,
                         dense_input_matrix_view_t x2,
                         dense_output_matrix_view_t out,
-                        raft::device_resources const& handle,
                         math_t* norm_x1,
                         math_t* norm_x2)
   {
-    linear(x1, x2, out, handle);
+    linear(handle, x1, x2, out);
   }

   /** Evaluate the Gram matrix for two vector sets using simple dot product.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 csr device matrix view, size [n1*n_cols]
    * @param [in] x2 dense device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 unused.
    * @param norm_x2 unused.
    */
-  virtual void evaluate(csr_input_matrix_view_t x1,
+  virtual void evaluate(raft::device_resources const& handle,
+                        csr_input_matrix_view_t x1,
                         dense_input_matrix_view_t x2,
                         dense_output_matrix_view_t out,
-                        raft::device_resources const& handle,
                         math_t* norm_x1,
                         math_t* norm_x2)
   {
-    linear(x1, x2, out, handle);
+    linear(handle, x1, x2, out);
   }

   /** Evaluate the Gram matrix for two vector sets using simple dot product.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 csr device matrix view, size [n1*n_cols]
    * @param [in] x2 csr device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 unused.
    * @param norm_x2 unused.
    */
-  virtual void evaluate(csr_input_matrix_view_t x1,
+  virtual void evaluate(raft::device_resources const& handle,
+                        csr_input_matrix_view_t x1,
                         csr_input_matrix_view_t x2,
                         dense_output_matrix_view_t out,
-                        raft::device_resources const& handle,
                         math_t* norm_x1,
                         math_t* norm_x2)
   {
-    linear(x1, x2, out, handle);
+    linear(handle, x1, x2, out);
   }

   /** Evaluate the Gram matrix for two vector sets using simple dot product.
@@ -340,15 +340,15 @@ class GramMatrixBase {
    *
    * Can be used as a building block for more complex kernel functions.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 dense device matrix view, size [n1*n_cols]
    * @param [in] x2 dense device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    */
-  void linear(dense_input_matrix_view_t x1,
+  void linear(raft::device_resources const& handle,
+              dense_input_matrix_view_t x1,
              dense_input_matrix_view_t x2,
-              dense_output_matrix_view_t out,
-              raft::device_resources const& handle)
+              dense_output_matrix_view_t out)
   {
     // check is_row_major consistency
     bool is_row_major = get_is_row_major(x1) && get_is_row_major(x2) && get_is_row_major(out);
@@ -416,15 +416,15 @@ class GramMatrixBase {
    *
    * Can be used as a building block for more complex kernel functions.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 csr device matrix view, size [n1*n_cols]
    * @param [in] x2 dense device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    */
-  void linear(csr_input_matrix_view_t x1,
+  void linear(raft::device_resources const& handle,
+              csr_input_matrix_view_t x1,
              dense_input_matrix_view_t x2,
-              dense_output_matrix_view_t out,
-              raft::device_resources const& handle)
+              dense_output_matrix_view_t out)
   {
     // check is_row_major consistency
     bool is_row_major = get_is_row_major(x2) && get_is_row_major(out);
@@ -453,15 +453,15 @@ class GramMatrixBase {
    *
    * Can be used as a building block for more complex kernel functions.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 csr device matrix view, size [n1*n_cols]
    * @param [in] x2 csr device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    */
-  void linear(csr_input_matrix_view_t x1,
+  void linear(raft::device_resources const& handle,
+              csr_input_matrix_view_t x1,
              csr_input_matrix_view_t x2,
-              dense_output_matrix_view_t out,
-              raft::device_resources const& handle)
+              dense_output_matrix_view_t out)
   {
     // check is_row_major consistency
     bool is_row_major = get_is_row_major(out);
diff --git a/cpp/include/raft/distance/detail/kernels/kernel_matrices.cuh b/cpp/include/raft/distance/detail/kernels/kernel_matrices.cuh
index 1117165c76..785c66a3a2 100644
--- a/cpp/include/raft/distance/detail/kernels/kernel_matrices.cuh
+++ b/cpp/include/raft/distance/detail/kernels/kernel_matrices.cuh
@@ -188,23 +188,23 @@ class PolynomialKernel : public GramMatrixBase<math_t> {
    * where x1_i is the i-th vector from the x1 set, and x2_k is k-th vector
    * in the x2 set, and < , > denotes dot product.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 dense device matrix view, size [n1*n_cols]
    * @param [in] x2 dense device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 unused.
    * @param norm_x2 unused.
    */
-  void evaluate(dense_input_matrix_view_t x1,
+  void evaluate(raft::device_resources const& handle,
+                dense_input_matrix_view_t x1,
                 dense_input_matrix_view_t x2,
                 dense_output_matrix_view_t out,
-                raft::device_resources const& handle,
                 math_t* norm_x1,
                 math_t* norm_x2)
   {
     bool is_row_major = GramMatrixBase<math_t>::get_is_row_major(out);
     int ld_out        = is_row_major ? out.stride(0) : out.stride(1);
-    GramMatrixBase<math_t>::linear(x1, x2, out, handle);
+    GramMatrixBase<math_t>::linear(handle, x1, x2, out);
     applyKernel(
       out.data_handle(), ld_out, out.extent(0), out.extent(1), is_row_major, handle.get_stream());
   }
@@ -215,23 +215,23 @@ class PolynomialKernel : public GramMatrixBase<math_t> {
    * where x1_i is the i-th vector from the x1 set, and x2_k is k-th vector
    * in the x2 set, and < , > denotes dot product.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 csr device matrix view, size [n1*n_cols]
    * @param [in] x2 dense device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 unused.
    * @param norm_x2 unused.
    */
-  void evaluate(csr_input_matrix_view_t x1,
+  void evaluate(raft::device_resources const& handle,
+                csr_input_matrix_view_t x1,
                 dense_input_matrix_view_t x2,
                 dense_output_matrix_view_t out,
-                raft::device_resources const& handle,
                 math_t* norm_x1,
                 math_t* norm_x2)
   {
     bool is_row_major = GramMatrixBase<math_t>::get_is_row_major(out);
     int ld_out        = is_row_major ? out.stride(0) : out.stride(1);
-    GramMatrixBase<math_t>::linear(x1, x2, out, handle);
+    GramMatrixBase<math_t>::linear(handle, x1, x2, out);
     applyKernel(
       out.data_handle(), ld_out, out.extent(0), out.extent(1), is_row_major, handle.get_stream());
   }
@@ -242,23 +242,23 @@ class PolynomialKernel : public GramMatrixBase<math_t> {
    * where x1_i is the i-th vector from the x1 set, and x2_k is k-th vector
    * in the x2 set, and < , > denotes dot product.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 csr device matrix view, size [n1*n_cols]
    * @param [in] x2 csr device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 unused.
    * @param norm_x2 unused.
    */
-  void evaluate(csr_input_matrix_view_t x1,
+  void evaluate(raft::device_resources const& handle,
+                csr_input_matrix_view_t x1,
                 csr_input_matrix_view_t x2,
                 dense_output_matrix_view_t out,
-                raft::device_resources const& handle,
                 math_t* norm_x1,
                 math_t* norm_x2)
   {
     bool is_row_major = GramMatrixBase<math_t>::get_is_row_major(out);
     int ld_out        = is_row_major ? out.stride(0) : out.stride(1);
-    GramMatrixBase<math_t>::linear(x1, x2, out, handle);
+    GramMatrixBase<math_t>::linear(handle, x1, x2, out);
     applyKernel(
       out.data_handle(), ld_out, out.extent(0), out.extent(1), is_row_major, handle.get_stream());
   }
@@ -346,23 +346,23 @@ class TanhKernel : public GramMatrixBase<math_t> {
    * where x1_i is the i-th vector from the x1 set, and x2_k is k-th vector
    * in the x2 set, and < , > denotes dot product.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 dense device matrix view, size [n1*n_cols]
    * @param [in] x2 dense device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 unused.
    * @param norm_x2 unused.
    */
-  void evaluate(dense_input_matrix_view_t x1,
+  void evaluate(raft::device_resources const& handle,
+                dense_input_matrix_view_t x1,
                 dense_input_matrix_view_t x2,
                 dense_output_matrix_view_t out,
-                raft::device_resources const& handle,
                 math_t* norm_x1,
                 math_t* norm_x2)
   {
     bool is_row_major = GramMatrixBase<math_t>::get_is_row_major(out);
     int ld_out        = is_row_major ? out.stride(0) : out.stride(1);
-    GramMatrixBase<math_t>::linear(x1, x2, out, handle);
+    GramMatrixBase<math_t>::linear(handle, x1, x2, out);
     applyKernel(
       out.data_handle(), ld_out, out.extent(0), out.extent(1), is_row_major, handle.get_stream());
   }
@@ -373,23 +373,23 @@ class TanhKernel : public GramMatrixBase<math_t> {
    * where x1_i is the i-th vector from the x1 set, and x2_k is k-th vector
    * in the x2 set, and < , > denotes dot product.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 csr device matrix view, size [n1*n_cols]
    * @param [in] x2 dense device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 unused.
    * @param norm_x2 unused.
    */
-  void evaluate(csr_input_matrix_view_t x1,
+  void evaluate(raft::device_resources const& handle,
+                csr_input_matrix_view_t x1,
                 dense_input_matrix_view_t x2,
                 dense_output_matrix_view_t out,
-                raft::device_resources const& handle,
                 math_t* norm_x1,
                 math_t* norm_x2)
   {
     bool is_row_major = GramMatrixBase<math_t>::get_is_row_major(out);
     int ld_out        = is_row_major ? out.stride(0) : out.stride(1);
-    GramMatrixBase<math_t>::linear(x1, x2, out, handle);
+    GramMatrixBase<math_t>::linear(handle, x1, x2, out);
     applyKernel(
       out.data_handle(), ld_out, out.extent(0), out.extent(1), is_row_major, handle.get_stream());
   }
@@ -400,23 +400,23 @@ class TanhKernel : public GramMatrixBase<math_t> {
    * where x1_i is the i-th vector from the x1 set, and x2_k is k-th vector
    * in the x2 set, and < , > denotes dot product.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 csr device matrix view, size [n1*n_cols]
    * @param [in] x2 csr device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 unused.
    * @param norm_x2 unused.
    */
-  void evaluate(csr_input_matrix_view_t x1,
+  void evaluate(raft::device_resources const& handle,
+                csr_input_matrix_view_t x1,
                 csr_input_matrix_view_t x2,
                 dense_output_matrix_view_t out,
-                raft::device_resources const& handle,
                 math_t* norm_x1,
                 math_t* norm_x2)
   {
     bool is_row_major = GramMatrixBase<math_t>::get_is_row_major(out);
     int ld_out        = is_row_major ? out.stride(0) : out.stride(1);
-    GramMatrixBase<math_t>::linear(x1, x2, out, handle);
+    GramMatrixBase<math_t>::linear(handle, x1, x2, out);
     applyKernel(
       out.data_handle(), ld_out, out.extent(0), out.extent(1), is_row_major, handle.get_stream());
   }
@@ -535,17 +535,17 @@ class RBFKernel : public GramMatrixBase<math_t> {
    * where x1_i is the i-th vector from the x1 set, and x2_k is k-th vector
    * in the x2 set, and | | euclidean distance.
    *
-   * @param [in] x2 dense device matrix view, size [n2*n_cols]
+   * @param [in] handle raft handle
+   * @param [in] x1 dense device matrix view, size [n1*n_cols]
    * @param [in] x2 dense device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 optional L2-norm of x1's rows for computation within RBF.
    * @param norm_x2 optional L2-norm of x2's rows for computation within RBF.
    */
-  void evaluate(dense_input_matrix_view_t x1,
+  void evaluate(raft::device_resources const& handle,
+                dense_input_matrix_view_t x1,
                 dense_input_matrix_view_t x2,
                 dense_output_matrix_view_t out,
-                raft::device_resources const& handle,
                 math_t* norm_x1,
                 math_t* norm_x2)
   {
@@ -568,7 +568,7 @@ class RBFKernel : public GramMatrixBase<math_t> {
     // compute L2expanded
     bool is_row_major = GramMatrixBase<math_t>::get_is_row_major(out);
     int ld_out        = is_row_major ? out.stride(0) : out.stride(1);
-    GramMatrixBase<math_t>::linear(x1, x2, out, handle);
+    GramMatrixBase<math_t>::linear(handle, x1, x2, out);
     applyKernel(out.data_handle(),
                 ld_out,
                 out.extent(0),
@@ -585,17 +585,17 @@ class RBFKernel : public GramMatrixBase<math_t> {
    * where x1_i is the i-th vector from the x1 set, and x2_k is k-th vector
    * in the x2 set, and | | euclidean distance.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 csr device matrix view, size [n1*n_cols]
    * @param [in] x2 dense device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 optional L2-norm of x1's rows for computation within RBF.
    * @param norm_x2 optional L2-norm of x2's rows for computation within RBF.
    */
-  void evaluate(csr_input_matrix_view_t x1,
+  void evaluate(raft::device_resources const& handle,
+                csr_input_matrix_view_t x1,
                 dense_input_matrix_view_t x2,
                 dense_output_matrix_view_t out,
-                raft::device_resources const& handle,
                 math_t* norm_x1,
                 math_t* norm_x2)
   {
@@ -618,7 +618,7 @@ class RBFKernel : public GramMatrixBase<math_t> {
     // compute L2expanded
     bool is_row_major = GramMatrixBase<math_t>::get_is_row_major(out);
     int ld_out        = is_row_major ? out.stride(0) : out.stride(1);
-    GramMatrixBase<math_t>::linear(x1, x2, out, handle);
+    GramMatrixBase<math_t>::linear(handle, x1, x2, out);
     applyKernel(out.data_handle(),
                 ld_out,
                 out.extent(0),
@@ -635,17 +635,17 @@ class RBFKernel : public GramMatrixBase<math_t> {
    * where x1_i is the i-th vector from the x1 set, and x2_k is k-th vector
    * in the x2 set, and | | euclidean distance.
    *
+   * @param [in] handle raft handle
    * @param [in] x1 csr device matrix view, size [n1*n_cols]
    * @param [in] x2 csr device matrix view, size [n2*n_cols]
    * @param [out] out dense device matrix view for the Gram matrix, size [n1*n2]
-   * @param [in] handle raft handle
    * @param norm_x1 optional L2-norm of x1's rows for computation within RBF.
    * @param norm_x2 optional L2-norm of x2's rows for computation within RBF.
    */
-  void evaluate(csr_input_matrix_view_t x1,
+  void evaluate(raft::device_resources const& handle,
+                csr_input_matrix_view_t x1,
                 csr_input_matrix_view_t x2,
                 dense_output_matrix_view_t out,
-                raft::device_resources const& handle,
                 math_t* norm_x1,
                 math_t* norm_x2)
   {
@@ -668,7 +668,7 @@ class RBFKernel : public GramMatrixBase<math_t> {
     // compute L2expanded
     bool is_row_major = GramMatrixBase<math_t>::get_is_row_major(out);
     int ld_out        = is_row_major ? out.stride(0) : out.stride(1);
-    GramMatrixBase<math_t>::linear(x1, x2, out, handle);
+    GramMatrixBase<math_t>::linear(handle, x1, x2, out);
     applyKernel(out.data_handle(),
                 ld_out,
                 out.extent(0),
diff --git a/cpp/test/distance/gram.cu b/cpp/test/distance/gram.cu
index c4277c7c98..47da201465 100644
--- a/cpp/test/distance/gram.cu
+++ b/cpp/test/distance/gram.cu
@@ -129,7 +129,7 @@ class GramMatrixTest : public ::testing::TestWithParam {
       : raft::make_device_strided_matrix_view(
           gram.data(), params.n1, params.n2, params.ld_out);

-    (*kernel)(x1_span, x2_span, out_span, handle);
+    (*kernel)(handle, x1_span, x2_span, out_span);

     naiveGramMatrixKernel(params.n1,
                           params.n2,
diff --git a/cpp/test/sparse/gram.cu b/cpp/test/sparse/gram.cu
index cf0ddfc921..e0bfb94f94 100644
--- a/cpp/test/sparse/gram.cu
+++ b/cpp/test/sparse/gram.cu
@@ -232,7 +232,7 @@ class GramMatrixTest : public ::testing::TestWithParam {
         gram.data(), params.n1, params.n2, params.ld_out);

     if (params.sparse_input == SparseType::DENSE) {
-      (*kernel)(x1_span, x2_span, out_span, handle);
+      (*kernel)(handle, x1_span, x2_span, out_span);
     } else {
       x1_csr_indptr.reserve(params.n1 + 1, stream);
       x1_csr_indices.reserve(params.n1 * params.n_cols, stream);
@@ -252,7 +252,7 @@ class GramMatrixTest : public ::testing::TestWithParam {
         x1_csr_structure);

       if (params.sparse_input == SparseType::MIX) {
-        (*kernel)(x1_csr, x2_span, out_span, handle);
+        (*kernel)(handle, x1_csr, x2_span, out_span);
       } else {
         x2_csr_indptr.reserve(params.n2 + 1, stream);
         x2_csr_indices.reserve(params.n2 * params.n_cols, stream);
@@ -270,7 +270,7 @@ class GramMatrixTest : public ::testing::TestWithParam {
          raft::device_span(x2_csr_data.data(), x2_csr_structure.get_nnz()),
          x2_csr_structure);

-        (*kernel)(x1_csr, x2_csr, out_span, handle);
+        (*kernel)(handle, x1_csr, x2_csr, out_span);
       }
     }
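Usage note (not part of the diff above): after this patch the raft handle is the first argument at every call site, as the test changes show. The sketch below illustrates calling the base (linear) Gram kernel with the new handle-first order. It is a minimal sketch, not the library's documented example: the detail namespace path, the default-constructed GramMatrixBase, the float instantiation, and the buffer sizes and values are assumptions; only the argument order itself comes from this patch.

// Minimal sketch of the handle-first interface introduced by this patch.
// Assumed (not taken from the patch): namespace path, default GramMatrixBase
// constructor, float instantiation, and the example sizes/values below.
#include <raft/core/device_resources.hpp>
#include <raft/core/device_mdspan.hpp>
#include <raft/distance/detail/kernels/gram_matrix.cuh>
#include <raft/util/cudart_utils.hpp>

#include <rmm/device_uvector.hpp>

#include <vector>

int main()
{
  raft::device_resources handle;
  auto stream = handle.get_stream();

  const int n1 = 4, n2 = 3, n_cols = 2;  // example sizes

  // Example row-major data, copied host -> device.
  std::vector<float> h_x1(n1 * n_cols, 1.0f);
  std::vector<float> h_x2(n2 * n_cols, 2.0f);
  rmm::device_uvector<float> x1(h_x1.size(), stream);
  rmm::device_uvector<float> x2(h_x2.size(), stream);
  rmm::device_uvector<float> gram(n1 * n2, stream);
  raft::copy(x1.data(), h_x1.data(), h_x1.size(), stream);
  raft::copy(x2.data(), h_x2.data(), h_x2.size(), stream);

  // Strided (row-major) device matrix views, as in the tests changed above.
  auto x1_view = raft::make_device_strided_matrix_view<const float, int, raft::layout_c_contiguous>(
    x1.data(), n1, n_cols, n_cols);
  auto x2_view = raft::make_device_strided_matrix_view<const float, int, raft::layout_c_contiguous>(
    x2.data(), n2, n_cols, n_cols);
  auto out_view = raft::make_device_strided_matrix_view<float, int, raft::layout_c_contiguous>(
    gram.data(), n1, n2, n2);

  // Handle comes first; the optional row norms remain trailing arguments.
  raft::distance::kernels::detail::GramMatrixBase<float> kernel;
  kernel(handle, x1_view, x2_view, out_view);

  handle.sync_stream();
  return 0;
}

The PolynomialKernel, TanhKernel, and RBFKernel evaluate overloads changed above follow the same handle-first order.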