Skip to content

Commit

Permalink
Merge branch 'branch-0.20' into lists_concatenate_list_elements
Browse files Browse the repository at this point in the history
  • Loading branch information
ttnghia committed May 12, 2021
2 parents 41ed5eb + cdf09ad commit 216a8ab
Show file tree
Hide file tree
Showing 70 changed files with 1,152 additions and 866 deletions.
5 changes: 2 additions & 3 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ RUN apt update -y --fix-missing && \
git \
gcc-${CC} \
g++-${CXX} \
libboost-all-dev \
tzdata && \
apt-get autoremove -y && \
apt-get clean && \
Expand Down Expand Up @@ -68,8 +67,8 @@ RUN if [ -f /cudf/docker/package_versions.sh ]; \
conda env create --name cudf --file /cudf/conda/environments/cudf_dev_cuda${CUDA_SHORT_VERSION}.yml ; \
fi

ENV CC=/usr/bin/gcc-${CC}
ENV CXX=/usr/bin/g++-${CXX}
ENV CC=/opt/conda/envs/rapids/bin/gcc-${CC}
ENV CXX=/opt/conda/envs/rapids/bin/g++-${CXX}

# libcudf & cudf build/install
RUN source activate cudf && \
Expand Down
1 change: 0 additions & 1 deletion conda/environments/cudf_dev_cuda11.0.yml
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,6 @@ dependencies:
- dlpack
- arrow-cpp=1.0.1
- arrow-cpp-proc * cuda
- boost-cpp>=1.72.0
- double-conversion
- rapidjson
- flatbuffers
Expand Down
1 change: 0 additions & 1 deletion conda/environments/cudf_dev_cuda11.1.yml
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,6 @@ dependencies:
- dlpack
- arrow-cpp=1.0.1
- arrow-cpp-proc * cuda
- boost-cpp>=1.72.0
- double-conversion
- rapidjson
- flatbuffers
Expand Down
1 change: 0 additions & 1 deletion conda/environments/cudf_dev_cuda11.2.yml
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,6 @@ dependencies:
- dlpack
- arrow-cpp=1.0.1
- arrow-cpp-proc * cuda
- boost-cpp>=1.72.0
- double-conversion
- rapidjson
- flatbuffers
Expand Down
4 changes: 1 addition & 3 deletions conda/recipes/libcudf/meta.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -39,12 +39,10 @@ requirements:
- cudatoolkit {{ cuda_version }}.*
- arrow-cpp 1.0.1
- arrow-cpp-proc * cuda
- boost-cpp 1.72.0
- dlpack
run:
- {{ pin_compatible('cudatoolkit', max_pin='x.x') }}
- arrow-cpp-proc * cuda
- {{ pin_compatible('boost-cpp', max_pin='x.x.x') }}
- {{ pin_compatible('dlpack', max_pin='x.x') }}

test:
Expand All @@ -55,7 +53,7 @@ test:
- test -f $PREFIX/include/cudf/ast/transform.hpp
- test -f $PREFIX/include/cudf/ast/detail/linearizer.hpp
- test -f $PREFIX/include/cudf/ast/detail/operators.hpp
- test -f $PREFIX/include/cudf/ast/linearizer.hpp
- test -f $PREFIX/include/cudf/ast/nodes.hpp
- test -f $PREFIX/include/cudf/ast/operators.hpp
- test -f $PREFIX/include/cudf/binaryop.hpp
- test -f $PREFIX/include/cudf/labeling/label_bins.hpp
Expand Down
3 changes: 0 additions & 3 deletions cpp/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -121,8 +121,6 @@ find_package(ZLIB REQUIRED)
find_package(Threads REQUIRED)
# add third party dependencies using CPM
include(cmake/thirdparty/CUDF_GetCPM.cmake)
# find boost
include(cmake/thirdparty/CUDF_FindBoost.cmake)
# find jitify
include(cmake/thirdparty/CUDF_GetJitify.cmake)
# find thrust/cub
Expand Down Expand Up @@ -477,7 +475,6 @@ add_dependencies(cudf jitify_preprocess_run)
# Specify the target module library dependencies
target_link_libraries(cudf
PUBLIC ZLIB::ZLIB
Boost::filesystem
${ARROW_LIBRARIES}
cudf::Thrust
rmm::rmm)
Expand Down
2 changes: 0 additions & 2 deletions cpp/cmake/cudf-build-config.cmake.in
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,6 @@ find_dependency(ZLIB)

# add third party dependencies using CPM
include(@CUDF_SOURCE_DIR@/cmake/thirdparty/CUDF_GetCPM.cmake)
# find boost
include(@CUDF_SOURCE_DIR@/cmake/thirdparty/CUDF_FindBoost.cmake)
# find jitify
include(@CUDF_SOURCE_DIR@/cmake/thirdparty/CUDF_GetJitify.cmake)
# find thrust/cub
Expand Down
5 changes: 0 additions & 5 deletions cpp/cmake/cudf-config.cmake.in
Original file line number Diff line number Diff line change
Expand Up @@ -71,11 +71,6 @@ find_dependency(CUDAToolkit)
find_dependency(Threads)
find_dependency(ZLIB)

# Don't look for a Boost CMake configuration file because it adds the
# `-DBOOST_ALL_NO_LIB` and `-DBOOST_FILESYSTEM_DYN_LINK` compile defs
set(Boost_NO_BOOST_CMAKE ON)
find_dependency(Boost @CUDF_MIN_VERSION_Boost@ COMPONENTS filesystem)

find_dependency(Arrow @CUDF_VERSION_Arrow@)

set(ArrowCUDA_DIR "${Arrow_DIR}")
Expand Down
38 changes: 0 additions & 38 deletions cpp/cmake/thirdparty/CUDF_FindBoost.cmake

This file was deleted.

22 changes: 22 additions & 0 deletions cpp/cmake/thrust.patch
Original file line number Diff line number Diff line change
Expand Up @@ -42,3 +42,25 @@ index 1ffeef0..5e80800 100644
for (int ITEM = 1; ITEM < ITEMS_PER_THREAD; ++ITEM)
{
if (ITEMS_PER_THREAD * tid + ITEM < num_remaining)
diff --git a/thrust/system/cuda/detail/scan_by_key.h b/thrust/system/cuda/detail/scan_by_key.h
index fe4b321c..b3974c69 100644
--- a/thrust/system/cuda/detail/scan_by_key.h
+++ b/thrust/system/cuda/detail/scan_by_key.h
@@ -513,7 +513,7 @@ namespace __scan_by_key {
scan_op(scan_op_)
{
int tile_idx = blockIdx.x;
- Size tile_base = ITEMS_PER_TILE * tile_idx;
+ Size tile_base = ITEMS_PER_TILE * static_cast<Size>(tile_idx);
Size num_remaining = num_items - tile_base;

if (num_remaining > ITEMS_PER_TILE)
@@ -734,7 +734,7 @@ namespace __scan_by_key {
ScanOp scan_op,
AddInitToScan add_init_to_scan)
{
- int num_items = static_cast<int>(thrust::distance(keys_first, keys_last));
+ size_t num_items = static_cast<size_t>(thrust::distance(keys_first, keys_last));
size_t storage_size = 0;
cudaStream_t stream = cuda_cub::stream(policy);
bool debug_sync = THRUST_DEBUG_SYNC_FLAG;
9 changes: 1 addition & 8 deletions cpp/include/cudf/ast/detail/linearizer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -83,10 +83,7 @@ class linearizer;
* This class is a part of a "visitor" pattern with the `linearizer` class.
* Nodes inheriting from this class can accept visitors.
*/
class node {
friend class detail::linearizer;

private:
struct node {
virtual cudf::size_type accept(detail::linearizer& visitor) const = 0;
};

Expand All @@ -102,10 +99,6 @@ class node {
* resolved into intermediate data storage in shared memory.
*/
class linearizer {
friend class literal;
friend class column_reference;
friend class expression;

public:
/**
* @brief Construct a new linearizer object
Expand Down
47 changes: 0 additions & 47 deletions cpp/include/cudf/ast/detail/operators.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -753,43 +753,6 @@ struct operator_functor<ast_operator::NOT> {
}
};

#if 0
/**
* @brief Functor used to double-type-dispatch binary operators.
*
* This functor's `operator()` is templated to validate calls to its operators based on the input
* type, as determined by the `is_valid_binary_op` trait.
*
* @tparam OperatorFunctor Binary operator functor.
*/
template <typename OperatorFunctor>
struct double_dispatch_binary_operator_types {
template <typename LHS,
typename RHS,
typename F,
typename... Ts,
std::enable_if_t<is_valid_binary_op<OperatorFunctor, LHS, RHS>>* = nullptr>
CUDA_HOST_DEVICE_CALLABLE void operator()(F&& f, Ts&&... args)
{
f.template operator()<OperatorFunctor, LHS, RHS>(std::forward<Ts>(args)...);
}

template <typename LHS,
typename RHS,
typename F,
typename... Ts,
std::enable_if_t<!is_valid_binary_op<OperatorFunctor, LHS, RHS>>* = nullptr>
CUDA_HOST_DEVICE_CALLABLE void operator()(F&& f, Ts&&... args)
{
#ifndef __CUDA_ARCH__
CUDF_FAIL("Invalid binary operation.");
#else
cudf_assert(false && "Invalid binary operation.");
#endif
}
};
#endif

/**
* @brief Functor used to single-type-dispatch binary operators.
*
Expand Down Expand Up @@ -856,16 +819,6 @@ struct type_dispatch_binary_op {
F&& f,
Ts&&... args)
{
#if 0
// Double dispatch
/*
double_type_dispatcher(lhs_type,
rhs_type,
detail::double_dispatch_binary_operator_types<operator_functor<op>>{},
std::forward<F>(f),
std::forward<Ts>(args)...);
*/
#endif
// Single dispatch (assume lhs_type == rhs_type)
type_dispatcher(lhs_type,
detail::single_dispatch_binary_operator_types<operator_functor<op>>{},
Expand Down
Loading

0 comments on commit 216a8ab

Please sign in to comment.