Merge pull request rapidsai#151 from vyasr/feat/reenable_pr_ci
Reenable PR CI
vyasr authored Nov 5, 2023
2 parents 09d524a + 23ed065 commit 5b32287
Showing 14 changed files with 121 additions and 68 deletions.
105 changes: 75 additions & 30 deletions .github/workflows/pr.yaml
@@ -13,7 +13,14 @@ jobs:
  pr-builder:
    needs:
      - checks
      - changed_files
      - conda-cpp-build
      - conda-cpp-tests
      - conda-python-build
      - conda-python-cudf-tests
      - conda-python-other-tests
      - conda-java-tests
      - conda-notebook-tests
      - docs-build
      - wheel-build-cudf
      - wheel-build-cudf-main
      - wheel-tests-cudf
@@ -31,50 +38,88 @@ jobs:
    uses: rapidsai/shared-workflows/.github/workflows/[email protected]
    with:
      enable_check_generated_files: false
  changed_files:
    runs-on: ubuntu-latest
    name: Test changed files
    outputs:
      cudf_files_changed: ${{ steps.changed_files.outputs.cudf_files_changed }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Get changed files in cudf
        id: changed_files
        run: |
          git fetch origin branch-23.10-xdf
          diff=$(git diff --name-only origin/branch-23.10-xdf..HEAD python/cudf)
          if [ -n "$diff" ]; then
            has_changes='true'
          else
            has_changes='false'
          fi
          echo "Found changed: ${has_changes}"
          echo "cudf_files_changed=${has_changes}" >> "$GITHUB_OUTPUT"
  wheel-build-cudf:
  conda-cpp-build:
    needs: checks
    secrets: inherit
    uses: rapidsai/shared-workflows/.github/workflows/[email protected]
    uses: rapidsai/shared-workflows/.github/workflows/[email protected]
    with:
      matrix_filter: map(select(.ARCH == "amd64" and .PY_VER == "3.10" and (.CUDA_VER == "11.8.0" or .CUDA_VER == "12.0.1")))
      build_type: pull-request
      script: "ci/build_wheel_cudf.sh"
  conda-cpp-tests:
    needs: conda-cpp-build
    secrets: inherit
    uses: rapidsai/shared-workflows/.github/workflows/[email protected]
    with:
      build_type: pull-request
  conda-python-build:
    needs: conda-cpp-build
    secrets: inherit
    uses: rapidsai/shared-workflows/.github/workflows/[email protected]
    with:
      build_type: pull-request
  conda-python-cudf-tests:
    needs: conda-python-build
    secrets: inherit
    uses: rapidsai/shared-workflows/.github/workflows/[email protected]
    with:
      build_type: pull-request
      test_script: "ci/test_python_cudf.sh"
  conda-python-other-tests:
    # Tests for dask_cudf, custreamz, cudf_kafka are separated for CI parallelism
    needs: conda-python-build
    secrets: inherit
    uses: rapidsai/shared-workflows/.github/workflows/[email protected]
    with:
      build_type: pull-request
      test_script: "ci/test_python_other.sh"
  conda-java-tests:
    needs: conda-cpp-build
    secrets: inherit
    uses: rapidsai/shared-workflows/.github/workflows/[email protected]
    with:
      build_type: pull-request
      node_type: "gpu-v100-latest-1"
      arch: "amd64"
      container_image: "rapidsai/ci-conda:latest"
      run_script: "ci/test_java.sh"
  conda-notebook-tests:
    needs: conda-python-build
    secrets: inherit
    uses: rapidsai/shared-workflows/.github/workflows/[email protected]
    with:
      build_type: pull-request
      node_type: "gpu-v100-latest-1"
      arch: "amd64"
      container_image: "rapidsai/ci-conda:latest"
      run_script: "ci/test_notebooks.sh"
  docs-build:
    needs: conda-python-build
    secrets: inherit
    uses: rapidsai/shared-workflows/.github/workflows/[email protected]
    with:
      build_type: pull-request
      node_type: "gpu-v100-latest-1"
      arch: "amd64"
      container_image: "rapidsai/ci-conda:latest"
      run_script: "ci/build_docs.sh"
  wheel-build-cudf-main:
    needs: checks
    secrets: inherit
    uses: rapidsai/shared-workflows/.github/workflows/[email protected]
    with:
      matrix_filter: map(select(.ARCH == "amd64" and .PY_VER == "3.10" and (.CUDA_VER == "11.8.0" or .CUDA_VER == "12.0.1")))
      build_type: pull-request
      script: "ci/build_wheel_cudf.sh main"
  wheel-build-cudf:
    needs: checks
    secrets: inherit
    uses: rapidsai/shared-workflows/.github/workflows/[email protected]
    with:
      build_type: pull-request
      script: "ci/build_wheel_cudf.sh"
  wheel-tests-cudf:
    if: needs.changed_files.outputs.cudf_files_changed == 'true'
    needs: [wheel-build-cudf, changed_files]
    needs: wheel-build-cudf
    secrets: inherit
    uses: rapidsai/shared-workflows/.github/workflows/[email protected]
    with:
      matrix_filter: map(select(.ARCH == "amd64" and .PY_VER == "3.10" and (.CUDA_VER == "11.8.0" or .CUDA_VER == "12.0.1")))
      build_type: pull-request
      script: ci/test_wheel_cudf.sh
  wheel-build-dask-cudf:
1 change: 0 additions & 1 deletion conda/environments/all_cuda-118_arch-x86_64.yaml
@@ -9,7 +9,6 @@ channels:
- nvidia
dependencies:
- aiobotocore>=2.2.0
- aws-sdk-cpp<1.11
- benchmark==1.8.0
- boto3>=1.21.21
- botocore>=1.24.21
1 change: 0 additions & 1 deletion conda/environments/all_cuda-120_arch-x86_64.yaml
@@ -9,7 +9,6 @@ channels:
- nvidia
dependencies:
- aiobotocore>=2.2.0
- aws-sdk-cpp<1.11
- benchmark==1.8.0
- boto3>=1.21.21
- botocore>=1.24.21
1 change: 1 addition & 0 deletions conda/recipes/cudf/meta.yaml
@@ -103,6 +103,7 @@ requirements:
    - nvtx >=0.2.1
    - packaging
    - cachetools
    - rich

test:
  requires:
14 changes: 13 additions & 1 deletion conda/recipes/cudf_kafka/build.sh
@@ -1,4 +1,16 @@
# Copyright (c) 2020-2022, NVIDIA CORPORATION.
# Copyright (c) 2020-2023, NVIDIA CORPORATION.

# This assumes the script is executed from the root of the repo directory
# Need to set CUDA_HOME inside conda environments because the hacked together
# setup.py for cudf-kafka searches that way.
# TODO: Remove after https://github.com/rapidsai/cudf/pull/14292 updates
# cudf_kafka to use scikit-build
CUDA_MAJOR=${RAPIDS_CUDA_VERSION%%.*}
if [[ ${CUDA_MAJOR} == "12" ]]; then
    target_name="x86_64-linux"
    if [[ ! $(arch) == "x86_64" ]]; then
        target_name="sbsa-linux"
    fi
    export CUDA_HOME="${PREFIX}/targets/${target_name}/"
fi
./build.sh -v cudf_kafka
11 changes: 11 additions & 0 deletions conda/recipes/cudf_kafka/meta.yaml
@@ -33,6 +33,9 @@ build:
    - SCCACHE_S3_KEY_PREFIX=cudf-kafka-linux64 # [linux64]
    - SCCACHE_S3_USE_SSL
    - SCCACHE_S3_NO_CREDENTIALS
    # TODO: Remove after https://github.com/rapidsai/cudf/pull/14292 updates
    # cudf_kafka to use scikit-build
    - RAPIDS_CUDA_VERSION

requirements:
  build:
@@ -41,13 +44,21 @@ requirements:
    - {{ compiler('cxx') }}
    - ninja
    - sysroot_{{ target_platform }} {{ sysroot_version }}
    # TODO: Remove after https://github.com/rapidsai/cudf/pull/14292 updates
    # cudf_kafka to use scikit-build
    {% if cuda_major == "12" %}
    - cuda-gdb
    {% endif %}
  host:
    - python
    - cython >=3.0.0
    - cuda-version ={{ cuda_version }}
    - cudf ={{ version }}
    - libcudf_kafka ={{ version }}
    - setuptools
    {% if cuda_major == "12" %}
    - cuda-cudart-dev
    {% endif %}
  run:
    - python
    - {{ pin_compatible('cuda-version', max_pin='x', min_pin='x') }}
3 changes: 0 additions & 3 deletions conda/recipes/libcudf/conda_build_config.yaml
@@ -22,9 +22,6 @@ gbench_version:
gtest_version:
  - ">=1.13.0"

aws_sdk_cpp_version:
  - "<1.11"

libarrow_version:
  - "=12"
2 changes: 0 additions & 2 deletions conda/recipes/libcudf/meta.yaml
@@ -74,7 +74,6 @@ requirements:
    - gtest {{ gtest_version }}
    - gmock {{ gtest_version }}
    - zlib {{ zlib_version }}
    - aws-sdk-cpp {{ aws_sdk_cpp_version }}

outputs:
  - name: libcudf
@@ -108,7 +107,6 @@ outputs:
        - dlpack {{ dlpack_version }}
        - gtest {{ gtest_version }}
        - gmock {{ gtest_version }}
        - aws-sdk-cpp {{ aws_sdk_cpp_version }}
    test:
      commands:
        - test -f $PREFIX/lib/libcudf.so
1 change: 0 additions & 1 deletion dependencies.yaml
@@ -234,7 +234,6 @@ dependencies:
          - libkvikio==23.10.*
      - output_types: conda
        packages:
          - aws-sdk-cpp<1.11
          - fmt>=9.1.0,<10
          - &gbench benchmark==1.8.0
          - &gtest gtest>=1.13.0
37 changes: 13 additions & 24 deletions docs/cudf/source/cudf_pandas/index.rst
@@ -31,31 +31,20 @@ Speeding up pandas with cuDF
``cudf.pandas`` can be used with Jupyter Notebooks or any Python script just by
loading a notebook extension or adding one command-line flag:

.. list-table::
   :widths: 1 1
   :header-rows: 1

   * - Python Script
     - Notebook

   * - .. code-block:: python

         import pandas as pd
         df = pd.read_csv("filepath")
         df.groupby("col").mean()
         df.rolling(window=3).sum()

         # python -m cudf.pandas script.py


     - .. code-block:: python

         %load_ext cudf.pandas

         import pandas as pd
         df = pd.read_csv("filepath")
         df.groupby("col").mean()
         df.rolling(window=3).sum()
+--------------------------------------+------------------------------------+
| Python Script | Notebook |
+======================================+====================================+
| .. code-block:: python | .. code-block:: python |
| | |
| import pandas as pd | %load_ext cudf.pandas |
| df = pd.read_csv("filepath") | |
| df.groupby("col").mean() | import pandas as pd |
| df.rolling(window=3).sum() | df = pd.read_csv("filepath") |
| | df.groupby("col").mean() |
| # python -m cudf.pandas script.py | df.rolling(window=3).sum() |
| | |
+--------------------------------------+------------------------------------+


With cuDF's pandas Accelerator Mode, you can take pandas from worst-to-first on
4 changes: 2 additions & 2 deletions docs/cudf/source/user_guide/pandas-comparison.md
@@ -15,7 +15,7 @@ filtering, concatenating, joining, groupby and window operations -
among many others.

The best way to check if we support a particular Pandas API is to search
our [API docs](/api_docs/index).
our [API docs](/user_guide/api_docs/index).

## Data types

@@ -145,7 +145,7 @@ For example, `s.sum()` is not guaranteed to produce identical results
to Pandas nor produce identical results from run to run, when `s` is a
Series of floats. If you need to compare floating point results, you
should typically do so using the functions provided in the
[`cudf.testing`](/api_docs/general_utilities)
[`cudf.testing`](/user_guide/api_docs/general_utilities)
module, which allow you to compare values up to a desired precision.
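
For context, a minimal sketch of how the `cudf.testing` helpers referenced above are typically used; the series values and the `check_exact` argument are illustrative assumptions, not part of this diff:

    import cudf
    from cudf.testing import assert_series_equal

    # Two float Series whose values differ only by floating-point noise,
    # as can happen when GPU reductions run in a different order.
    a = cudf.Series([0.1, 0.2, 0.3])
    b = cudf.Series([0.1, 0.2, 0.30000000000000004])

    # Compares values up to a tolerance instead of requiring bitwise equality.
    assert_series_equal(a, b, check_exact=False)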

## Column names
2 changes: 1 addition & 1 deletion python/cudf/cudf/core/multiindex.py
@@ -138,7 +138,7 @@ def __init__(
        if len(levels) == 0:
            raise ValueError("Must pass non-zero number of levels/codes")
        if not isinstance(codes, cudf.DataFrame) and not isinstance(
            codes[0], (abc.Sequence, np.ndarray)
            codes[0], (abc.Sequence, np.ndarray, cp.ndarray)
        ):
            raise TypeError("Codes is not a Sequence of sequences")

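For context, a minimal sketch of what the relaxed `isinstance` check above permits; the level and code values are illustrative assumptions, not taken from this diff:

    import cudf
    import cupy as cp

    # With cp.ndarray accepted, codes can be device arrays rather than only
    # host sequences or NumPy arrays.
    levels = [["a", "b"], [1, 2]]
    codes = [cp.array([0, 0, 1, 1]), cp.array([0, 1, 0, 1])]
    mi = cudf.MultiIndex(levels=levels, codes=codes)
    print(mi)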
3 changes: 2 additions & 1 deletion python/cudf/cudf/tests/test_no_cuinit.py
@@ -26,7 +26,7 @@ def cuda_gdb(request):
        return gdb
    else:
        output = subprocess.run(
            [gdb, "--version"], capture_output=True, text=True, cwd="/"
        )
        if output.returncode != 0:
            request.applymarker(
@@ -97,6 +97,7 @@ def test_cudf_create_series_cuinit(cuda_gdb):
        env=env,
        capture_output=True,
        text=True,
        cwd="/",
    )

    cuInit_called = output.stdout.find("in cuInit ()")
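For context, a minimal sketch of the `cwd="/"` pattern added in these tests; the child command is an illustrative assumption, and the intent described in the comment (keeping the current working directory, e.g. a source checkout, off the child's import path) is inferred rather than stated in the diff:

    import subprocess
    import sys

    # Launch the child interpreter from the filesystem root so that the
    # current working directory does not end up on the child's sys.path.
    result = subprocess.run(
        [sys.executable, "-c", "import cudf; print(cudf.__version__)"],
        capture_output=True,
        text=True,
        cwd="/",
    )
    print(result.stdout.strip())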
4 changes: 3 additions & 1 deletion python/cudf/cudf/tests/test_numba_import.py
@@ -41,6 +41,8 @@ def test_kernel(x):
)
def test_numba_mvc_enabled_cuda_11():
    cp = subprocess.run(
        [sys.executable, "-c", TEST_NUMBA_MVC_ENABLED], capture_output=True
        [sys.executable, "-c", TEST_NUMBA_MVC_ENABLED],
        capture_output=True,
        cwd="/",
    )
    assert cp.returncode == 0
