diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml
index 14a74618413..1dc31da8e80 100644
--- a/.github/workflows/pr.yaml
+++ b/.github/workflows/pr.yaml
@@ -164,6 +164,8 @@ jobs:
       matrix_filter: map(select(.ARCH == "amd64")) | max_by(.CUDA_VER) | [.]
       build_type: pull-request
       script: ci/cudf_pandas_scripts/pandas-tests/run.sh pr
+      # Hide test failures because they exceed the GITHUB_STEP_SUMMARY output limit.
+      test_summary_show: "none"
   #pandas-tests-diff:
   #  # diff the results of running the Pandas unit tests and publish a job summary
   #  needs: [pandas-tests-main, pandas-tests-pr]
diff --git a/ci/cudf_pandas_scripts/pandas-tests/run.sh b/ci/cudf_pandas_scripts/pandas-tests/run.sh
index be5705a9548..482af42201f 100755
--- a/ci/cudf_pandas_scripts/pandas-tests/run.sh
+++ b/ci/cudf_pandas_scripts/pandas-tests/run.sh
@@ -1,12 +1,14 @@
 #!/usr/bin/env bash
-# SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES.
+# SPDX-FileCopyrightText: Copyright (c) 2023-2024, NVIDIA CORPORATION & AFFILIATES.
 # All rights reserved.
 # SPDX-License-Identifier: Apache-2.0
 
+set -euo pipefail
+
 PANDAS_TESTS_BRANCH=${1}
 rapids-logger "Running Pandas tests using $PANDAS_TESTS_BRANCH branch"
-rapids-logger "PR number: $RAPIDS_REF_NAME"
+rapids-logger "PR number: ${RAPIDS_REF_NAME:-"unknown"}"
 
 # Set the manylinux version used for downloading the wheels so that we test the
 # newer ABI wheels on the newer images that support their installation.
 
@@ -25,14 +27,16 @@ RAPIDS_PY_CUDA_SUFFIX="$(rapids-wheel-ctk-name-gen ${RAPIDS_CUDA_VERSION})"
 RAPIDS_PY_WHEEL_NAME="cudf_${manylinux}_${RAPIDS_PY_CUDA_SUFFIX}" rapids-download-wheels-from-s3 ./local-cudf-dep
 python -m pip install $(ls ./local-cudf-dep/cudf*.whl)[test,pandas-tests]
 
-git checkout $COMMIT
+RESULTS_DIR=${RAPIDS_TESTS_DIR:-"$(mktemp -d)"}
+RAPIDS_TESTS_DIR=${RAPIDS_TESTS_DIR:-"${RESULTS_DIR}/test-results"}/
+mkdir -p "${RAPIDS_TESTS_DIR}"
 
 bash python/cudf/cudf/pandas/scripts/run-pandas-tests.sh \
   -n 10 \
   --tb=line \
-  --skip-slow \
+  -m "not slow" \
   --max-worker-restart=3 \
-  --import-mode=importlib \
+  --junitxml="${RAPIDS_TESTS_DIR}/junit-cudf-pandas.xml" \
   --report-log=${PANDAS_TESTS_BRANCH}.json 2>&1
 
 # summarize the results and save them to artifacts:
diff --git a/ci/test_wheel_cudf.sh b/ci/test_wheel_cudf.sh
index 8c42651e299..b7e8f862ed5 100755
--- a/ci/test_wheel_cudf.sh
+++ b/ci/test_wheel_cudf.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright (c) 2023, NVIDIA CORPORATION.
+# Copyright (c) 2023-2024, NVIDIA CORPORATION.
 
 set -eou pipefail
 
@@ -22,9 +22,22 @@ RAPIDS_PY_WHEEL_NAME="cudf_${manylinux}_${RAPIDS_PY_CUDA_SUFFIX}" rapids-downloa
 # echo to expand wildcard before adding `[extra]` requires for pip
 python -m pip install $(echo ./dist/cudf*.whl)[test]
 
+RESULTS_DIR=${RAPIDS_TESTS_DIR:-"$(mktemp -d)"}
+RAPIDS_TESTS_DIR=${RAPIDS_TESTS_DIR:-"${RESULTS_DIR}/test-results"}/
+mkdir -p "${RAPIDS_TESTS_DIR}"
+
 # Run smoke tests for aarch64 pull requests
 if [[ "$(arch)" == "aarch64" && ${RAPIDS_BUILD_TYPE} == "pull-request" ]]; then
+    rapids-logger "Run smoke tests for cudf"
     python ./ci/wheel_smoke_test_cudf.py
 else
-    python -m pytest -n 8 ./python/cudf/cudf/tests
+    rapids-logger "pytest cudf"
+    pushd python/cudf/cudf/tests
+    python -m pytest \
+      --cache-clear \
+      --junitxml="${RAPIDS_TESTS_DIR}/junit-cudf.xml" \
+      --numprocesses=8 \
+      --dist=loadscope \
+      .
+    popd
 fi
diff --git a/ci/test_wheel_dask_cudf.sh b/ci/test_wheel_dask_cudf.sh
index e9162b816aa..74fcb43ddca 100755
--- a/ci/test_wheel_dask_cudf.sh
+++ b/ci/test_wheel_dask_cudf.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright (c) 2023, NVIDIA CORPORATION.
+# Copyright (c) 2023-2024, NVIDIA CORPORATION.
 
 set -eou pipefail
 
@@ -26,5 +26,15 @@ python -m pip install --no-deps ./local-cudf-dep/cudf*.whl
 # echo to expand wildcard before adding `[extra]` requires for pip
 python -m pip install $(echo ./dist/dask_cudf*.whl)[test]
 
+RESULTS_DIR=${RAPIDS_TESTS_DIR:-"$(mktemp -d)"}
+RAPIDS_TESTS_DIR=${RAPIDS_TESTS_DIR:-"${RESULTS_DIR}/test-results"}/
+mkdir -p "${RAPIDS_TESTS_DIR}"
+
 # Run tests in dask_cudf/tests and dask_cudf/io/tests
-python -m pytest -n 8 ./python/dask_cudf/dask_cudf/
+rapids-logger "pytest dask_cudf"
+pushd python/dask_cudf/dask_cudf
+python -m pytest \
+  --junitxml="${RAPIDS_TESTS_DIR}/junit-dask-cudf.xml" \
+  --numprocesses=8 \
+  .
+popd
diff --git a/python/cudf/cudf/pandas/scripts/run-pandas-tests.sh b/python/cudf/cudf/pandas/scripts/run-pandas-tests.sh
index 4fe152cc493..319e5ba80fc 100755
--- a/python/cudf/cudf/pandas/scripts/run-pandas-tests.sh
+++ b/python/cudf/cudf/pandas/scripts/run-pandas-tests.sh
@@ -16,12 +16,13 @@
 #
 # This script creates a `pandas-testing` directory if it doesn't exist
+set -euo pipefail
 
 # Grab the Pandas source corresponding to the version
 # of Pandas installed.
 PANDAS_VERSION=$(python -c "import pandas; print(pandas.__version__)")
 
-PYTEST_IGNORES="--ignore=tests/io/test_user_agent.py"
+PYTEST_IGNORES="--ignore=tests/io/test_user_agent.py --ignore=tests/interchange/test_impl.py"
 
 mkdir -p pandas-testing
 cd pandas-testing
 
@@ -92,7 +93,7 @@ cd pandas-tests/
 # test_overwrite_warns unsafely patchs over Series.mean affecting other tests when run in parallel
 # test_complex_series_frame_alignment randomly selects a DataFrames and axis to test but particular random selection(s) always fails
 # test_numpy_ufuncs_basic compares floating point values to unbounded precision, sometimes leading to failures
-TEST_NUMPY_UFUNCS_BASIC_FLAKY="test_numpy_ufuncs_basic[float-exp] \
+TEST_NUMPY_UFUNCS_BASIC_FLAKY="not test_numpy_ufuncs_basic[float-exp] \
 and not test_numpy_ufuncs_basic[float-exp2] \
 and not test_numpy_ufuncs_basic[float-expm1] \
 and not test_numpy_ufuncs_basic[float-log] \
@@ -183,11 +184,12 @@ and not test_numpy_ufuncs_basic[nullable_float-rad2deg]"
 
 PANDAS_CI="1" python -m pytest -p cudf.pandas \
     -m "not single_cpu and not db" \
-    -k "not test_overwrite_warns and not test_complex_series_frame_alignment and not $TEST_NUMPY_UFUNCS_BASIC_FLAKY" \
+    -k "not test_overwrite_warns and not test_complex_series_frame_alignment and $TEST_NUMPY_UFUNCS_BASIC_FLAKY" \
     --durations=50 \
     --import-mode=importlib \
     -o xfail_strict=True \
-    ${PYTEST_IGNORES} $@
+    ${PYTEST_IGNORES} \
+    "$@" || [ $? = 1 ]  # Exit success if exit code was 1 (permit test failures but not other errors)
 
 mv *.json ..
 cd ..