Skip to content

Commit

Permalink
Enable nbqa pre-commit hooks for isort and black. (#12848)
Browse files Browse the repository at this point in the history
This enables `black` and `isort` linters for ipynb notebooks via [nbqa](https://github.com/nbQA-dev/nbQA). I propose this change to avoid manually linting notebooks like #12595. cc: @galipremsagar

Authors:
  - Bradley Dice (https://github.com/bdice)

Approvers:
  - GALI PREM SAGAR (https://github.com/galipremsagar)

URL: #12848
  • Loading branch information
bdice authored Feb 24, 2023
1 parent 77c2e03 commit 4f2f379
Show file tree
Hide file tree
Showing 5 changed files with 141 additions and 109 deletions.
10 changes: 10 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,16 @@ repos:
# https://github.com/PyCQA/pydocstyle/issues/603
additional_dependencies: [toml]
args: ["--config=pyproject.toml"]
      - repo: https://github.com/nbQA-dev/nbQA
rev: 1.6.3
hooks:
- id: nbqa-isort
# Use the cudf_kafka isort orderings in notebooks so that dask
# and RAPIDS packages have their own sections.
args: ["--settings-file=python/cudf_kafka/pyproject.toml"]
- id: nbqa-black
# Explicitly specify the pyproject.toml at the repo root, not per-project.
args: ["--config=pyproject.toml"]
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: v11.1.0
hooks:
Expand Down
1 change: 1 addition & 0 deletions docs/cudf/source/user_guide/10min.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
"\n",
"import cupy as cp\n",
"import pandas as pd\n",
"\n",
"import cudf\n",
"import dask_cudf\n",
"\n",
Expand Down
34 changes: 20 additions & 14 deletions docs/cudf/source/user_guide/cupy-interop.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,10 @@
"outputs": [],
"source": [
"import timeit\n",
"from packaging import version\n",
"\n",
"import cupy as cp\n",
"from packaging import version\n",
"\n",
"import cudf\n",
"\n",
"if version.parse(cp.__version__) >= version.parse(\"10.0.0\"):\n",
Expand Down Expand Up @@ -63,10 +64,13 @@
],
"source": [
"nelem = 10000\n",
"df = cudf.DataFrame({'a':range(nelem),\n",
" 'b':range(500, nelem + 500),\n",
" 'c':range(1000, nelem + 1000)}\n",
" )\n",
"df = cudf.DataFrame(\n",
" {\n",
" \"a\": range(nelem),\n",
" \"b\": range(500, nelem + 500),\n",
" \"c\": range(1000, nelem + 1000),\n",
" }\n",
")\n",
"\n",
"%timeit arr_cupy = cupy_from_dlpack(df.to_dlpack())\n",
"%timeit arr_cupy = df.values\n",
Expand Down Expand Up @@ -138,7 +142,7 @@
}
],
"source": [
"col = 'a'\n",
"col = \"a\"\n",
"\n",
"%timeit cola_cupy = cp.asarray(df[col])\n",
"%timeit cola_cupy = cupy_from_dlpack(df[col].to_dlpack())\n",
Expand Down Expand Up @@ -1088,14 +1092,16 @@
"metadata": {},
"outputs": [],
"source": [
"def cudf_to_cupy_sparse_matrix(data, sparseformat='column'):\n",
" \"\"\"Converts a cuDF object to a CuPy Sparse Column matrix.\n",
" \"\"\"\n",
" if sparseformat not in ('row', 'column',):\n",
"def cudf_to_cupy_sparse_matrix(data, sparseformat=\"column\"):\n",
" \"\"\"Converts a cuDF object to a CuPy Sparse Column matrix.\"\"\"\n",
" if sparseformat not in (\n",
" \"row\",\n",
" \"column\",\n",
" ):\n",
" raise ValueError(\"Let's focus on column and row formats for now.\")\n",
" \n",
"\n",
" _sparse_constructor = cp.sparse.csc_matrix\n",
" if sparseformat == 'row':\n",
" if sparseformat == \"row\":\n",
" _sparse_constructor = cp.sparse.csr_matrix\n",
"\n",
" return _sparse_constructor(cupy_from_dlpack(data.to_dlpack()))"
Expand All @@ -1121,8 +1127,8 @@
"nonzero = 1000\n",
"for i in range(20):\n",
" arr = cp.random.normal(5, 5, nelem)\n",
" arr[cp.random.choice(arr.shape[0], nelem-nonzero, replace=False)] = 0\n",
" df['a' + str(i)] = arr"
" arr[cp.random.choice(arr.shape[0], nelem - nonzero, replace=False)] = 0\n",
" df[\"a\" + str(i)] = arr"
]
},
{
Expand Down
Loading

0 comments on commit 4f2f379

Please sign in to comment.