Ensure to_* IO methods respect pandas 2.2 keyword only deprecation (#14999)

This only really affected `to_hdf`

Authors:
  - Matthew Roeschke (https://github.com/mroeschke)

Approvers:
  - GALI PREM SAGAR (https://github.com/galipremsagar)

URL: #14999
mroeschke authored Feb 8, 2024
1 parent 7f28f2f commit 03f63ec
Showing 3 changed files with 9 additions and 9 deletions.
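For context, a minimal sketch of the pandas 2.2 behaviour this commit accommodates (illustrative only, not part of the commit; assumes pandas >= 2.2 with the optional PyTables dependency installed, and the file names and key below are placeholders):

import warnings

import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3]})

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Passing ``key`` positionally is deprecated in pandas 2.2; this is the
    # keyword-only deprecation for to_* IO methods the commit title refers to.
    df.to_hdf("positional.h5", "my_key")
print([str(w.message) for w in caught])

# Passing ``key`` by keyword, as this commit does, avoids the deprecation.
df.to_hdf("keyword.h5", key="my_key")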
4 changes: 2 additions & 2 deletions python/cudf/cudf/_fuzz_testing/utils.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2020-2023, NVIDIA CORPORATION.
+# Copyright (c) 2020-2024, NVIDIA CORPORATION.

import random

@@ -216,7 +216,7 @@ def pandas_to_avro(df, file_name=None, file_io_obj=None):
schema = get_avro_schema(df)
avro_schema = fastavro.parse_schema(schema)

-records = df.to_dict("records")
+records = df.to_dict(orient="records")
records = convert_nulls_to_none(records, df)

if file_name is not None:
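The to_dict change above follows the same keyword-only style. A rough sketch of the two call forms (illustrative only; whether a particular pandas release warns on the positional form is an assumption, not asserted here):

import pandas as pd

df = pd.DataFrame({"a": [1, None], "b": ["x", "y"]})

# Older style: ``orient`` passed positionally (the line removed above).
# records = df.to_dict("records")

# Style adopted above: ``orient`` passed by keyword.
records = df.to_dict(orient="records")
print(records)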
2 changes: 1 addition & 1 deletion python/cudf/cudf/io/hdf.py
@@ -27,4 +27,4 @@ def to_hdf(path_or_buf, key, value, *args, **kwargs):
"be GPU accelerated in the future"
)
pd_value = value.to_pandas()
-pd_value.to_hdf(path_or_buf, key, *args, **kwargs)
+pd_value.to_hdf(path_or_buf, key=key, *args, **kwargs)
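For completeness, a hedged usage sketch of the cudf wrapper after this change (assumes cudf and PyTables are installed; the file name and key are placeholders). As the warning text visible in the hunk above indicates, cudf's to_hdf converts the data to pandas and writes on the host:

import cudf

gdf = cudf.DataFrame({"a": [1, 2, 3]})

# The wrapper now forwards ``key`` by keyword to pandas.DataFrame.to_hdf,
# so this call does not hit the pandas 2.2 positional-argument deprecation
# inside cudf itself.
gdf.to_hdf("example.h5", key="example_key", format="table")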
12 changes: 6 additions & 6 deletions python/cudf/cudf/tests/test_hdf.py
@@ -63,15 +63,15 @@ def hdf_files(request, tmp_path_factory, pdf):
pdf = pdf.drop("col_category", axis=1)

fname_df = tmp_path_factory.mktemp("hdf") / "test_df.hdf"
-pdf.to_hdf(fname_df, "hdf_df_tests", format=request.param)
+pdf.to_hdf(fname_df, key="hdf_df_tests", format=request.param)

fname_series = {}
for column in pdf.columns:
fname_series[column] = (
tmp_path_factory.mktemp("hdf") / "test_series.hdf"
)
pdf[column].to_hdf(
-fname_series[column], "hdf_series_tests", format=request.param
+fname_series[column], key="hdf_series_tests", format=request.param
)
return (fname_df, fname_series, request.param, nrows)

@@ -116,8 +116,8 @@ def test_hdf_writer(tmpdir, pdf, gdf, complib, format):
pdf_df_fname = tmpdir.join("pdf_df.hdf")
gdf_df_fname = tmpdir.join("gdf_df.hdf")

-pdf.to_hdf(pdf_df_fname, "hdf_tests", format=format, complib=complib)
-gdf.to_hdf(gdf_df_fname, "hdf_tests", format=format, complib=complib)
+pdf.to_hdf(pdf_df_fname, key="hdf_tests", format=format, complib=complib)
+gdf.to_hdf(gdf_df_fname, key="hdf_tests", format=format, complib=complib)

assert os.path.exists(pdf_df_fname)
assert os.path.exists(gdf_df_fname)
@@ -135,10 +135,10 @@ def test_hdf_writer(tmpdir, pdf, gdf, complib, format):
gdf_series_fname = tmpdir.join(column + "_" + "gdf_series.hdf")

pdf[column].to_hdf(
-pdf_series_fname, "hdf_tests", format=format, complib=complib
+pdf_series_fname, key="hdf_tests", format=format, complib=complib
)
gdf[column].to_hdf(
-gdf_series_fname, "hdf_tests", format=format, complib=complib
+gdf_series_fname, key="hdf_tests", format=format, complib=complib
)

assert os.path.exists(pdf_series_fname)
