-
Notifications
You must be signed in to change notification settings - Fork 915
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.
Already on GitHub? Sign in to your account
Add ability to enable rmm pool on cudf.pandas
import
#15628
Changes from 5 commits
ed73c18
2dd9c3b
583c87f
8d1c367
1f2132b
5ca4602
7693533
919fe0e
531a585
748e1f8
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change | ||||
---|---|---|---|---|---|---|
@@ -1,7 +1,9 @@ | ||||||
# SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. | ||||||
# SPDX-FileCopyrightText: Copyright (c) 2023-2024, NVIDIA CORPORATION & AFFILIATES. | ||||||
# All rights reserved. | ||||||
# SPDX-License-Identifier: Apache-2.0 | ||||||
|
||||||
import warnings | ||||||
|
||||||
from .magics import load_ipython_extension | ||||||
from .profiler import Profiler | ||||||
|
||||||
|
@@ -18,6 +20,46 @@ def install(): | |||||
loader = ModuleAccelerator.install("pandas", "cudf", "pandas") | ||||||
global LOADED | ||||||
LOADED = loader is not None | ||||||
import os | ||||||
|
||||||
if (rmm_mode := os.getenv("CUDF_PANDAS_RMM_MODE", None)) is not None: | ||||||
import rmm.mr | ||||||
from rmm._lib.memory_resource import get_free_device_memory | ||||||
|
||||||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This method is being added in: rapidsai/rmm#1567 |
||||||
# Check if a non-default memory resource is set | ||||||
current_mr = rmm.mr.get_current_device_resource() | ||||||
if not isinstance(current_mr, rmm.mr.CudaMemoryResource): | ||||||
warnings.warn( | ||||||
f"cudf.pandas detected an already configured memory resource, ignoring 'CUDF_PANDAS_RMM_MODE'={str(rmm_mode)}", | ||||||
UserWarning, | ||||||
) | ||||||
|
||||||
if rmm_mode == "cuda": | ||||||
mr = rmm.mr.CudaMemoryResource() | ||||||
rmm.mr.set_current_device_resource(mr) | ||||||
elif rmm_mode == "pool": | ||||||
rmm.mr.set_current_device_resource( | ||||||
rmm.mr.PoolMemoryResource( | ||||||
rmm.mr.get_current_device_resource(), | ||||||
initial_pool_size=get_free_device_memory(80), | ||||||
) | ||||||
) | ||||||
elif rmm_mode == "async": | ||||||
mr = rmm.mr.CudaAsyncMemoryResource( | ||||||
initial_pool_size=get_free_device_memory(80) | ||||||
) | ||||||
rmm.mr.set_current_device_resource(mr) | ||||||
elif rmm_mode == "managed": | ||||||
mr = rmm.mr.ManagedMemoryResource() | ||||||
rmm.mr.set_current_device_resource(mr) | ||||||
elif rmm_mode == "managed_pool": | ||||||
rmm.reinitialize( | ||||||
managed_memory=True, | ||||||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Why does this call reinitialize while other modes create and set the current device resource? |
||||||
pool_allocator=True, | ||||||
initial_pool_size=get_free_device_memory(80), | ||||||
) | ||||||
else: | ||||||
raise TypeError(f"Unsupported rmm mode: {rmm_mode}") | ||||||
|
||||||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
Suggested change
|
||||||
|
||||||
def pytest_load_initial_conftests(early_config, parser, args): | ||||||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment.
The reason will be displayed to describe this comment to others. Learn more.
Move this import to the top of the file.