add deepspeed cpu Adam & AdamW
eljandoubi committed Nov 26, 2024
1 parent 1cdbb40 commit 53e5400
Showing 1 changed file with 13 additions and 0 deletions.
src/accelerate/utils/deepspeed.py
@@ -42,6 +42,19 @@ def map_pytorch_optim_to_deepspeed(optimizer):

    optimizer_class = DeepSpeedCPUAdam

    # For DeepSpeedCPUAdam: enable adamw_mode when the original optimizer is
    # an AdamW variant, so weight decay stays decoupled (AdamW semantics).
    if compare_versions("deepspeed", ">=", "0.3.1"):
        defaults["adamw_mode"] = False
        is_adamw = isinstance(optimizer, optim.AdamW)

        if is_bnb_available() and not is_adamw:
            import bitsandbytes.optim as bnb_opt

            is_adamw = isinstance(optimizer, (bnb_opt.AdamW, bnb_opt.AdamW32bit)) and optimizer.optim_bits == 32

        if is_adamw:
            defaults["adamw_mode"] = True

    # For DeepSpeedCPUAdagrad
    if compare_versions("deepspeed", ">=", "0.5.5"):
        # Check if the optimizer is PyTorch's Adagrad.
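The hunk's logic reduces to one question: is the wrapped optimizer an AdamW variant? Below is a minimal standalone sketch of that detection rule, using a hypothetical helper name wants_adamw_mode (the bitsandbytes branch and the defaults plumbing from the diff are left out):

    import torch
    import torch.optim as optim

    def wants_adamw_mode(optimizer):
        # DeepSpeedCPUAdam applies decoupled (AdamW-style) weight decay only
        # when constructed with adamw_mode=True; the commit turns it on
        # exactly when the source optimizer is torch.optim.AdamW (or a
        # 32-bit bitsandbytes AdamW, not shown in this sketch).
        return isinstance(optimizer, optim.AdamW)

    params = [torch.nn.Parameter(torch.zeros(2))]
    print(wants_adamw_mode(optim.AdamW(params, lr=1e-3)))  # True
    print(wants_adamw_mode(optim.Adam(params, lr=1e-3)))   # False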
