Skip to content

Commit

Permalink
Latest bnb no longer has the `optim_args` attribute on the optimizer
Browse files Browse the repository at this point in the history
  • Loading branch information
winglian committed Dec 23, 2024
1 parent 200c9eb commit 5d2a8a9
Showing 1 changed file with 7 additions and 1 deletion.
8 changes: 7 additions & 1 deletion src/accelerate/utils/deepspeed.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,13 @@ def map_pytorch_optim_to_deepspeed(optimizer):
if is_bnb_available() and not is_adaw:
import bitsandbytes.optim as bnb_opt

is_adaw = isinstance(optimizer, (bnb_opt.AdamW, bnb_opt.AdamW32bit)) and optimizer.optim_bits == 32
if isinstance(optimizer, (bnb_opt.AdamW, bnb_opt.AdamW32bit)):
try:
is_adaw = optimizer.optim_bits == 32
except AttributeError:
is_adaw = optimizer.args.optim_bits == 32
else:
is_adaw = False

if is_adaw:
defaults["adamw_mode"] = True
Expand Down

0 comments on commit 5d2a8a9

Please sign in to comment.