Skip to content

Commit

Permalink
[XPU], support unified ckpt function (PaddlePaddle#9312)
Browse files Browse the repository at this point in the history
  • Loading branch information
cqulilujia authored Oct 25, 2024
1 parent 2bf3d7f commit b237ba7
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 2 deletions.
4 changes: 2 additions & 2 deletions paddlenlp/trainer/plugins/unified_checkpoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -1307,7 +1307,7 @@ def check_unified_checkpoint(args, model, resume_from_checkpoint, safe_serializa
else:
local_resume = False
local_resume = paddle.to_tensor([local_resume])
dist.all_reduce(local_resume, op=dist.ReduceOp.PROD)
dist.all_reduce(local_resume, op=dist.ReduceOp.MIN)
local_resume = local_resume.item()
return local_resume

Expand Down Expand Up @@ -1425,7 +1425,7 @@ def check_dynamic_load(args, weight_map, existed_files, is_master_weights=False,
else:
local_resume = False
local_resume = paddle.to_tensor([local_resume])
dist.all_reduce(local_resume, op=dist.ReduceOp.PROD)
dist.all_reduce(local_resume, op=dist.ReduceOp.MIN)
return local_resume.item()

# check whether the optimizer checkpoint files are complete.
Expand Down
6 changes: 6 additions & 0 deletions paddlenlp/trainer/trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -1793,6 +1793,12 @@ def _load_rng_state(self, checkpoint):
for i in range(core.get_cuda_device_count()):
core.default_cuda_generator(i).set_state(checkpoint_rng_state["cuda"][i])

if core.is_compiled_with_xpu():
if not len(checkpoint_rng_state["cuda"]) == core.get_xpu_device_count():
raise ValueError("Length of xpu state list should be equal to the xpu device count")
for i in range(core.get_xpu_device_count()):
core.default_xpu_generator(i).set_state(checkpoint_rng_state["cuda"][i])

if paddle.device.get_all_custom_device_type() is not None:
custom_device_type = paddle.device.get_all_custom_device_type()
for device in custom_device_type:
Expand Down

0 comments on commit b237ba7

Please sign in to comment.