Fix hsdp_device_mesh=None when enabling HSDP with HYBRID_SHARD (#402)
Co-authored-by: haozhx23 <[email protected]>
Co-authored-by: Matthias Reso <[email protected]>
3 people authored Jun 20, 2024
1 parent 4e1466c commit e6b0f97
Showing 1 changed file with 3 additions and 4 deletions.
src/llama_recipes/finetuning.py
@@ -163,10 +163,9 @@ def main(**kwargs):
             wandb_run.config.update(peft_config)
         model.print_trainable_parameters()
 
-
-    hsdp_device_mesh = None
+    hsdp_device_mesh_plan = None
     if fsdp_config.hsdp and fsdp_config.sharding_strategy == ShardingStrategy.HYBRID_SHARD:
-        hsdp_device_mesh = hsdp_device_mesh(replica_group_size=fsdp_config.replica_group_size, sharding_group_size=fsdp_config.sharding_group_size)
+        hsdp_device_mesh_plan = hsdp_device_mesh(replica_group_size=fsdp_config.replica_group_size, sharding_group_size=fsdp_config.sharding_group_size)
         print("HSDP device mesh is ready")
 
     #setting up FSDP if enable_fsdp is enabled
@@ -189,7 +188,7 @@ def main(**kwargs):
             cpu_offload=CPUOffload(offload_params=True) if fsdp_config.fsdp_cpu_offload else None,
             mixed_precision=mixed_precision_policy if not fsdp_config.pure_bf16 else None,
             sharding_strategy=fsdp_config.sharding_strategy,
-            device_mesh=hsdp_device_mesh,
+            device_mesh=hsdp_device_mesh_plan,
             device_id=device_id,
             limit_all_gathers=True,
             sync_module_states=train_config.low_cpu_fsdp,
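Why the rename matters: the helper function hsdp_device_mesh is imported at module level in finetuning.py, so the old line hsdp_device_mesh = None rebound that name as a local variable inside main(). By the time the HSDP branch ran, the name referred to None rather than the helper, and calling it raised TypeError: 'NoneType' object is not callable whenever HSDP and HYBRID_SHARD were enabled. Binding the result to the distinct name hsdp_device_mesh_plan leaves the function name intact. Below is a minimal, self-contained sketch of the shadowing bug and the fix; the stub helper and the main_before_fix/main_after_fix names are illustrative, not from the repo.

# Minimal sketch of the name-shadowing bug this commit fixes. The stub below
# stands in for the real helper from llama_recipes.utils.fsdp_utils; it is
# not the actual implementation.

def hsdp_device_mesh(replica_group_size=None, sharding_group_size=None):
    """Stub for the helper that builds the HSDP device mesh."""
    return ("device_mesh", replica_group_size, sharding_group_size)

def main_before_fix():
    # Assigning to the same name makes it a local variable that shadows the
    # function above for the entire body of this function.
    hsdp_device_mesh = None
    hsdp_enabled = True  # pretend fsdp_config.hsdp + HYBRID_SHARD is set
    if hsdp_enabled:
        # The name is bound to None here, so this call raises
        # TypeError: 'NoneType' object is not callable.
        hsdp_device_mesh = hsdp_device_mesh(replica_group_size=2, sharding_group_size=4)
    return hsdp_device_mesh

def main_after_fix():
    # The commit's fix: a distinct name for the result leaves the function
    # name untouched, so the call resolves to the helper as intended.
    hsdp_device_mesh_plan = None
    hsdp_enabled = True
    if hsdp_enabled:
        hsdp_device_mesh_plan = hsdp_device_mesh(replica_group_size=2, sharding_group_size=4)
    return hsdp_device_mesh_plan

try:
    main_before_fix()
except TypeError as err:
    print("before fix:", err)          # before fix: 'NoneType' object is not callable
print("after fix:", main_after_fix())  # after fix: ('device_mesh', 2, 4)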