Set offload checkpoint policy
jaro-sevcik committed Mar 12, 2024
1 parent 378c6ad, commit ad18fb0
Showing 1 changed file with 6 additions and 0 deletions.

paxml/tasks/lm/model_params.py (6 additions, 0 deletions)
@@ -663,6 +663,12 @@ def task(self) -> pax_fiddle.Config[tasks_lib.SingleTask]:
               self.CHECKPOINT_POLICY)
     else:
       model_p.lm_tpl.stacked_transformer_tpl = stacked_transformer_tpl
+      if (self.CHECKPOINT_POLICY ==
+          layers.AutodiffCheckpointType.OFFLOAD_DOT_WITH_NO_BATCH_DIM):
+        model_p.lm_tpl.stacked_transformer_tpl.checkpoint_policy = (
+            self.CHECKPOINT_POLICY)
+        model_p.lm_tpl.stacked_transformer_tpl.remat = True
+
 
     # Enable bf16.
     model_p.fprop_dtype = self.FPROP_DTYPE
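For context, a minimal sketch of how a downstream experiment could opt into the behavior this commit adds. The class name LmSpmdAdafactorWithOffload is hypothetical, and it assumes the TransformerLmSpmdAdafactor template in this file (whose task() method the hunk above modifies) as the base; only the CHECKPOINT_POLICY value itself comes from the commit.

# Hypothetical experiment sketch, not part of this commit: selects the offload
# checkpoint policy that the change above now propagates to the non-repeated
# stacked transformer template.
from paxml.tasks.lm import model_params
from praxis import layers


class LmSpmdAdafactorWithOffload(model_params.TransformerLmSpmdAdafactor):
  """Example LM config that offloads rematerialized activations to the host."""

  # With this commit, choosing the offload policy also sets
  # stacked_transformer_tpl.checkpoint_policy and enables remat on the
  # stacked transformer built in task().
  CHECKPOINT_POLICY = (
      layers.AutodiffCheckpointType.OFFLOAD_DOT_WITH_NO_BATCH_DIM
  )

The commit sets remat = True alongside checkpoint_policy, presumably because a checkpoint policy on the non-repeated stacked transformer only takes effect once the stack is actually rematerialized.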
