drop PL_DDP_PID
awaelchli committed Oct 21, 2020
1 parent e578306 commit 7d88cae
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions pytorch_lightning/accelerators/ddp_accelerator.py
@@ -119,7 +119,7 @@ def _call_children_scripts(self):
         for local_rank in range(1, self.trainer.num_processes):
             env_copy = os.environ.copy()
             env_copy['LOCAL_RANK'] = f'{local_rank}'
-            env_copy['PL_DDP_PID'] = str(self.trainer.data_parallel_device_ids[local_rank])
+            # env_copy['PL_DDP_PID'] = str(self.trainer.data_parallel_device_ids[local_rank])
             # env_copy.pop('CUDA_VISIBLE_DEVICES', None)
             print("pl id", env_copy["PL_DDP_PID"], " assigned to local rank", local_rank, self.trainer.data_parallel_device_ids)
             # remove env var if global seed not set
@@ -141,7 +141,7 @@ def _call_children_scripts(self):
             sleep(delay)
 
         # os.environ['PL_DDP_PID'] = str(self.trainer.data_parallel_device_ids[0])
-        os.environ['PL_DDP_PID'] = '0'
+        # os.environ['PL_DDP_PID'] = '0'
 
     def train(self):
         model = self.trainer.model
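For context, `_call_children_scripts` implements Lightning's script-based DDP launch: rank 0 keeps running in the current process and re-invokes its own script once per remaining local rank, passing the rank to each child through its environment. Below is a minimal, self-contained sketch of that pattern under stated assumptions; it is an illustration, not Lightning's actual implementation, and the names `launch_workers` / `num_processes` are hypothetical.

import os
import subprocess
import sys

def launch_workers(num_processes):
    """Hypothetical sketch: spawn one child process per local rank > 0.

    Mirrors the pattern in the diff above: each child receives a copy of the
    parent's environment with LOCAL_RANK set. Note there is no PL_DDP_PID
    here, matching the state after this commit.
    """
    command = [sys.executable] + sys.argv  # re-run the current script
    children = []
    for local_rank in range(1, num_processes):
        env_copy = os.environ.copy()
        env_copy['LOCAL_RANK'] = f'{local_rank}'
        children.append(subprocess.Popen(command, env=env_copy))
    return children

The design direction the commit appears to take: with only LOCAL_RANK exported to each child, workers can derive their device from the rank alone, so the explicit process-to-device-id mapping carried by PL_DDP_PID becomes unnecessary, which is exactly what the commented-out assignments remove.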
