
Commit

Fixing syntax error
Lokiiiiii committed Aug 10, 2022
1 parent b362213 commit 908f63c
Showing 2 changed files with 3 additions and 3 deletions.
4 changes: 2 additions & 2 deletions src/sagemaker_training/pytorch_xla.py
@@ -102,7 +102,7 @@ def _create_command(self):
                 "Please use a python script as the entry-point"
             )
 
-    def __pytorch_xla_command(self):
+    def _pytorch_xla_command(self):
         return [
             self._python_command(),
             "-m",
@@ -111,7 +111,7 @@ def __pytorch_xla_command(self):
             str(self._num_gpus),
         ]
 
-    def __check_compatibility(self):
+    def _check_compatibility(self):
         try:
             import torch_xla  # pylint: disable=unused-import
         except ModuleNotFoundError as exception:
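
The methods drop one leading underscore because a name that starts with two underscores inside a class body is subject to Python's name mangling: the interpreter stores it as _ClassName__name, so it is not reachable under the literal string "__check_compatibility". A minimal, self-contained sketch of that behavior, using a toy Runner class (hypothetical, not part of the toolkit):

    class Runner:
        def __check(self):   # double underscore: stored as _Runner__check
            return "mangled"

        def _check(self):    # single underscore: stored under its own name
            return "plain"

    print(hasattr(Runner, "__check"))         # False -- the attribute was mangled
    print(hasattr(Runner, "_Runner__check"))  # True
    print(hasattr(Runner, "_check"))          # True -- no mangling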
2 changes: 1 addition & 1 deletion test/unit/test_pytorch_xla.py
@@ -57,7 +57,7 @@ def num_gpus(instance_type):
 @pytest.mark.parametrize("instance_type", ["ml.p3.16xlarge", "ml.p3.2xlarge"])
 @pytest.mark.parametrize("cluster_size", [1, 4])
 class TestPyTorchXLARunner:
-    @patch.object(PyTorchXLARunner, "__check_compatibility")
+    @patch.object(PyTorchXLARunner, "_check_compatibility")
     def test_setup(self, *patches):
         for current_host in cluster:
             rank = cluster.index(current_host)
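
The test change follows from the same rename: unittest.mock's patch.object looks the target up by the literal attribute name, and under the old double-underscore spelling the class only exposes the mangled attribute, so patching "__check_compatibility" raised AttributeError. A rough, self-contained illustration with a stand-in ToyRunner class (hypothetical, not the real PyTorchXLARunner):

    from unittest.mock import patch

    class ToyRunner:
        def _check_compatibility(self):
            return "real check"

    # Works: the attribute exists under exactly this name.
    with patch.object(ToyRunner, "_check_compatibility", return_value="patched"):
        print(ToyRunner()._check_compatibility())  # -> patched

    # With a double-underscore method, the class attribute would be
    # _ToyRunner__check_compatibility, so
    # patch.object(ToyRunner, "__check_compatibility", ...) would raise AttributeError.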
