From 3d17a740ce7094f1d49fd98d2ddd9f2495d9223d Mon Sep 17 00:00:00 2001
From: atqy
Date: Sat, 30 Jul 2022 00:13:04 -0700
Subject: [PATCH] test another notebook

---
 .../pytorch/data_parallel/yolov5/yolov5.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/training/distributed_training/pytorch/data_parallel/yolov5/yolov5.ipynb b/training/distributed_training/pytorch/data_parallel/yolov5/yolov5.ipynb
index f90b4ddb2f..3d783a3f3b 100644
--- a/training/distributed_training/pytorch/data_parallel/yolov5/yolov5.ipynb
+++ b/training/distributed_training/pytorch/data_parallel/yolov5/yolov5.ipynb
@@ -4,7 +4,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "# Distributed data parallel YOLOv5 training with PyTorch and SageMaker distributed\n",
+    "# Distributed data parallel YOLOv5 training with PyTorch and SageMaker distributed test\n",
     "\n",
     "[Amazon SageMaker's distributed library](https://docs.aws.amazon.com/sagemaker/latest/dg/distributed-training.html) can be used to train deep learning models faster and cheaper. The [data parallel](https://docs.aws.amazon.com/sagemaker/latest/dg/data-parallel.html) feature in this library (`smdistributed.dataparallel`) is a distributed data parallel training framework for PyTorch, TensorFlow, and MXNet.\n",
     "\n",
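
The context lines of the hunk describe `smdistributed.dataparallel`, SageMaker's data parallel training library. As a point of reference for readers of the patched notebook, below is a minimal sketch of how such a notebook typically launches a data-parallel PyTorch job through the SageMaker Python SDK. The `distribution` argument is the documented switch for the library; the entry point (train.py), framework version, instance count, and instance type are illustrative assumptions, not values taken from this patch.

# Minimal sketch: enabling smdistributed.dataparallel with the SageMaker
# Python SDK. Entry point and instance settings here are placeholders.
import sagemaker
from sagemaker.pytorch import PyTorch

session = sagemaker.Session()
role = sagemaker.get_execution_role()  # assumes a SageMaker notebook/Studio environment

estimator = PyTorch(
    entry_point="train.py",           # hypothetical training script
    role=role,
    framework_version="1.11.0",       # assumed; pick a version the library supports
    py_version="py38",
    instance_count=2,
    instance_type="ml.p4d.24xlarge",  # the library requires p3.16xlarge, p3dn.24xlarge, or p4d.24xlarge
    # Documented switch that activates the SageMaker data parallel library:
    distribution={"smdistributed": {"dataparallel": {"enabled": True}}},
    sagemaker_session=session,
)

estimator.fit()  # launches the distributed training job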