diff --git a/beginner_source/hyperparameter_tuning_tutorial.py b/beginner_source/hyperparameter_tuning_tutorial.py
index 202a2f1a30b..228879fa5f2 100644
--- a/beginner_source/hyperparameter_tuning_tutorial.py
+++ b/beginner_source/hyperparameter_tuning_tutorial.py
@@ -201,7 +201,7 @@ def forward(self, x):
 #
 # The checkpoint saving is optional, however, it is necessary if we wanted to use advanced
 # schedulers like
-# `Population Based Training `_.
+# `Population Based Training `_.
 # Also, by saving the checkpoint we can later load the trained models and validate them
 # on a test set. Lastly, saving checkpoints is useful for fault tolerance, and it allows
 # us to interrupt training and continue training later.
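
The hunk above only edits the tutorial's note on why checkpoint saving matters (schedulers such as Population Based Training, later validation on a test set, fault tolerance, and resuming interrupted runs). For context, the following is a minimal sketch of the save/restore pattern such a Ray Tune trainable relies on, assuming a Ray 2.x style API (ray.train.get_checkpoint, ray.train.report, ray.train.Checkpoint); the tiny model, the "checkpoint.pt" file name, and the dummy loss are illustrative placeholders, not the tutorial's actual code.

    import os
    import tempfile

    import torch
    import torch.nn as nn
    from ray import train
    from ray.train import Checkpoint


    def train_func(config):
        net = nn.Linear(10, 1)  # placeholder model standing in for the tutorial's network
        optimizer = torch.optim.SGD(net.parameters(), lr=config["lr"])
        start_epoch = 0

        # Resume from an existing checkpoint, e.g. after a PBT perturbation,
        # a crash, or an interrupted run.
        checkpoint = train.get_checkpoint()
        if checkpoint:
            with checkpoint.as_directory() as ckpt_dir:
                state = torch.load(os.path.join(ckpt_dir, "checkpoint.pt"))
                net.load_state_dict(state["model"])
                optimizer.load_state_dict(state["optimizer"])
                start_epoch = state["epoch"] + 1

        for epoch in range(start_epoch, config.get("epochs", 5)):
            # ... run one epoch of training/validation here; this loss is a stand-in metric.
            loss = float(torch.rand(1))

            # Save a checkpoint and report it to Tune together with the metrics,
            # so the scheduler can later restore or exploit this trial.
            with tempfile.TemporaryDirectory() as tmp_dir:
                torch.save(
                    {
                        "epoch": epoch,
                        "model": net.state_dict(),
                        "optimizer": optimizer.state_dict(),
                    },
                    os.path.join(tmp_dir, "checkpoint.pt"),
                )
                train.report({"loss": loss}, checkpoint=Checkpoint.from_directory(tmp_dir))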