diff --git a/beginner_source/basics/autogradqs_tutorial.py b/beginner_source/basics/autogradqs_tutorial.py
index 8a92fc024a7..ad3d284fee3 100644
--- a/beginner_source/basics/autogradqs_tutorial.py
+++ b/beginner_source/basics/autogradqs_tutorial.py
@@ -47,7 +47,7 @@
 #
 # In this network, ``w`` and ``b`` are **parameters**, which we need to
 # optimize. Thus, we need to be able to compute the gradients of loss
-# function with respect to those variables. In orded to do that, we set
+# function with respect to those variables. In order to do that, we set
 # the ``requires_grad`` property of those tensors.
 
 #######################################################################
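
For reference, the passage this hunk corrects describes setting ``requires_grad`` on the parameter tensors ``w`` and ``b`` so that autograd can compute gradients of the loss with respect to them. Below is a minimal sketch of that pattern, assuming a single-layer network; the tensor names mirror the tutorial's prose, but the exact shapes and loss function are illustrative, since the surrounding code is not shown in this hunk:

```python
import torch

# Hypothetical shapes for illustration; not taken from this hunk.
x = torch.ones(5)   # input
y = torch.zeros(3)  # expected output

# Parameters created with requires_grad=True are tracked by autograd,
# so gradients of the loss with respect to them can be computed.
w = torch.randn(5, 3, requires_grad=True)
b = torch.randn(3, requires_grad=True)

z = torch.matmul(x, w) + b
loss = torch.nn.functional.binary_cross_entropy_with_logits(z, y)

loss.backward()  # populates w.grad and b.grad
print(w.grad)
print(b.grad)
```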