From f9f63b39fffab38447f4f106e842aea82b760d47 Mon Sep 17 00:00:00 2001
From: Pierre Tholoniat
Date: Thu, 23 Jul 2020 16:31:31 -0400
Subject: [PATCH] Fix typo.

---
 beginner_source/blitz/neural_networks_tutorial.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/beginner_source/blitz/neural_networks_tutorial.py b/beginner_source/blitz/neural_networks_tutorial.py
index 144dd3d144f..fe41e685f12 100644
--- a/beginner_source/blitz/neural_networks_tutorial.py
+++ b/beginner_source/blitz/neural_networks_tutorial.py
@@ -176,7 +176,7 @@ def num_flat_features(self, x):
 # -> loss
 #
 # So, when we call ``loss.backward()``, the whole graph is differentiated
-# w.r.t. the loss, and all Tensors in the graph that has ``requires_grad=True``
+# w.r.t. the loss, and all Tensors in the graph that have ``requires_grad=True``
 # will have their ``.grad`` Tensor accumulated with the gradient.
 #
 # For illustration, let us follow a few steps backward:
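
Note (not part of the patch): a minimal sketch of the behavior the corrected sentence describes, assuming standard PyTorch autograd semantics. After ``loss.backward()``, every tensor in the graph with ``requires_grad=True`` has the gradient accumulated into its ``.grad`` attribute. The tensor names below (``x``, ``w``) are illustrative, not from the tutorial.

    import torch

    x = torch.randn(3, requires_grad=True)
    w = torch.randn(3, requires_grad=True)

    loss = (w * x).sum()
    loss.backward()      # differentiates the whole graph w.r.t. the loss
    print(x.grad)        # d(loss)/dx, equal to w
    print(w.grad)        # d(loss)/dw, equal to x

    # Gradients are *accumulated* into .grad, not overwritten: a second
    # backward pass (on a freshly built graph) doubles the stored values.
    loss = (w * x).sum()
    loss.backward()
    print(x.grad)        # now 2 * w

Because of this accumulation, training loops typically call ``optimizer.zero_grad()`` before each backward pass.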