From 1a3c19d26ac32ced9c432f6307a511dca6d1e179 Mon Sep 17 00:00:00 2001
From: rht
Date: Fri, 15 Nov 2019 20:15:38 +0000
Subject: [PATCH] beginner/blitz/nn: Fix misleading wording about which term
 is differentiated against

---
 beginner_source/blitz/neural_networks_tutorial.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/beginner_source/blitz/neural_networks_tutorial.py b/beginner_source/blitz/neural_networks_tutorial.py
index 04b099a3066..8deb7fd1992 100644
--- a/beginner_source/blitz/neural_networks_tutorial.py
+++ b/beginner_source/blitz/neural_networks_tutorial.py
@@ -176,8 +176,9 @@ def num_flat_features(self, x):
 # -> loss
 #
 # So, when we call ``loss.backward()``, the whole graph is differentiated
-# w.r.t. the loss, and all Tensors in the graph that has ``requires_grad=True``
-# will have their ``.grad`` Tensor accumulated with the gradient.
+# w.r.t. the neural net parameters, and all Tensors in the graph that have
+# ``requires_grad=True`` will have their ``.grad`` Tensor accumulated with the
+# gradient.
 #
 # For illustration, let us follow a few steps backward:
 
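Not part of the patch: a minimal, self-contained sketch of the behavior the corrected comment describes, namely that after ``loss.backward()`` every Tensor in the graph with ``requires_grad=True`` has the gradient accumulated into its ``.grad`` field. The tiny ``nn.Linear`` model and the dummy tensors below are illustrative stand-ins for the tutorial's larger ``Net`` class, not code from the tutorial itself.

import torch
import torch.nn as nn

model = nn.Linear(4, 1)             # hypothetical stand-in for the tutorial's Net
criterion = nn.MSELoss()

x = torch.randn(2, 4)               # dummy input batch
target = torch.randn(2, 1)          # dummy target

loss = criterion(model(x), target)

print(model.weight.grad)            # None: no backward pass has run yet
loss.backward()                     # differentiate w.r.t. the model parameters
print(model.weight.grad)            # now holds d(loss)/d(weight)

# ``.grad`` accumulates rather than overwrites: a second backward pass adds
# to the existing gradient, which is why training loops zero the gradients
# (e.g. with model.zero_grad()) before each backward pass.
loss = criterion(model(x), target)  # fresh forward pass builds a new graph
loss.backward()
print(model.weight.grad)            # twice the first gradient

Running this prints ``None``, then the first gradient, then exactly double that gradient, which makes concrete both halves of the corrected sentence: the differentiation is with respect to the parameters, and the result is accumulated into ``.grad``.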