From ae2bfc3e7fd2ea833cb484299b76b674e1361f05 Mon Sep 17 00:00:00 2001 From: Minh Dang <38881541+minhdang241@users.noreply.github.com> Date: Thu, 7 May 2020 17:14:03 +0700 Subject: [PATCH] Update nn_tutorial.py the description of @ is misleading. It should be matrix multiplication, not dot product. --- beginner_source/nn_tutorial.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/beginner_source/nn_tutorial.py b/beginner_source/nn_tutorial.py index 5f915a3d3ea..9f4af59ed7c 100644 --- a/beginner_source/nn_tutorial.py +++ b/beginner_source/nn_tutorial.py @@ -136,7 +136,7 @@ def model(xb): return log_softmax(xb @ weights + bias) ############################################################################### -# In the above, the ``@`` stands for the dot product operation. We will call +# In the above, the ``@`` stands for the matrix multiplication operation. We will call # our function on one batch of data (in this case, 64 images). This is # one *forward pass*. Note that our predictions won't be any better than # random at this stage, since we start with random weights.