diff --git a/beginner_source/examples_nn/two_layer_net_optim.py b/beginner_source/examples_nn/two_layer_net_optim.py
index 95950b95729..82b67dcc1b0 100755
--- a/beginner_source/examples_nn/two_layer_net_optim.py
+++ b/beginner_source/examples_nn/two_layer_net_optim.py
@@ -33,7 +33,7 @@
 # Use the optim package to define an Optimizer that will update the weights of
 # the model for us. Here we will use Adam; the optim package contains many other
-# optimization algoriths. The first argument to the Adam constructor tells the
+# optimization algorithms. The first argument to the Adam constructor tells the
 # optimizer which Tensors it should update.
 learning_rate = 1e-4
 optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
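
For context, here is a minimal sketch of the training loop this comment sits in. The network shape and the dimensions N, D_in, H, D_out are assumptions modeled on the tutorial's typical setup, not taken from this diff:

# Sketch of the optim-based training loop (assumed setup, not part of the diff)
import torch

# Assumed batch size and layer dimensions for illustration
N, D_in, H, D_out = 64, 1000, 100, 10
x = torch.randn(N, D_in)
y = torch.randn(N, D_out)

model = torch.nn.Sequential(
    torch.nn.Linear(D_in, H),
    torch.nn.ReLU(),
    torch.nn.Linear(H, D_out),
)
loss_fn = torch.nn.MSELoss(reduction='sum')

# The first argument to the Adam constructor tells the optimizer which
# Tensors it should update -- here, all of the model's parameters.
learning_rate = 1e-4
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)

for t in range(500):
    y_pred = model(x)
    loss = loss_fn(y_pred, y)
    optimizer.zero_grad()  # clear gradients accumulated on the previous step
    loss.backward()        # compute gradients of the loss w.r.t. parameters
    optimizer.step()       # let the optimizer update the parameters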