@@ -22,27 +22,12 @@ Neural Network API
import torch.nn as nn # neural networks
import torch.nn.functional as F # layers, activations and more
import torch.optim as optim # optimizers e.g. gradient descent, ADAM, etc.
- from torch.jit import script, trace # hybrid frontend decorator and tracing jit

See `autograd <https://pytorch.org/docs/stable/autograd.html>`__,
`nn <https://pytorch.org/docs/stable/nn.html>`__,
`functional <https://pytorch.org/docs/stable/nn.html#torch-nn-functional>`__
and `optim <https://pytorch.org/docs/stable/optim.html>`__

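As a quick sketch of how these imports fit together (the ``TinyNet`` module, layer sizes, and learning rate below are illustrative assumptions, not part of the cheatsheet):

.. code-block:: python

    import torch
    import torch.nn as nn
    import torch.nn.functional as F
    import torch.optim as optim

    class TinyNet(nn.Module):                      # hypothetical two-layer network
        def __init__(self):
            super().__init__()
            self.fc1 = nn.Linear(8, 16)            # layers from torch.nn
            self.fc2 = nn.Linear(16, 2)

        def forward(self, x):
            return self.fc2(F.relu(self.fc1(x)))   # activation from torch.nn.functional

    model = TinyNet()
    opt = optim.SGD(model.parameters(), lr=0.1)    # optimizer from torch.optim
    out = model(torch.randn(4, 8))                 # forward pass on a dummy batch
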
- TorchScript and JIT
- -------------------
-
- .. code-block :: python
-
- torch.jit.trace() # takes your module or function and an example
- # data input, and traces the computational steps
- # that the data encounters as it progresses through the model
-
- @script # decorator used to indicate data-dependent
- # control flow within the code being traced
-
- See `Torchscript <https://pytorch.org/docs/stable/jit.html>`__
-

ONNX
----
@@ -225,8 +210,10 @@ Optimizers

opt = optim.x(model.parameters(), ... ) # create optimizer
opt.step() # update weights
- optim.X # where X is SGD, Adadelta, Adagrad, Adam,
- # AdamW, SparseAdam, Adamax, ASGD,
+ opt.zero_grad() # clear the gradients
+ optim.X # where X is SGD, AdamW, Adam,
+ # Adafactor, NAdam, RAdam, Adadelta,
+ # Adagrad, SparseAdam, Adamax, ASGD,
# LBFGS, RMSprop or Rprop

See `optimizers <https://pytorch.org/docs/stable/optim.html>`__
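
For context, a minimal training-step sketch using this optimizer API (the model, dummy data, and loss function below are illustrative assumptions):

.. code-block:: python

    import torch
    import torch.nn as nn
    import torch.nn.functional as F
    import torch.optim as optim

    model = nn.Linear(4, 1)                        # hypothetical model
    opt = optim.Adam(model.parameters(), lr=1e-3)  # create optimizer

    x = torch.randn(32, 4)                         # dummy batch of inputs
    y = torch.randn(32, 1)                         # dummy targets

    opt.zero_grad()                                # clear the gradients
    loss = F.mse_loss(model(x), y)                 # compute a loss
    loss.backward()                                # backpropagate
    opt.step()                                     # update weights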