diff --git a/beginner_source/basics/optimization_tutorial.py b/beginner_source/basics/optimization_tutorial.py
index 93aed46161d..c6c327f8511 100644
--- a/beginner_source/basics/optimization_tutorial.py
+++ b/beginner_source/basics/optimization_tutorial.py
@@ -163,7 +163,7 @@ def train_loop(dataloader, model, loss_fn, optimizer):
         optimizer.zero_grad()
 
         if batch % 100 == 0:
-            loss, current = loss.item(), (batch + 1) * len(X)
+            loss, current = loss.item(), batch * batch_size + len(X)
             print(f"loss: {loss:>7f} [{current:>5d}/{size:>5d}]")
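
The change affects only the progress counter printed during training: `(batch + 1) * len(X)` multiplies the batch index by the length of the *current* batch, which misstates the number of samples processed whenever the final batch is shorter than `batch_size`, while `batch * batch_size + len(X)` counts the full batches already consumed plus the current one. The new expression assumes a `batch_size` variable is in scope (the tutorial defines it as a hyperparameter earlier in the file). Below is a minimal standalone sketch of the difference; the dataset size and per-batch lengths are illustrative, not taken from the tutorial.

```python
# Illustrative comparison of the old and new progress formulas for a dataset
# whose size is not a multiple of the batch size (250 samples, batch_size=64).
batch_size = 64
size = 250                          # total number of samples (hypothetical)
batch_lengths = [64, 64, 64, 58]    # len(X) per batch; the last batch is short

for batch, len_X in enumerate(batch_lengths):
    old = (batch + 1) * len_X           # removed line's formula
    new = batch * batch_size + len_X    # added line's formula
    print(f"batch {batch}: old={old:>3d}  new={new:>3d}")

# On the last batch: old = 4 * 58 = 232, which misstates the progress,
# while new = 3 * 64 + 58 = 250, matching the samples actually seen.
```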