From 09460601a9f914511d87c12c4e0b04dc21df3086 Mon Sep 17 00:00:00 2001 From: Seth Weidman Date: Thu, 3 Oct 2019 14:14:18 -0700 Subject: [PATCH 1/2] First commit for reorganizing beginner tutorials --- beginner_source/blitz/cifar10_tutorial.py | 16 +++++ beginner_source/data_loading_tutorial.py | 4 +- beginner_source/pytorch_with_examples.rst | 37 ----------- beginner_source/transfer_learning_tutorial.py | 10 +-- index.rst | 65 +++++++++---------- 5 files changed, 55 insertions(+), 77 deletions(-) diff --git a/beginner_source/blitz/cifar10_tutorial.py b/beginner_source/blitz/cifar10_tutorial.py index 8760facfa16..2d396abdb55 100644 --- a/beginner_source/blitz/cifar10_tutorial.py +++ b/beginner_source/blitz/cifar10_tutorial.py @@ -185,6 +185,15 @@ def forward(self, x): print('Finished Training') ######################################################################## +# Let's quickly save our trained model: + +PATH = './data' +torch.save(net.state_dict(), PATH) + +######################################################################## +# See `here `_ +# for more details on saving PyTorch models. +# # 5. Test the network on the test data # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ # @@ -204,6 +213,13 @@ def forward(self, x): imshow(torchvision.utils.make_grid(images)) print('GroundTruth: ', ' '.join('%5s' % classes[labels[j]] for j in range(4))) +######################################################################## +# Next, let's load back in our saved model (note: saving and re-loading the model +# wasn't necessary here, we only did it to illustrate how to do so): + +net = Net() +net.load_state_dict(torch.load(PATH)) + ######################################################################## # Okay, now let us see what the neural network thinks these examples above are: diff --git a/beginner_source/data_loading_tutorial.py b/beginner_source/data_loading_tutorial.py index a40b7e911ee..c9b1a295f82 100644 --- a/beginner_source/data_loading_tutorial.py +++ b/beginner_source/data_loading_tutorial.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """ -Data Loading and Processing Tutorial -==================================== +Writing Custom Datasets, DataLoaders and Transforms +=================================================== **Author**: `Sasank Chilamkurthy `_ A lot of effort in solving any machine learning problem goes in to diff --git a/beginner_source/pytorch_with_examples.rst b/beginner_source/pytorch_with_examples.rst index 772ec58966e..a9f56268b25 100644 --- a/beginner_source/pytorch_with_examples.rst +++ b/beginner_source/pytorch_with_examples.rst @@ -123,43 +123,6 @@ network: .. includenodoc:: /beginner/examples_autograd/two_layer_net_custom_function.py -TensorFlow: Static Graphs -------------------------- - -PyTorch autograd looks a lot like TensorFlow: in both frameworks we -define a computational graph, and use automatic differentiation to -compute gradients. The biggest difference between the two is that -TensorFlow's computational graphs are **static** and PyTorch uses -**dynamic** computational graphs. - -In TensorFlow, we define the computational graph once and then execute -the same graph over and over again, possibly feeding different input -data to the graph. In PyTorch, each forward pass defines a new -computational graph. - -Static graphs are nice because you can optimize the graph up front; for -example a framework might decide to fuse some graph operations for -efficiency, or to come up with a strategy for distributing the graph -across many GPUs or many machines. 
If you are reusing the same graph -over and over, then this potentially costly up-front optimization can be -amortized as the same graph is rerun over and over. - -One aspect where static and dynamic graphs differ is control flow. For -some models we may wish to perform different computation for each data -point; for example a recurrent network might be unrolled for different -numbers of time steps for each data point; this unrolling can be -implemented as a loop. With a static graph the loop construct needs to -be a part of the graph; for this reason TensorFlow provides operators -such as ``tf.scan`` for embedding loops into the graph. With dynamic -graphs the situation is simpler: since we build graphs on-the-fly for -each example, we can use normal imperative flow control to perform -computation that differs for each input. - -To contrast with the PyTorch autograd example above, here we use -TensorFlow to fit a simple two-layer net: - -.. includenodoc:: /beginner/examples_autograd/tf_two_layer_net.py - `nn` module =========== diff --git a/beginner_source/transfer_learning_tutorial.py b/beginner_source/transfer_learning_tutorial.py index 795abbd63e7..265939ef228 100644 --- a/beginner_source/transfer_learning_tutorial.py +++ b/beginner_source/transfer_learning_tutorial.py @@ -1,12 +1,12 @@ # -*- coding: utf-8 -*- """ -Transfer Learning Tutorial -========================== +Transfer Learning for Computer Vision Tutorial +============================================== **Author**: `Sasank Chilamkurthy `_ -In this tutorial, you will learn how to train your network using -transfer learning. You can read more about the transfer learning at `cs231n -notes `__ +In this tutorial, you will learn how to train a convolutional neural network for +image classification using transfer learning. You can read more about the transfer +learning at `cs231n notes `__ Quoting these notes, diff --git a/index.rst b/index.rst index 88bc2c431a8..fc8b630760a 100644 --- a/index.rst +++ b/index.rst @@ -38,34 +38,11 @@ Getting Started :tooltip: Learn how to load and preprocess/augment data from a non trivial dataset :description: :doc:`/beginner/data_loading_tutorial` -.. customgalleryitem:: - :tooltip: This tutorial introduces the fundamental concepts of PyTorch through self-contained examples - :figure: /_static/img/thumbnails/examples.png - :description: :doc:`/beginner/pytorch_with_examples` - -.. customgalleryitem:: - :figure: /_static/img/thumbnails/sphx_glr_transfer_learning_tutorial_001.png - :tooltip: In transfer learning, a model created from one task is used in another - :description: :doc:`beginner/transfer_learning_tutorial` - -.. customgalleryitem:: - :figure: /_static/img/thumbnails/floppy.png - :tooltip: Explore use cases for the saving and loading of PyTorch models - :description: :doc:`beginner/saving_loading_models` - -.. .. galleryitem:: beginner/saving_loading_models.py - .. customgalleryitem:: :figure: /_static/img/thumbnails/pytorch_tensorboard.png :tooltip: Learn to use TensorBoard to visualize data and model training :description: :doc:`intermediate/tensorboard_tutorial` -.. customgalleryitem:: - :figure: /_static/img/torch.nn.png - :tooltip: Use torch.nn to create and train a neural network - :description: :doc:`beginner/nn_tutorial` - - .. raw:: html
@@ -80,9 +57,9 @@ Image :description: :doc:`intermediate/torchvision_tutorial` .. customgalleryitem:: - :figure: /_static/img/thumbnails/eye.png - :tooltip: Finetune and feature extract the torchvision models - :description: :doc:`beginner/finetuning_torchvision_models_tutorial` + :figure: /_static/img/thumbnails/sphx_glr_transfer_learning_tutorial_001.png + :tooltip: In transfer learning, a model created from one task is used in another + :description: :doc:`beginner/transfer_learning_tutorial` .. customgalleryitem:: :figure: /_static/img/stn/Five.gif @@ -247,7 +224,7 @@ Extending PyTorch
PyTorch in Other Languages -------------- +-------------------------- .. customgalleryitem:: :tooltip: Using the PyTorch C++ Frontend @@ -258,6 +235,24 @@ PyTorch in Other Languages
+PyTorch Fundamentals In-Depth +----------------------------- + +.. customgalleryitem:: + :tooltip: This tutorial introduces the fundamental concepts of PyTorch through self-contained examples + :figure: /_static/img/thumbnails/examples.png + :description: :doc:`/beginner/pytorch_with_examples` + +.. customgalleryitem:: + :figure: /_static/img/torch.nn.png + :tooltip: Use torch.nn to create and train a neural network + :description: :doc:`beginner/nn_tutorial` + +.. raw:: html + +
+ + .. ----------------------------------------- .. Page TOC .. ----------------------------------------- @@ -269,12 +264,7 @@ PyTorch in Other Languages beginner/deep_learning_60min_blitz beginner/data_loading_tutorial - beginner/pytorch_with_examples - beginner/transfer_learning_tutorial - beginner/deploy_seq2seq_hybrid_frontend_tutorial intermediate/tensorboard_tutorial - beginner/saving_loading_models - beginner/nn_tutorial .. toctree:: :maxdepth: 2 @@ -283,7 +273,7 @@ PyTorch in Other Languages :caption: Image intermediate/torchvision_tutorial - beginner/finetuning_torchvision_models_tutorial + beginner/transfer_learning_tutorial intermediate/spatial_transformer_tutorial advanced/neural_style_tutorial beginner/fgsm_tutorial @@ -357,3 +347,12 @@ PyTorch in Other Languages :caption: PyTorch in Other Languages advanced/cpp_frontend + +.. toctree:: + :maxdepth: 2 + :includehidden: + :hidden: + :caption: PyTorch Fundamentals In-Depth + + beginner/pytorch_with_examples + beginner/nn_tutorial From a95d19e32b5b913466678c1692dbfecac615fd6b Mon Sep 17 00:00:00 2001 From: Seth Weidman Date: Thu, 3 Oct 2019 15:20:51 -0700 Subject: [PATCH 2/2] Fix path in CIFAR tutorial --- beginner_source/blitz/cifar10_tutorial.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/beginner_source/blitz/cifar10_tutorial.py b/beginner_source/blitz/cifar10_tutorial.py index 2d396abdb55..730bf6ac986 100644 --- a/beginner_source/blitz/cifar10_tutorial.py +++ b/beginner_source/blitz/cifar10_tutorial.py @@ -187,7 +187,7 @@ def forward(self, x): ######################################################################## # Let's quickly save our trained model: -PATH = './data' +PATH = './cifar_net.pth' torch.save(net.state_dict(), PATH) ########################################################################