@@ -11,7 +11,7 @@ Date updated: 7/30/18
```
import torch # root package
- from torch.utils.data import Dataset, Dataloader # dataset representation and loading
+ from torch.utils.data import Dataset, DataLoader # dataset representation and loading
```
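
A quick illustration of the corrected import: a minimal sketch of a map-style `Dataset` fed to a `DataLoader` (the `SquaresDataset` class and its sizes are made up for this example, not part of the cheat sheet):

```
import torch
from torch.utils.data import Dataset, DataLoader

class SquaresDataset(Dataset):                 # hypothetical toy dataset
    def __len__(self):
        return 10                              # number of samples
    def __getitem__(self, i):
        # return an (input, target) pair for index i
        return torch.tensor([float(i)]), torch.tensor([float(i * i)])

loader = DataLoader(SquaresDataset(), batch_size=4, shuffle=True)
for x, y in loader:
    print(x.shape, y.shape)                    # batches of shape [4, 1] (last batch [2, 1])
```
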
### Neural Network API
@@ -39,30 +39,25 @@ See [hybrid frontend](https://pytorch.org/docs/stable/hybridfrontend)
```
torch.onnx.export(model, dummy_data, "xxxx.proto") # exports an ONNX formatted model using a trained model, dummy data and the desired file name
model = onnx.load("alexnet.proto") # load an ONNX model
- onnx.checker.check_model(model) # check that the model IR is well formed
+ onnx.checker.check_model(model) # check that the model IR is well formed
onnx.helper.printable_graph(model.graph) # print a human readable representation of the graph
```

See [onnx](https://pytorch.org/docs/stable/onnx.html)

-
-
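
A minimal sketch of the export/check round trip above, assuming `torchvision` and the `onnx` package are installed (the `alexnet.proto` file name follows the snippet; `.onnx` is the more common extension today):

```
import torch
import torchvision
import onnx

model = torchvision.models.alexnet(pretrained=True)      # any trained model works here
dummy_input = torch.randn(1, 3, 224, 224)                 # dummy data with the expected input shape

torch.onnx.export(model, dummy_input, "alexnet.proto")    # write the ONNX-formatted model to disk

model_proto = onnx.load("alexnet.proto")                  # read it back as an ONNX ModelProto
onnx.checker.check_model(model_proto)                     # raises if the model IR is malformed
print(onnx.helper.printable_graph(model_proto.graph))     # human readable graph description
```
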
### Vision
```
from torchvision import datasets, models, transforms # vision datasets, architectures & transforms
import torchvision.transforms as transforms # composable transforms
```
-
See [torchvision](https://pytorch.org/docs/stable/torchvision/index.html)
### Distributed Training
```
import torch.distributed as dist # distributed communication
from torch.multiprocessing import Process # memory sharing processes
-
```
-
See [distributed](https://pytorch.org/docs/stable/distributed.html) and [multiprocessing](https://pytorch.org/docs/stable/multiprocessing.html)
@@ -79,11 +74,8 @@ x.clone() # clone of x
with torch.no_grad(): # code wrap that stops autograd from tracking tensor history
requires_grad=True # arg, when set to True, tracks computation history for future derivative calculations
```
-
See [tensor](https://pytorch.org/docs/stable/tensors.html)

-
-
### Dimensionality
```
@@ -121,7 +113,6 @@ else: #
net.to(device) # recursively convert their parameters and buffers to device specific tensors
mytensor.to(device) # copy your tensors to a device (gpu, cpu)
-
```
See [cuda](https://pytorch.org/docs/stable/cuda.html)
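
A minimal device-agnostic sketch tying these calls together (`net` and `mytensor` are placeholders, as in the snippet above):

```
import torch
import torch.nn as nn

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

net = nn.Linear(10, 2)                          # stand-in for any nn.Module
mytensor = torch.randn(4, 10)                   # stand-in for any input tensor

net.to(device)                                  # modules are moved in place
mytensor = mytensor.to(device)                  # tensors are NOT moved in place: keep the returned copy
output = net(mytensor)                          # computation runs on `device`
```
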
@@ -136,11 +127,9 @@ nn.RNN/LSTM/GRU # recurrent layers
nn.Dropout(p=0.5, inplace=False) # dropout layer for any dimensional input
nn.Dropout2d(p=0.5, inplace=False) # 2-dimensional channel-wise dropout
nn.Embedding(num_embeddings, embedding_dim) # (tensor-wise) mapping from indices to embedding vectors
-
```
See [nn](https://pytorch.org/docs/stable/nn.html)

-
### Loss Functions
```
@@ -170,18 +159,18 @@ See [optimizers](https://pytorch.org/docs/stable/optim.html)
scheduler = optim.X(optimizer,...) # create lr scheduler
scheduler.step() # update lr at start of epoch
optim.lr_scheduler.X where ... # LambdaLR, StepLR, MultiStepLR, ExponentialLR or ReduceLROnPlateau
-
```
See [learning rate scheduler](https://pytorch.org/docs/stable/optim.html#how-to-adjust-learning-rate)

+
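
A minimal scheduler sketch in the spirit of the snippet above; the model, optimizer, and `StepLR` settings are arbitrary choices for illustration:

```
import torch
import torch.nn as nn
import torch.optim as optim

model = nn.Linear(10, 1)
optimizer = optim.SGD(model.parameters(), lr=0.1)
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.5)   # halve lr every 10 epochs

for epoch in range(30):
    # ... forward pass, loss.backward(), optimizer.step() would go here ...
    scheduler.step()                                     # update the learning rate once per epoch
    print(epoch, optimizer.param_groups[0]["lr"])
```
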
# Data Utilities
### Datasets
```
Dataset # abstract class representing dataset
TensorDataset # labelled dataset in the form of tensors
- Concat Dataset # concatenation of Datasets
+ ConcatDataset # concatenation of Datasets
```
See [datasets](https://pytorch.org/docs/stable/data.html?highlight=dataset#torch.utils.data.Dataset)
@@ -191,7 +180,6 @@ See [datasets](https://pytorch.org/docs/stable/data.html?highlight=dataset#torch.utils.data.Dataset)
DataLoader(dataset, batch_size=1, ...) # loads data batches agnostic of structure of individual data points
sampler.Sampler(dataset,...) # abstract class dealing with ways to sample from dataset
sampler.XSampler where ... # Sequential, Random, Subset, WeightedRandom or Distributed
-
```
See [dataloader](https://pytorch.org/docs/stable/data.html?highlight=dataloader#torch.utils.data.DataLoader)