@@ -124,7 +124,7 @@ class FaceLandmarksDataset(Dataset):
 
     def __init__(self, csv_file, root_dir, transform=None):
         """
-        Args:
+        Arguments:
            csv_file (string): Path to the csv file with annotations.
            root_dir (string): Directory with all the images.
            transform (callable, optional): Optional transform to be applied
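For reference, the docstring touched above sits inside the tutorial's custom ``Dataset``. A minimal sketch of the full class, assuming the tutorial's CSV layout (image filename in column 0, landmark coordinates in the remaining columns):

    # Sketch of the surrounding class; column slicing assumes the tutorial's
    # faces CSV, where each row is one image plus flattened (x, y) landmarks.
    import os
    import pandas as pd
    from skimage import io
    from torch.utils.data import Dataset

    class FaceLandmarksDataset(Dataset):
        """Face Landmarks dataset."""

        def __init__(self, csv_file, root_dir, transform=None):
            self.landmarks_frame = pd.read_csv(csv_file)
            self.root_dir = root_dir
            self.transform = transform

        def __len__(self):
            # one sample per CSV row
            return len(self.landmarks_frame)

        def __getitem__(self, idx):
            img_name = os.path.join(self.root_dir,
                                    self.landmarks_frame.iloc[idx, 0])
            image = io.imread(img_name)
            landmarks = self.landmarks_frame.iloc[idx, 1:].to_numpy(dtype=float)
            landmarks = landmarks.reshape(-1, 2)  # rows of (x, y) pairs
            sample = {'image': image, 'landmarks': landmarks}
            if self.transform:
                sample = self.transform(sample)
            return sample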
@@ -197,7 +197,7 @@ def __getitem__(self, idx):
 # swap axes).
 #
 # We will write them as callable classes instead of simple functions so
-# that parameters of the transform need not be passed everytime it's
+# that parameters of the transform need not be passed every time it's
 # called. For this, we just need to implement ``__call__`` method and
 # if required, ``__init__`` method. We can then use a transform like this:
 #
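Since the hunk above describes the callable-class pattern, here is a minimal sketch of one such transform in the tutorial's style; the square-only ``output_size`` handling is a simplifying assumption:

    # A callable transform: parameters are stored once in __init__, then the
    # instance is applied by calling it on a sample dict.
    from skimage import transform as sk_transform

    class Rescale:
        """Rescale the image in a sample to a given square size (simplified)."""

        def __init__(self, output_size):
            self.output_size = output_size  # configured once, reused per call

        def __call__(self, sample):
            image, landmarks = sample['image'], sample['landmarks']
            h, w = image.shape[:2]
            new_h = new_w = self.output_size
            image = sk_transform.resize(image, (new_h, new_w))
            # scale landmark coordinates to match the resized image
            landmarks = landmarks * [new_w / w, new_h / h]
            return {'image': image, 'landmarks': landmarks}

    # usage: configure once, call many times
    # scale = Rescale(256)
    # transformed_sample = scale(sample)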
@@ -291,12 +291,12 @@ def __call__(self, sample):
         image = image.transpose((2, 0, 1))
         return {'image': torch.from_numpy(image),
                 'landmarks': torch.from_numpy(landmarks)}
-
+
 ######################################################################
 # .. note::
 #     In the example above, `RandomCrop` uses an external library's random number generator
-#     (in this case, Numpy's `np.random.int`). This can result in unexpected behavior with `DataLoader`
-#     (see https://pytorch.org/docs/stable/notes/faq.html#my-data-loader-workers-return-identical-random-numbers).
+#     (in this case, Numpy's `np.random.int`). This can result in unexpected behavior with `DataLoader`
+#     (see `here <https://pytorch.org/docs/stable/notes/faq.html#my-data-loader-workers-return-identical-random-numbers>`_).
 #     In practice, it is safer to stick to PyTorch's random number generator, e.g. by using `torch.randint` instead.
 
 ######################################################################
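A minimal sketch of the ``RandomCrop`` transform rewritten per the note's advice, with ``torch.randint`` in place of ``np.random.randint``; otherwise it follows the tutorial's transform pattern:

    # RandomCrop using torch.randint, so DataLoader workers that fork with
    # identical NumPy seeds still draw different crop offsets.
    import torch

    class RandomCrop:
        """Crop randomly the image in a sample to a fixed square size."""

        def __init__(self, output_size):
            self.output_size = (output_size, output_size)

        def __call__(self, sample):
            image, landmarks = sample['image'], sample['landmarks']
            h, w = image.shape[:2]
            new_h, new_w = self.output_size
            # torch.randint draws from PyTorch's generator, which DataLoader
            # reseeds per worker, unlike np.random.randint
            top = torch.randint(0, h - new_h + 1, (1,)).item()
            left = torch.randint(0, w - new_w + 1, (1,)).item()
            image = image[top: top + new_h, left: left + new_w]
            landmarks = landmarks - [left, top]
            return {'image': image, 'landmarks': landmarks}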
@@ -404,7 +404,7 @@ def show_landmarks_batch(sample_batched):
     plt.title('Batch from dataloader')
 
 # if you are using Windows, uncomment the next line and indent the for loop.
-# you might need to go back and change "num_workers" to 0.
+# you might need to go back and change ``num_workers`` to 0.
 
 # if __name__ == '__main__':
 for i_batch, sample_batched in enumerate(dataloader):
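For clarity, a sketch of what the uncommented, Windows-safe version of this loop would look like; the ``print`` body follows the tutorial, and the rest is the guard the comment describes:

    # On Windows, DataLoader workers are spawned rather than forked, so the
    # iteration must live under the __main__ guard (or num_workers must be 0).
    if __name__ == '__main__':
        for i_batch, sample_batched in enumerate(dataloader):
            print(i_batch, sample_batched['image'].size(),
                  sample_batched['landmarks'].size())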
@@ -444,21 +444,21 @@ def show_landmarks_batch(sample_batched):
 # which operate on ``PIL.Image`` like ``RandomHorizontalFlip``, ``Scale``,
 # are also available. You can use these to write a dataloader like this: ::
 #
-#   import torch
-#   from torchvision import transforms, datasets
-#
-#   data_transform = transforms.Compose([
-#           transforms.RandomSizedCrop(224),
-#           transforms.RandomHorizontalFlip(),
-#           transforms.ToTensor(),
-#           transforms.Normalize(mean=[0.485, 0.456, 0.406],
-#                                std=[0.229, 0.224, 0.225])
-#       ])
-#   hymenoptera_dataset = datasets.ImageFolder(root='hymenoptera_data/train',
-#                                              transform=data_transform)
-#   dataset_loader = torch.utils.data.DataLoader(hymenoptera_dataset,
-#                                                batch_size=4, shuffle=True,
-#                                                num_workers=4)
+#     import torch
+#     from torchvision import transforms, datasets
+#
+#     data_transform = transforms.Compose([
+#         transforms.RandomSizedCrop(224),
+#         transforms.RandomHorizontalFlip(),
+#         transforms.ToTensor(),
+#         transforms.Normalize(mean=[0.485, 0.456, 0.406],
+#                              std=[0.229, 0.224, 0.225])
+#     ])
+#     hymenoptera_dataset = datasets.ImageFolder(root='hymenoptera_data/train',
+#                                                transform=data_transform)
+#     dataset_loader = torch.utils.data.DataLoader(hymenoptera_dataset,
+#                                                  batch_size=4, shuffle=True,
+#                                                  num_workers=4)
 #
 # For an example with training code, please see
 # :doc:`transfer_learning_tutorial`.
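As a usage note for the block above (a sketch, assuming the ``hymenoptera_data`` layout with one subdirectory per class, e.g. ``ants/`` and ``bees/``): ``ImageFolder`` maps each subdirectory to a class index, so iterating the loader yields image batches and integer labels:

    # each batch: images (4, 3, 224, 224) after the transforms above,
    # labels (4,) holding per-subdirectory class indices
    for batch_idx, (images, labels) in enumerate(dataset_loader):
        print(batch_idx, images.shape, labels)
        break  # show the first batch only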