From 616b0f90344e760141efb02f8f14cf5c84e2a0d0 Mon Sep 17 00:00:00 2001
From: Kai Li <1196594711@qq.com>
Date: Sun, 9 Apr 2017 14:33:25 +0800
Subject: [PATCH 1/2] Update transfer_learning_tutorial.py

---
 beginner_source/transfer_learning_tutorial.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/beginner_source/transfer_learning_tutorial.py b/beginner_source/transfer_learning_tutorial.py
index 116de36587b..15c8914805b 100644
--- a/beginner_source/transfer_learning_tutorial.py
+++ b/beginner_source/transfer_learning_tutorial.py
@@ -297,7 +297,7 @@ def optim_scheduler_ft(model, epoch, init_lr=0.001, lr_decay_epoch=7):
     param.requires_grad = False

 # Parameters of newly constructed modules have requires_grad=True by default
-model.fc = nn.Linear(512, 100)
+model.fc = nn.Linear(512, 2)

 if use_gpu:
     model = model.cuda()

From 185bef017236523718247f1028b129ecd1f5ec88 Mon Sep 17 00:00:00 2001
From: Kai Li <1196594711@qq.com>
Date: Sun, 9 Apr 2017 14:47:45 +0800
Subject: [PATCH 2/2] Update transfer_learning_tutorial.py

---
 beginner_source/transfer_learning_tutorial.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/beginner_source/transfer_learning_tutorial.py b/beginner_source/transfer_learning_tutorial.py
index 15c8914805b..de15552282f 100644
--- a/beginner_source/transfer_learning_tutorial.py
+++ b/beginner_source/transfer_learning_tutorial.py
@@ -297,7 +297,8 @@ def optim_scheduler_ft(model, epoch, init_lr=0.001, lr_decay_epoch=7):
     param.requires_grad = False

 # Parameters of newly constructed modules have requires_grad=True by default
-model.fc = nn.Linear(512, 2)
+num_ftrs = model.fc.in_features
+model.fc = nn.Linear(num_ftrs, 2)

 if use_gpu:
     model = model.cuda()
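
For context, the net effect of the two commits on the tutorial's fixed-feature-extractor section is sketched below. Only the frozen-parameter loop, the replaced fc layer, and the GPU transfer appear in the diff hunks; the resnet18 / use_gpu setup lines are assumptions taken from the surrounding tutorial code, not from this patch.

# Minimal sketch, assuming the tutorial's resnet18 backbone and use_gpu flag.
import torch
import torch.nn as nn
from torchvision import models

use_gpu = torch.cuda.is_available()

model = models.resnet18(pretrained=True)
for param in model.parameters():
    param.requires_grad = False  # freeze the pretrained backbone

# Parameters of newly constructed modules have requires_grad=True by default.
# Reading in_features instead of hardcoding 512 keeps this correct for other
# ResNet variants; 2 outputs match the tutorial's two-class (ants/bees) data.
num_ftrs = model.fc.in_features
model.fc = nn.Linear(num_ftrs, 2)

if use_gpu:
    model = model.cuda()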