From 606d0aa188d94e89200d6e9aca3622d869232039 Mon Sep 17 00:00:00 2001
From: keineahnung2345
Date: Tue, 6 Nov 2018 17:54:07 +0800
Subject: [PATCH] [Bug fix] redundant layers in ResNet

In
https://github.com/yunjey/pytorch-tutorial/blob/master/tutorials/02-intermediate/deep_residual_network/main.py#L115,
a length-4 `layers` list is passed to `ResNet`. But in
https://github.com/yunjey/pytorch-tutorial/blob/master/tutorials/02-intermediate/deep_residual_network/main.py#L84,
only `layers[0]` and `layers[1]` are ever used (`layer2` mistakenly reuses
`layers[0]`, and `layers[3]` is never read), so the trailing entries of
[2, 2, 2, 2] are redundant. This patch makes the three stages use
`layers[0]`, `layers[1]`, and `layers[2]` respectively, and shortens the
list to [2, 2, 2].
---
 tutorials/02-intermediate/deep_residual_network/main.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/tutorials/02-intermediate/deep_residual_network/main.py b/tutorials/02-intermediate/deep_residual_network/main.py
index 2b2e43a7..f1bb1365 100644
--- a/tutorials/02-intermediate/deep_residual_network/main.py
+++ b/tutorials/02-intermediate/deep_residual_network/main.py
@@ -82,8 +82,8 @@ def __init__(self, block, layers, num_classes=10):
         self.bn = nn.BatchNorm2d(16)
         self.relu = nn.ReLU(inplace=True)
         self.layer1 = self.make_layer(block, 16, layers[0])
-        self.layer2 = self.make_layer(block, 32, layers[0], 2)
-        self.layer3 = self.make_layer(block, 64, layers[1], 2)
+        self.layer2 = self.make_layer(block, 32, layers[1], 2)
+        self.layer3 = self.make_layer(block, 64, layers[2], 2)
         self.avg_pool = nn.AvgPool2d(8)
         self.fc = nn.Linear(64, num_classes)
 
@@ -112,7 +112,7 @@ def forward(self, x):
         out = self.fc(out)
         return out
 
-model = ResNet(ResidualBlock, [2, 2, 2, 2]).to(device)
+model = ResNet(ResidualBlock, [2, 2, 2]).to(device)
 
 
 # Loss and optimizer
@@ -166,4 +166,4 @@ def update_lr(optimizer, lr):
 print('Accuracy of the model on the test images: {} %'.format(100 * correct / total))
 
 # Save the model checkpoint
-torch.save(model.state_dict(), 'resnet.ckpt')
\ No newline at end of file
+torch.save(model.state_dict(), 'resnet.ckpt')