# resnet18.py
import torch
import torch.nn as nn
import torch.nn.functional as F

from easyfl.models import BaseModel
  5. class BasicBlock(nn.Module):
  6. expansion = 1
  7. def __init__(self, in_planes, planes, stride=1):
  8. super(BasicBlock, self).__init__()
  9. self.conv1 = nn.Conv2d(
  10. in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
  11. self.bn1 = nn.BatchNorm2d(planes)
  12. self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
  13. stride=1, padding=1, bias=False)
  14. self.bn2 = nn.BatchNorm2d(planes)
  15. self.shortcut = nn.Sequential()
  16. if stride != 1 or in_planes != self.expansion * planes:
  17. self.shortcut = nn.Sequential(
  18. nn.Conv2d(in_planes, self.expansion * planes,
  19. kernel_size=1, stride=stride, bias=False),
  20. nn.BatchNorm2d(self.expansion * planes)
  21. )
  22. def forward(self, x):
  23. out = F.relu(self.bn1(self.conv1(x)))
  24. out = self.bn2(self.conv2(out))
  25. out += self.shortcut(x)
  26. out = F.relu(out)
  27. return out
  28. class Bottleneck(nn.Module):
  29. expansion = 4
  30. def __init__(self, in_planes, planes, stride=1):
  31. super(Bottleneck, self).__init__()
  32. self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
  33. self.bn1 = nn.BatchNorm2d(planes)
  34. self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
  35. stride=stride, padding=1, bias=False)
  36. self.bn2 = nn.BatchNorm2d(planes)
  37. self.conv3 = nn.Conv2d(planes, self.expansion *
  38. planes, kernel_size=1, bias=False)
  39. self.bn3 = nn.BatchNorm2d(self.expansion * planes)
  40. self.shortcut = nn.Sequential()
  41. if stride != 1 or in_planes != self.expansion * planes:
  42. self.shortcut = nn.Sequential(
  43. nn.Conv2d(in_planes, self.expansion * planes,
  44. kernel_size=1, stride=stride, bias=False),
  45. nn.BatchNorm2d(self.expansion * planes)
  46. )
  47. def forward(self, x):
  48. out = F.relu(self.bn1(self.conv1(x)))
  49. out = F.relu(self.bn2(self.conv2(out)))
  50. out = self.bn3(self.conv3(out))
  51. out += self.shortcut(x)
  52. out = F.relu(out)
  53. return out
  54. class Model(BaseModel):
  55. """ResNet18 model
  56. Note two main differences from official pytorch version:
  57. 1. conv1 kernel size: pytorch version uses kernel_size=7
  58. 2. average pooling: pytorch version uses AdaptiveAvgPool
  59. """
  60. def __init__(self, block=BasicBlock, num_blocks=[2, 2, 2, 2], num_classes=10):
  61. super(Model, self).__init__()
  62. self.in_planes = 64
  63. self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
  64. self.bn1 = nn.BatchNorm2d(64)
  65. self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
  66. self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
  67. self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
  68. self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
  69. self.linear = nn.Linear(512 * block.expansion, num_classes)
  70. def _make_layer(self, block, planes, num_blocks, stride):
  71. strides = [stride] + [1] * (num_blocks - 1)
  72. layers = []
  73. for stride in strides:
  74. layers.append(block(self.in_planes, planes, stride))
  75. self.in_planes = planes * block.expansion
  76. return nn.Sequential(*layers)
  77. def forward(self, x):
  78. out = F.relu(self.bn1(self.conv1(x)))
  79. out = self.layer1(out)
  80. out = self.layer2(out)
  81. out = self.layer3(out)
  82. out = self.layer4(out)
  83. out = F.avg_pool2d(out, 4)
  84. out = out.view(out.size(0), -1)
  85. out = self.linear(out)
  86. return out