# From tinygrad/extra/models/resnet.py
#
# Copyright (c) 2023 George Hotz
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
import tinygrad.nn as nn
from tinygrad.tensor import Tensor
from tinygrad.nn.state import torch_load
from tinygrad.helpers import fetch, get_child
class BasicBlock:
expansion = 1
def __init__(self, in_planes, planes, stride=1, groups=1, base_width=64):
assert groups == 1 and base_width == 64, "BasicBlock only supports groups=1 and base_width=64"
self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, padding=1, stride=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.downsample = []
if stride != 1 or in_planes != self.expansion*planes:
self.downsample = [
nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(self.expansion*planes)
]
def __call__(self, x):
out = self.bn1(self.conv1(x))
out = out.relu()
out = self.bn2(self.conv2(out))
out = out + x.sequential(self.downsample)
out = out.relu()
return out
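# A minimal smoke test for BasicBlock (an illustrative sketch, not part of
# the original file; the input shape is an assumption):
#
#   block = BasicBlock(64, 64)                  # identity shortcut, stride 1
#   y = block(Tensor.randn(1, 64, 56, 56))      # -> (1, 64, 56, 56)
#   down = BasicBlock(64, 128, stride=2)        # projection shortcut is used
#   y2 = down(Tensor.randn(1, 64, 56, 56))      # -> (1, 128, 28, 28)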
class Bottleneck:
# NOTE: stride_in_1x1=False, this is the v1.5 variant
expansion = 4
def __init__(self, in_planes, planes, stride=1, stride_in_1x1=False, groups=1, base_width=64):
width = int(planes * (base_width / 64.0)) * groups
    # NOTE: ResNet v1 places the stride on the first 1x1 convolution (self.conv1);
    # the default stride_in_1x1=False keeps it on the 3x3 conv2 instead, giving the v1.5 variant.
self.conv1 = nn.Conv2d(in_planes, width, kernel_size=1, stride=stride if stride_in_1x1 else 1, bias=False)
self.bn1 = nn.BatchNorm2d(width)
self.conv2 = nn.Conv2d(width, width, kernel_size=3, padding=1, stride=1 if stride_in_1x1 else stride, groups=groups, bias=False)
self.bn2 = nn.BatchNorm2d(width)
self.conv3 = nn.Conv2d(width, self.expansion*planes, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(self.expansion*planes)
self.downsample = []
if stride != 1 or in_planes != self.expansion*planes:
self.downsample = [
nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(self.expansion*planes)
]
def __call__(self, x):
out = self.bn1(self.conv1(x)).relu()
out = self.bn2(self.conv2(out)).relu()
out = self.bn3(self.conv3(out))
out = out + x.sequential(self.downsample)
out = out.relu()
return out
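# Worked example of the width computation above (illustrative, not from the
# original file): for ResNeXt50_32x4d's first stage, planes=64, base_width=4,
# groups=32, so
#   width = int(64 * (4 / 64.0)) * 32 = 4 * 32 = 128
# i.e. the 3x3 convolution runs 32 parallel groups of 4 channels each, while
# conv3 still projects back out to expansion*planes = 256 channels.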
class ResNet:
def __init__(self, num, num_classes=None, groups=1, width_per_group=64, stride_in_1x1=False):
self.num = num
self.block = {
18: BasicBlock,
34: BasicBlock,
50: Bottleneck,
101: Bottleneck,
152: Bottleneck
}[num]
self.num_blocks = {
18: [2,2,2,2],
34: [3,4,6,3],
50: [3,4,6,3],
101: [3,4,23,3],
152: [3,8,36,3]
}[num]
self.in_planes = 64
self.groups = groups
self.base_width = width_per_group
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, bias=False, padding=3)
self.bn1 = nn.BatchNorm2d(64)
self.layer1 = self._make_layer(self.block, 64, self.num_blocks[0], stride=1, stride_in_1x1=stride_in_1x1)
self.layer2 = self._make_layer(self.block, 128, self.num_blocks[1], stride=2, stride_in_1x1=stride_in_1x1)
self.layer3 = self._make_layer(self.block, 256, self.num_blocks[2], stride=2, stride_in_1x1=stride_in_1x1)
self.layer4 = self._make_layer(self.block, 512, self.num_blocks[3], stride=2, stride_in_1x1=stride_in_1x1)
self.fc = nn.Linear(512 * self.block.expansion, num_classes) if num_classes is not None else None
def _make_layer(self, block, planes, num_blocks, stride, stride_in_1x1):
strides = [stride] + [1] * (num_blocks-1)
layers = []
for stride in strides:
if block == Bottleneck:
layers.append(block(self.in_planes, planes, stride, stride_in_1x1, self.groups, self.base_width))
else:
layers.append(block(self.in_planes, planes, stride, self.groups, self.base_width))
self.in_planes = planes * block.expansion
return layers
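  # Illustrative trace of _make_layer (not part of the original file): for
  # ResNet50's layer2, num_blocks=4 and stride=2 yield strides=[2, 1, 1, 1],
  # so only the first Bottleneck downsamples (56x56 -> 28x28); self.in_planes
  # advances from 256 to planes*expansion = 512 for the remaining blocks.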
def forward(self, x):
is_feature_only = self.fc is None
if is_feature_only: features = []
out = self.bn1(self.conv1(x)).relu()
out = out.pad2d([1,1,1,1]).max_pool2d((3,3), 2)
out = out.sequential(self.layer1)
if is_feature_only: features.append(out)
out = out.sequential(self.layer2)
if is_feature_only: features.append(out)
out = out.sequential(self.layer3)
if is_feature_only: features.append(out)
out = out.sequential(self.layer4)
if is_feature_only: features.append(out)
if not is_feature_only:
out = out.mean([2,3])
out = self.fc(out).log_softmax()
return out
return features
def __call__(self, x:Tensor) -> Tensor:
return self.forward(x)
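  # Sketch of the two forward modes (shapes assume a 224x224 input and are
  # illustrative, not part of the original file). With a head attached, the
  # call returns (N, num_classes) log-probabilities; with num_classes=None
  # the same network acts as a feature extractor:
  #
  #   backbone = ResNet(50, num_classes=None)
  #   feats = backbone(Tensor.randn(1, 3, 224, 224))
  #   # feats[0]: (1,  256, 56, 56)  after layer1
  #   # feats[1]: (1,  512, 28, 28)  after layer2
  #   # feats[2]: (1, 1024, 14, 14)  after layer3
  #   # feats[3]: (1, 2048,  7,  7)  after layer4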
def load_from_pretrained(self):
# TODO replace with fake torch load
model_urls = {
(18, 1, 64): 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
(34, 1, 64): 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
(50, 1, 64): 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
(50, 32, 4): 'https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth',
(101, 1, 64): 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
(152, 1, 64): 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
self.url = model_urls[(self.num, self.groups, self.base_width)]
for k, v in torch_load(fetch(self.url)).items():
      if 'fc.' in k and self.fc is None:
print("skipping fully connected layer")
continue # Skip FC if transfer learning
obj: Tensor = get_child(self, k)
dat = v.detach().numpy()
# TODO: remove or when #777 is merged
assert obj.shape == dat.shape or (obj.shape == (1,) and dat.shape == ()), (k, obj.shape, dat.shape)
obj.assign(dat)
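# The checkpoint keys (e.g. "layer1.0.conv1.weight") mirror this class's
# attribute tree, which is how get_child resolves each parameter: it walks
# attributes and list indices from the dotted key string.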
ResNet18 = lambda num_classes=1000: ResNet(18, num_classes=num_classes)
ResNet34 = lambda num_classes=1000: ResNet(34, num_classes=num_classes)
ResNet50 = lambda num_classes=1000: ResNet(50, num_classes=num_classes)
ResNet101 = lambda num_classes=1000: ResNet(101, num_classes=num_classes)
ResNet152 = lambda num_classes=1000: ResNet(152, num_classes=num_classes)
ResNeXt50_32X4D = lambda num_classes=1000: ResNet(50, num_classes=num_classes, groups=32, width_per_group=4)
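# Minimal end-to-end usage sketch (an assumption about how this module might
# be exercised, not part of the original file): fetch pretrained weights and
# classify one random dummy image.
if __name__ == "__main__":
  model = ResNet50()
  model.load_from_pretrained()
  out = model(Tensor.randn(1, 3, 224, 224))   # (1, 1000) log-probabilities
  print(out.argmax(axis=1).numpy())           # predicted ImageNet class index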