Skip to content

Commit

Permalink
fix: Fixed ReXNet implementation (#58)
Browse files Browse the repository at this point in the history
Fixed SEBlock biases and ReXBlock strides.
  • Loading branch information
frgfm authored Jul 21, 2020
1 parent ce08e60 commit 279c05d
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions holocron/models/resnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,10 +180,10 @@ def __init__(self, channels, se_ratio=12, act_layer=None, norm_layer=None, drop_
super().__init__()
self.pool = nn.AdaptiveAvgPool2d(1)
self.conv = nn.Sequential(
*_conv_sequence(channels, channels // se_ratio, act_layer, norm_layer, drop_layer, kernel_size=1,
stride=1, bias=False),
*_conv_sequence(channels // se_ratio, channels, nn.Sigmoid(), None, drop_layer, kernel_size=1,
stride=1, bias=False))
*_conv_sequence(channels, channels // se_ratio, act_layer, norm_layer, drop_layer,
kernel_size=1, stride=1),
*_conv_sequence(channels // se_ratio, channels, nn.Sigmoid(), None, drop_layer,
kernel_size=1, stride=1))

def forward(self, x):

Expand All @@ -198,7 +198,7 @@ def __init__(self, in_channels, channels, t, stride, use_se=True, se_ratio=12,
super().__init__()

if act_layer is None:
act_layer = nn.ReLU6()
act_layer = nn.ReLU6(inplace=True)

if norm_layer is None:
norm_layer = nn.BatchNorm2d
Expand All @@ -216,7 +216,7 @@ def __init__(self, in_channels, channels, t, stride, use_se=True, se_ratio=12,
dw_channels = in_channels

_layers.extend(_conv_sequence(dw_channels, dw_channels, None, norm_layer, drop_layer, kernel_size=3,
stride=1, padding=1, bias=False))
stride=stride, padding=1, bias=False, groups=dw_channels))

if use_se:
_layers.append(SEBlock(dw_channels, se_ratio, act_layer, norm_layer, drop_layer))
Expand Down

0 comments on commit 279c05d

Please sign in to comment.