From 279c05d96c52f6539dc64d376ba30d11a39e8be5 Mon Sep 17 00:00:00 2001
From: F-G Fernandez
Date: Tue, 21 Jul 2020 19:45:34 +0200
Subject: [PATCH] fix: Fixed ReXNet implementation (#58)

Fixed SEBlock biases and RexBlock strides.

---
 holocron/models/resnet.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/holocron/models/resnet.py b/holocron/models/resnet.py
index a0e584df7..dc94a7731 100644
--- a/holocron/models/resnet.py
+++ b/holocron/models/resnet.py
@@ -180,10 +180,10 @@ def __init__(self, channels, se_ratio=12, act_layer=None, norm_layer=None, drop_
         super().__init__()
         self.pool = nn.AdaptiveAvgPool2d(1)
         self.conv = nn.Sequential(
-            *_conv_sequence(channels, channels // se_ratio, act_layer, norm_layer, drop_layer, kernel_size=1,
-                            stride=1, bias=False),
-            *_conv_sequence(channels // se_ratio, channels, nn.Sigmoid(), None, drop_layer, kernel_size=1,
-                            stride=1, bias=False))
+            *_conv_sequence(channels, channels // se_ratio, act_layer, norm_layer, drop_layer,
+                            kernel_size=1, stride=1),
+            *_conv_sequence(channels // se_ratio, channels, nn.Sigmoid(), None, drop_layer,
+                            kernel_size=1, stride=1))
 
     def forward(self, x):
 
@@ -198,7 +198,7 @@ def __init__(self, in_channels, channels, t, stride, use_se=True, se_ratio=12,
         super().__init__()
 
         if act_layer is None:
-            act_layer = nn.ReLU6()
+            act_layer = nn.ReLU6(inplace=True)
 
         if norm_layer is None:
             norm_layer = nn.BatchNorm2d
@@ -216,7 +216,7 @@ def __init__(self, in_channels, channels, t, stride, use_se=True, se_ratio=12,
             dw_channels = in_channels
 
         _layers.extend(_conv_sequence(dw_channels, dw_channels, None, norm_layer, drop_layer, kernel_size=3,
-                                      stride=1, padding=1, bias=False))
+                                      stride=stride, padding=1, bias=False, groups=dw_channels))
 
         if use_se:
             _layers.append(SEBlock(dw_channels, se_ratio, act_layer, norm_layer, drop_layer))
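
For context, here is a minimal runnable sketch (not the library's code) of what the two fixes change, using plain torch.nn with assumed example values for channels, se_ratio, and stride: dropping bias=False restores the default bias=True on the SEBlock's two 1x1 convolutions, while groups=dw_channels makes the 3x3 convolution depthwise and the forwarded stride lets the block actually downsample.

```python
import torch
from torch import nn

# Hedged sketch of the fixed behaviour; example values, not holocron's API.
channels, se_ratio, stride = 48, 12, 2

# Squeeze-and-excitation branch: with bias=False removed, both 1x1 convs
# fall back to their default bias=True.
se_branch = nn.Sequential(
    nn.AdaptiveAvgPool2d(1),
    nn.Conv2d(channels, channels // se_ratio, kernel_size=1),  # bias=True by default
    nn.ReLU6(inplace=True),
    nn.Conv2d(channels // se_ratio, channels, kernel_size=1),  # bias=True by default
    nn.Sigmoid(),
)

# Depthwise 3x3 conv: groups=channels gives one kernel per input channel, and
# the forwarded stride halves the spatial resolution when stride=2.
dw_conv = nn.Conv2d(channels, channels, kernel_size=3, stride=stride,
                    padding=1, bias=False, groups=channels)

x = torch.rand(1, channels, 56, 56)
out = dw_conv(x)
out = out * se_branch(out)  # channel-wise reweighting, as in the patched block
print(out.shape)  # torch.Size([1, 48, 28, 28])
```

Before the patch, the hard-coded stride=1 meant the depthwise stage never reduced resolution regardless of the stride argument, and the missing groups made it an ordinary dense convolution.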