Fix drop/drop_path arg on MLP-Mixer model. Fix #641
rwightman committed May 16, 2021
1 parent 7077f16 commit e7f0db8
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions timm/models/mlp_mixer.py
@@ -96,8 +96,8 @@ def __init__(
             mlp_layer=Mlp,
             norm_layer=partial(nn.LayerNorm, eps=1e-6),
             act_layer=nn.GELU,
-            drop=0.,
-            drop_path=0.,
+            drop_rate=0.,
+            drop_path_rate=0.,
             nlhb=False,
     ):
         super().__init__()
@@ -108,7 +108,7 @@ def __init__(
         self.blocks = nn.Sequential(*[
             MixerBlock(
                 hidden_dim, self.stem.num_patches, tokens_dim, channels_dim,
-                mlp_layer=mlp_layer, norm_layer=norm_layer, act_layer=act_layer, drop=drop, drop_path=drop_path)
+                mlp_layer=mlp_layer, norm_layer=norm_layer, act_layer=act_layer, drop=drop_rate, drop_path=drop_path_rate)
             for _ in range(num_blocks)])
         self.norm = norm_layer(hidden_dim)
         self.head = nn.Linear(hidden_dim, self.num_classes)  # zero init
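
For context, a minimal usage sketch (not part of the commit) of why the argument names matter: timm's model factory forwards dropout settings as drop_rate / drop_path_rate keyword arguments, so MlpMixer's previous drop / drop_path parameter names could not receive them and such calls would typically fail with an unexpected-keyword-argument error. The snippet below assumes the mixer_b16_224 entrypoint and standard timm/torch imports.

import timm
import torch

# With this commit, MlpMixer accepts the library-wide kwarg names, so the
# dropout settings below reach the constructor instead of being rejected.
model = timm.create_model(
    'mixer_b16_224', pretrained=False,
    drop_rate=0.1, drop_path_rate=0.1)

# Quick forward pass to confirm the model builds and runs.
x = torch.randn(1, 3, 224, 224)
with torch.no_grad():
    out = model(x)
print(out.shape)  # expected: torch.Size([1, 1000])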
