From 3c15862b1fde4814c7d98774c990dd05af112698 Mon Sep 17 00:00:00 2001
From: ChristophReich1996
Date: Fri, 16 Jun 2023 19:49:10 +0200
Subject: [PATCH] Omit unused norm_layer from Mlp class

---
 timm/layers/mlp.py  | 2 --
 timm/models/beit.py | 1 -
 timm/models/eva.py  | 2 --
 3 files changed, 5 deletions(-)

diff --git a/timm/layers/mlp.py b/timm/layers/mlp.py
index 2c3073300e..0ffa514dff 100644
--- a/timm/layers/mlp.py
+++ b/timm/layers/mlp.py
@@ -19,7 +19,6 @@ def __init__(
             hidden_features=None,
             out_features=None,
             act_layer=nn.GELU,
-            norm_layer=None,
             bias=True,
             drop=0.,
             use_conv=False,
@@ -34,7 +33,6 @@ def __init__(
         self.fc1 = linear_layer(in_features, hidden_features, bias=bias[0])
         self.act = act_layer()
         self.drop1 = nn.Dropout(drop_probs[0])
-        self.norm = norm_layer(hidden_features) if norm_layer is not None else nn.Identity()
         self.fc2 = linear_layer(hidden_features, out_features, bias=bias[1])
         self.drop2 = nn.Dropout(drop_probs[1])
 
diff --git a/timm/models/beit.py b/timm/models/beit.py
index 3fd1062f5b..60cb738867 100644
--- a/timm/models/beit.py
+++ b/timm/models/beit.py
@@ -220,7 +220,6 @@ def __init__(
             in_features=dim,
             hidden_features=int(dim * mlp_ratio),
             act_layer=act_layer,
-            norm_layer=norm_layer if scale_mlp else None,
             drop=proj_drop,
         )
         self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()
diff --git a/timm/models/eva.py b/timm/models/eva.py
index f0ab9c7224..13038b7f40 100644
--- a/timm/models/eva.py
+++ b/timm/models/eva.py
@@ -225,7 +225,6 @@ def __init__(
             in_features=dim,
             hidden_features=hidden_features,
             act_layer=act_layer,
-            norm_layer=norm_layer if scale_mlp else None,
             drop=proj_drop,
         )
         self.gamma_2 = nn.Parameter(init_values * torch.ones(dim)) if init_values is not None else None
@@ -319,7 +318,6 @@ def __init__(
             in_features=dim,
             hidden_features=hidden_features,
             act_layer=act_layer,
-            norm_layer=norm_layer if scale_mlp else None,
             drop=proj_drop,
         )
         self.norm2 = norm_layer(dim)
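
For reference, below is a minimal sketch of the trimmed Mlp module as it would look after this patch. It mirrors the names in timm/layers/mlp.py, but the to_2tuple helper is inlined and the forward pass is reconstructed here without a norm call, consistent with the removal above; treat it as an illustration under those assumptions, not the verbatim upstream file.

# Sketch of the Mlp module after the norm_layer removal; assumes torch is
# installed. Names mirror timm/layers/mlp.py, but this is not the exact
# upstream implementation.
from functools import partial
from itertools import repeat

import torch
import torch.nn as nn


def to_2tuple(x):
    # Simplified stand-in for timm.layers.helpers.to_2tuple.
    if isinstance(x, (tuple, list)):
        return tuple(x)
    return tuple(repeat(x, 2))


class Mlp(nn.Module):
    """MLP as used in Vision Transformer, MLP-Mixer and related networks."""

    def __init__(
            self,
            in_features,
            hidden_features=None,
            out_features=None,
            act_layer=nn.GELU,
            bias=True,
            drop=0.,
            use_conv=False,
    ):
        super().__init__()
        out_features = out_features or in_features
        hidden_features = hidden_features or in_features
        bias = to_2tuple(bias)
        drop_probs = to_2tuple(drop)
        # 1x1 conv path is kept for NCHW inputs; otherwise plain linear layers.
        linear_layer = partial(nn.Conv2d, kernel_size=1) if use_conv else nn.Linear

        self.fc1 = linear_layer(in_features, hidden_features, bias=bias[0])
        self.act = act_layer()
        self.drop1 = nn.Dropout(drop_probs[0])
        self.fc2 = linear_layer(hidden_features, out_features, bias=bias[1])
        self.drop2 = nn.Dropout(drop_probs[1])

    def forward(self, x):
        # No intermediate norm after this patch (sketch, not the upstream forward).
        x = self.fc1(x)
        x = self.act(x)
        x = self.drop1(x)
        x = self.fc2(x)
        x = self.drop2(x)
        return x


if __name__ == "__main__":
    # Quick shape check on a ViT-like token tensor.
    mlp = Mlp(in_features=64, hidden_features=256)
    out = mlp(torch.randn(2, 197, 64))
    print(out.shape)  # torch.Size([2, 197, 64])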