From 22d44d1a99fc4f7a33e6311eb8f6124b4c5b2d43 Mon Sep 17 00:00:00 2001
From: thanhvc3
Date: Sat, 27 Apr 2024 10:32:08 +0700
Subject: [PATCH] try swin

---
 models.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/models.py b/models.py
index e603f85..7b49bcd 100644
--- a/models.py
+++ b/models.py
@@ -786,6 +786,7 @@ class WindowAttention(nn.Module):
             x: input features with shape of (num_windows*B, N, C)
             mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None
         """
+        print(x.shape)
         B_, N, C = x.shape
         qkv_bias = None
         if self.q_bias is not None:
@@ -862,7 +863,7 @@ class PoolFormerBlock(nn.Module):
         self.norm1 = norm_layer(dim)
         #self.token_mixer = Pooling(pool_size=pool_size)
         # self.token_mixer = FNetBlock()
-        self.token_mixer = WindowAttention(dim=dim, window_size=to_2tuple(7), num_heads=1, pretrained_window_size=[5,5])
+        self.token_mixer = WindowAttention(dim=dim, window_size=to_2tuple(7), num_heads=10, pretrained_window_size=[5,5])
         self.norm2 = norm_layer(dim)
         mlp_hidden_dim = int(dim * mlp_ratio)
         self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim,
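Context for the change: the patch replaces the PoolFormer pooling token mixer with a Swin-style WindowAttention and bumps num_heads from 1 to 10. Multi-head attention requires the channel dimension handled by the mixer to be divisible by num_heads, so this setting only works at stages whose width is a multiple of 10; the added print(x.shape) is a temporary debug aid for checking the (num_windows*B, N, C) input the mixer actually receives. Below is a minimal, self-contained sketch of this wiring, not the repository's actual WindowAttention/PoolFormerBlock code: the names SimpleWindowAttention and SketchPoolFormerBlock, the NCHW layout, GroupNorm/conv MLP choices, and dim=320 are illustrative assumptions.

# Sketch of a PoolFormer-style block whose token mixer is window attention.
# Assumptions for illustration: NCHW inputs, H and W divisible by window_size,
# and dim divisible by num_heads (e.g. dim=320 with num_heads=10 as in the patch).
import torch
import torch.nn as nn


class SimpleWindowAttention(nn.Module):
    """Self-attention applied independently within non-overlapping windows."""

    def __init__(self, dim, window_size=7, num_heads=10):
        super().__init__()
        assert dim % num_heads == 0, "dim must be divisible by num_heads"
        self.window_size = window_size
        self.attn = nn.MultiheadAttention(dim, num_heads, batch_first=True)

    def forward(self, x):  # x: (B, C, H, W)
        B, C, H, W = x.shape
        ws = self.window_size
        # Partition into (num_windows*B, ws*ws, C) windows.
        x = x.view(B, C, H // ws, ws, W // ws, ws)
        x = x.permute(0, 2, 4, 3, 5, 1).reshape(-1, ws * ws, C)
        x, _ = self.attn(x, x, x)
        # Reverse the window partition back to (B, C, H, W).
        x = x.view(B, H // ws, W // ws, ws, ws, C)
        x = x.permute(0, 5, 1, 3, 2, 4).reshape(B, C, H, W)
        return x


class SketchPoolFormerBlock(nn.Module):
    """PoolFormer-style block with window attention as the token mixer."""

    def __init__(self, dim, window_size=7, num_heads=10, mlp_ratio=4.0):
        super().__init__()
        self.norm1 = nn.GroupNorm(1, dim)  # channel norm over NCHW features
        self.token_mixer = SimpleWindowAttention(dim, window_size, num_heads)
        self.norm2 = nn.GroupNorm(1, dim)
        hidden = int(dim * mlp_ratio)
        self.mlp = nn.Sequential(nn.Conv2d(dim, hidden, 1), nn.GELU(),
                                 nn.Conv2d(hidden, dim, 1))

    def forward(self, x):
        x = x + self.token_mixer(self.norm1(x))
        x = x + self.mlp(self.norm2(x))
        return x


if __name__ == "__main__":
    # 10 heads require the channel dimension to be a multiple of 10.
    block = SketchPoolFormerBlock(dim=320, window_size=7, num_heads=10)
    out = block(torch.randn(2, 320, 56, 56))
    print(out.shape)  # torch.Size([2, 320, 56, 56])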