try swin
This commit is contained in:
parent 63ccb4ec75
commit 22d44d1a99
@@ -786,6 +786,7 @@ class WindowAttention(nn.Module):
             x: input features with shape of (num_windows*B, N, C)
             mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None
         """
+        print(x.shape)
         B_, N, C = x.shape
         qkv_bias = None
         if self.q_bias is not None:
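
For context on the new debug print: the docstring says this layer expects windowed input of shape (num_windows*B, N, C). A minimal sketch of what the added print(x.shape) would report, with hypothetical sizes (not taken from this repo):

import torch

# Illustrative shapes only: with window_size=7, each window holds
# N = 7*7 = 49 tokens, so WindowAttention expects (num_windows*B, N, C).
num_windows_times_B, N, C = 64, 49, 320  # hypothetical sizes
x = torch.randn(num_windows_times_B, N, C)
print(x.shape)  # torch.Size([64, 49, 320]) -- what the added print would show
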
@@ -862,7 +863,7 @@ class PoolFormerBlock(nn.Module):
         self.norm1 = norm_layer(dim)
         #self.token_mixer = Pooling(pool_size=pool_size)
         # self.token_mixer = FNetBlock()
-        self.token_mixer = WindowAttention(dim=dim, window_size=to_2tuple(7), num_heads=1, pretrained_window_size=[5,5])
+        self.token_mixer = WindowAttention(dim=dim, window_size=to_2tuple(7), num_heads=10, pretrained_window_size=[5,5])
         self.norm2 = norm_layer(dim)
         mlp_hidden_dim = int(dim * mlp_ratio)
         self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim,
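
One caveat worth flagging with the num_heads change: standard multi-head attention (Swin's WindowAttention included) splits dim evenly across heads, so num_heads=10 only fits stages whose channel width is a multiple of 10. A quick check, assuming the default PoolFormer stage widths purely as an illustration:

# Hedged sanity check, not from this repo: head splitting requires
# dim % num_heads == 0. The widths below are the PoolFormer-S12 defaults,
# used here only as an illustrative assumption.
embed_dims = [64, 128, 320, 512]

for dim in embed_dims:
    for num_heads in (1, 10):
        ok = dim % num_heads == 0
        print(f"dim={dim:3d}, num_heads={num_heads:2d}: "
              f"{'ok' if ok else 'not divisible'}")

Under that assumption, num_heads=1 always divides, while num_heads=10 would fail the reshape in stages whose width is not a multiple of 10.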