try swin
@@ -790,6 +790,7 @@ class WindowAttention(nn.Module):
         B_, C, N, _ = x.shape
         x = x.reshape(B_, C, N * N)
         B_, C, N = x.shape
+        x = x.reshape(B_, N, C)
         qkv_bias = None
         if self.q_bias is not None:
             qkv_bias = torch.cat((self.q_bias, torch.zeros_like(self.v_bias, requires_grad=False), self.v_bias))
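For context, here is a minimal, self-contained sketch of how these reshapes could feed the rest of a Swin-style window attention forward pass. Only the three reshape lines and the qkv_bias construction mirror the hunk above; the tensor sizes, the qkv projection weight, and the attention math are illustrative assumptions modeled on the common Swin WindowAttention pattern, not part of this commit.

```python
import torch
import torch.nn.functional as F

# Illustrative sizes (assumptions, not from the commit):
# B_ windows, C channels, N x N spatial window.
B_, C, N = 4, 96, 7
num_heads = 3
head_dim = C // num_heads

x = torch.randn(B_, C, N, N)   # windowed feature map

# The reshape sequence visible in the hunk.
x = x.reshape(B_, C, N * N)    # flatten the spatial window: (B_, C, N*N)
B_, C, N = x.shape             # N now holds the token count (N*N)
x = x.reshape(B_, N, C)        # the line added by this commit (a reshape, not a permute)

# Assumed continuation: qkv projection with the decomposed bias seen in the
# context lines (q_bias, zero k bias, v_bias), then standard multi-head attention.
qkv_weight = torch.randn(3 * C, C)
q_bias = torch.zeros(C)
v_bias = torch.zeros(C)
qkv_bias = torch.cat((q_bias, torch.zeros_like(v_bias, requires_grad=False), v_bias))

qkv = F.linear(x, qkv_weight, qkv_bias)                        # (B_, N, 3*C)
qkv = qkv.reshape(B_, N, 3, num_heads, head_dim).permute(2, 0, 3, 1, 4)
q, k, v = qkv[0], qkv[1], qkv[2]                               # each (B_, num_heads, N, head_dim)

attn = (q @ k.transpose(-2, -1)) * head_dim ** -0.5            # (B_, num_heads, N, N)
out = (attn.softmax(dim=-1) @ v).transpose(1, 2).reshape(B_, N, C)
print(out.shape)                                               # torch.Size([4, 49, 96])
```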