From b9efe68d3c497d4e76126a27ac791dcebf6f24f3 Mon Sep 17 00:00:00 2001
From: thanhvc3
Date: Sat, 27 Apr 2024 11:12:52 +0700
Subject: [PATCH] try swin

---
 models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/models.py b/models.py
index deb2338..73c5f3b 100644
--- a/models.py
+++ b/models.py
@@ -800,7 +800,7 @@ class WindowAttention(nn.Module):
 
         # cosine attention
         attn = (F.normalize(q, dim=-1) @ F.normalize(k, dim=-1).transpose(-2, -1))
-        logit_scale = torch.clamp(self.logit_scale, max=torch.log(torch.tensor(1. / 0.01))).exp()
+        logit_scale = torch.clamp(self.logit_scale, max=torch.log(torch.tensor(1. / 0.01)).cuda()).exp()
         attn = attn * logit_scale
 
         relative_position_bias_table = self.cpb_mlp(self.relative_coords_table).view(-1, self.num_heads)
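
Note on the change: torch.clamp raises an "Expected all tensors to be on the same device" error when self.logit_scale sits on the GPU while the max tensor created by torch.tensor(1. / 0.01) defaults to the CPU. Appending .cuda() moves the max onto the GPU, but it hard-codes a CUDA device and will fail on CPU-only runs. Below is a device-agnostic sketch of the same clamp, assuming logit_scale is a learnable per-head temperature as in Swin Transformer V2; the tensor shapes and num_heads value are illustrative, not taken from models.py. Passing a plain Python float via math.log lets torch.clamp skip tensor device handling entirely:

    import math
    import torch
    import torch.nn as nn
    import torch.nn.functional as F

    # Hypothetical minimal reproduction of the cosine-attention scaling step.
    num_heads = 4
    logit_scale = nn.Parameter(torch.log(10 * torch.ones(num_heads, 1, 1)))

    q = torch.randn(2, num_heads, 49, 32)  # (batch, heads, tokens, head_dim)
    k = torch.randn(2, num_heads, 49, 32)

    # Cosine attention: similarity of L2-normalized queries and keys.
    attn = F.normalize(q, dim=-1) @ F.normalize(k, dim=-1).transpose(-2, -1)

    # Clamp the learned temperature to at most log(1/0.01) = log(100).
    # A Python float for max avoids creating a CPU tensor, so no .cuda()
    # (or .to(device)) call is needed and the code runs on any device.
    scale = torch.clamp(logit_scale, max=math.log(1. / 0.01)).exp()
    attn = attn * scale  # (num_heads, 1, 1) broadcasts over (2, heads, 49, 49)

An equivalent tensor-based form that stays closer to the patch would be torch.clamp(self.logit_scale, max=torch.log(torch.tensor(1. / 0.01)).to(self.logit_scale.device)).exp(), which keeps working when the model is moved between devices.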