From 6ec8560436159db49fa8d209267d6feaf6d8c43b Mon Sep 17 00:00:00 2001
From: Arun
Date: Wed, 7 Aug 2024 11:35:46 +0530
Subject: [PATCH] Update hieradet.py

Not used:
    head_dim = dim_out // num_heads
    self.scale = head_dim**-0.5
F.scaled_dot_product_attention takes care of this automatically.
---
 sam2/modeling/backbones/hieradet.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/sam2/modeling/backbones/hieradet.py b/sam2/modeling/backbones/hieradet.py
index 1ae7d4c..52051ff 100644
--- a/sam2/modeling/backbones/hieradet.py
+++ b/sam2/modeling/backbones/hieradet.py
@@ -46,11 +46,8 @@ class MultiScaleAttention(nn.Module):
         self.dim = dim
         self.dim_out = dim_out
-        self.num_heads = num_heads
-        head_dim = dim_out // num_heads
-        self.scale = head_dim**-0.5
-
+        self.q_pool = q_pool
         self.qkv = nn.Linear(dim, dim_out * 3)
         self.proj = nn.Linear(dim_out, dim_out)