1. fix warnings.
2. support CPU mode.
3. update README.
@@ -111,11 +111,11 @@ class PositionEmbeddingSineHW(nn.Module):
         x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale

         dim_tx = torch.arange(self.num_pos_feats, dtype=torch.float32, device=x.device)
-        dim_tx = self.temperatureW ** (2 * (dim_tx // 2) / self.num_pos_feats)
+        dim_tx = self.temperatureW ** (2 * (torch.div(dim_tx, 2, rounding_mode='floor')) / self.num_pos_feats)
         pos_x = x_embed[:, :, :, None] / dim_tx

         dim_ty = torch.arange(self.num_pos_feats, dtype=torch.float32, device=x.device)
-        dim_ty = self.temperatureH ** (2 * (dim_ty // 2) / self.num_pos_feats)
+        dim_ty = self.temperatureH ** (2 * (torch.div(dim_ty, 2, rounding_mode='floor')) / self.num_pos_feats)
         pos_y = y_embed[:, :, :, None] / dim_ty

         pos_x = torch.stack(
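The `dim_tx // 2` form is what triggers PyTorch's `__floordiv__` deprecation warning on tensors; `torch.div(..., rounding_mode='floor')` is the documented replacement and is numerically identical. A minimal standalone check (not part of the patch):

```python
import torch

dim_t = torch.arange(8, dtype=torch.float32)

# Old form: Tensor.__floordiv__, deprecated on the PyTorch versions
# this commit targets (emits a UserWarning).
old = dim_t // 2
# New form: explicit floor division, warning-free.
new = torch.div(dim_t, 2, rounding_mode='floor')

assert torch.equal(old, new)  # both give 0., 0., 1., 1., 2., 2., 3., 3.
```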
@@ -25,7 +25,10 @@ from torch.autograd import Function
 from torch.autograd.function import once_differentiable
 from torch.nn.init import constant_, xavier_uniform_

-from groundingdino import _C
+try:
+    from groundingdino import _C
+except:
+    warnings.warn("Failed to load custom C++ ops. Running on CPU mode Only!")


 # helpers
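Wrapping the `_C` import makes the compiled C++/CUDA extension optional instead of a hard dependency, which is what enables CPU-only installs. A sketch of the same pattern under assumed names (`mypkg` and `HAS_NATIVE_OPS` are illustrative, not from the patch):

```python
import warnings

# Optional native-extension import: keep working on CPU-only installs
# where the compiled ops were never built.
try:
    from mypkg import _C  # compiled C++/CUDA kernels (hypothetical package)
    HAS_NATIVE_OPS = True
except ImportError:
    _C = None
    HAS_NATIVE_OPS = False
    warnings.warn("Failed to load custom C++ ops. Running in CPU mode only!")
```

Catching `ImportError` (rather than the patch's bare `except:`) avoids masking unrelated failures, such as an exception raised inside the extension module itself.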
@@ -323,6 +326,7 @@ class MultiScaleDeformableAttention(nn.Module):
                     reference_points.shape[-1]
                 )
             )

+        if torch.cuda.is_available() and value.is_cuda:
             halffloat = False
             if value.dtype == torch.float16:
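The new guard routes execution to the custom kernel only when CUDA is available and the tensor actually lives on the GPU. A runnable sketch of that dispatch, with stand-in functions (`fused_op_cuda` and `fused_op_pytorch` are hypothetical names, not the repo's):

```python
import torch

def fused_op_cuda(x: torch.Tensor) -> torch.Tensor:
    # stand-in for a compiled kernel such as the _C deformable-attention op
    return x * 2

def fused_op_pytorch(x: torch.Tensor) -> torch.Tensor:
    # pure PyTorch fallback that runs on any device, including CPU
    return x * 2

def fused_op(x: torch.Tensor) -> torch.Tensor:
    # mirrors the patched condition: CUDA must be available AND the
    # input must already be a CUDA tensor
    if torch.cuda.is_available() and x.is_cuda:
        return fused_op_cuda(x)
    return fused_op_pytorch(x)

print(fused_op(torch.ones(3)))  # CPU tensor -> falls back to the PyTorch path
```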
@@ -206,7 +206,7 @@ def gen_sineembed_for_position(pos_tensor):
|
||||
# sineembed_tensor = torch.zeros(n_query, bs, 256)
|
||||
scale = 2 * math.pi
|
||||
dim_t = torch.arange(128, dtype=torch.float32, device=pos_tensor.device)
|
||||
dim_t = 10000 ** (2 * (dim_t // 2) / 128)
|
||||
dim_t = 10000 ** (2 * (torch.div(dim_t, 2, rounding_mode='floor')) / 128)
|
||||
x_embed = pos_tensor[:, :, 0] * scale
|
||||
y_embed = pos_tensor[:, :, 1] * scale
|
||||
pos_x = x_embed[:, :, None] / dim_t
|
||||
|
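For context, these lines build a standard sinusoidal position embedding. A self-contained sketch of the 2-D case using the patched `torch.div` form (`sine_embed_xy` is an illustrative name; the repo's `gen_sineembed_for_position` also handles 4-dim box tensors):

```python
import math
import torch

def sine_embed_xy(pos: torch.Tensor) -> torch.Tensor:
    """pos: normalized (x, y) in [0, 1], shape [n_query, bs, 2] -> [n_query, bs, 256]."""
    scale = 2 * math.pi
    dim_t = torch.arange(128, dtype=torch.float32, device=pos.device)
    dim_t = 10000 ** (2 * torch.div(dim_t, 2, rounding_mode='floor') / 128)
    x_embed = pos[:, :, 0] * scale
    y_embed = pos[:, :, 1] * scale
    pos_x = x_embed[:, :, None] / dim_t
    pos_y = y_embed[:, :, None] / dim_t
    # sin on even channels, cos on odd channels, then concat y before x
    pos_x = torch.stack((pos_x[:, :, 0::2].sin(), pos_x[:, :, 1::2].cos()), dim=3).flatten(2)
    pos_y = torch.stack((pos_y[:, :, 0::2].sin(), pos_y[:, :, 1::2].cos()), dim=3).flatten(2)
    return torch.cat((pos_y, pos_x), dim=2)

print(sine_embed_xy(torch.rand(10, 2, 2)).shape)  # torch.Size([10, 2, 256])
```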