Skip to content

Commit

Permalink
xx: change back downsampler1d conv to 2d (pseudo), make the whole neck pseudo 2D
Browse files Browse the repository at this point in the history
  • Loading branch information
makecent committed Jun 29, 2023
1 parent b846003 commit 90195fb
Show file tree
Hide file tree
Showing 3 changed files with 6 additions and 15 deletions.
1 change: 0 additions & 1 deletion configs/tadtr_my.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,6 @@
kernel_size=1,
out_channels=256,
act_cfg=None,
conv_cfg=dict(type='Conv1d'),
norm_cfg=dict(type='GN', num_groups=32),
num_outs=4)
# dict(type='FPN',
Expand Down
7 changes: 0 additions & 7 deletions my_modules/detector/deformable_detr.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,13 +32,6 @@ def _init_layers(self) -> None:
if not self.as_two_stage:
self.reference_points_fc = Pseudo2DLinear(self.embed_dims, 1)

def extract_feat(self, batch_inputs: Tensor) -> Tuple[Tensor]:
    """Extract multi-level features and re-add a pseudo height dimension.

    NOTE(review): this override was deleted by this commit (the pseudo-2D
    handling moved into the neck); documented here as it appeared before
    removal.

    Args:
        batch_inputs: batched input tensor passed straight to the backbone.
            Presumably shaped (N, C, T) for 1-D temporal input — TODO confirm
            against the backbone's expected layout.

    Returns:
        Tuple of per-level feature tensors, each with a singleton dim
        inserted at axis 2 (so a (N, C, T) level becomes (N, C, 1, T)),
        making 1-D features consumable by 2-D (pseudo-2D) downstream ops.
    """
    x = self.backbone(batch_inputs)
    # Neck is optional; `with_neck` is a property defined on the base
    # detector class (not visible in this chunk).
    if self.with_neck:
        x = self.neck(x)
    # insert the pseudo height dimension
    x = tuple(i.unsqueeze(2) for i in x)
    return x

def forward_transformer(self,
img_feats: Tuple[Tensor],
Expand Down
13 changes: 6 additions & 7 deletions my_modules/neck/temporal_downsampler.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@ def __init__(self,
num_levels=4,
in_channels=2048,
out_channels=512,
kernel_sizes=3,
strides=2,
paddings=1,
kernel_sizes=(1, 3),
strides=(1, 2),
paddings=(0, 1),
out_indices=(0, 1, 2, 3),
mask=False,
):
Expand All @@ -36,7 +36,7 @@ def __init__(self,
kernel_sizes,
strides,
paddings,
conv_cfg=dict(type='Conv1d'),
conv_cfg=dict(type='Conv2d'),
norm_cfg=dict(type='SyncBN'),
act_cfg=dict(type='ReLU')))
in_channels = out_channels
Expand All @@ -61,16 +61,15 @@ def __init__(self,

def forward(self, x):
# x: N, C, 1, T
x = x.squeeze(2)

if self.mask:
B, C, T = x.size()
B, C, _, T = x.size()
mask = x.new_zeros((B, 1, T))
for feat_id, feat in enumerate(x):
# find valid feature length for each sample in the batch
# feat is of shape (C, T), we find the tails that are all zeros and take it as the padding
valid_feat_len = T - (feat == 0).all(dim=0).sum()
mask[feat_id, :, :valid_feat_len] = 1
mask[feat_id, :, :, :valid_feat_len] = 1

outs = []
if 0 in self.out_indices:
Expand Down

0 comments on commit 90195fb

Please sign in to comment.