add kwargs to transformer
LYM-fire committed Sep 20, 2022
1 parent 2edd0c9 commit a265c1a
Showing 1 changed file with 9 additions and 5 deletions.
mmdet/models/layers/transformer.py: 14 changes (9 additions & 5 deletions)
@@ -391,7 +391,7 @@ def inverse_sigmoid(x, eps=1e-5):

 class DetrTransformerEncoder(BaseModule):

-    def __init__(self, layer_cfg=None, num_layers=None, init_cfg=None):
+    def __init__(self, layer_cfg=None, num_layers=None, init_cfg=None, **kwargs):

         super().__init__(init_cfg=init_cfg)
         if isinstance(layer_cfg, dict):
@@ -423,7 +423,8 @@ def __init__(self,
                  num_layers=None,
                  post_norm_cfg=dict(type='LN'),
                  return_intermediate=True,
-                 init_cfg=None):
+                 init_cfg=None,
+                 **kwargs):
         super().__init__(init_cfg=init_cfg)
         if isinstance(layer_cfg, dict):
             layer_cfg = [copy.deepcopy(layer_cfg) for _ in range(num_layers)]
@@ -475,7 +476,8 @@ def __init__(self,
                      act_cfg=dict(type='ReLU', inplace=True)),
                  norm_cfg=dict(type='LN'),
                  init_cfg=None,
-                 batch_first=False):
+                 batch_first=False,
+                 **kwargs):

         super().__init__(init_cfg=init_cfg)
         if 'batch_first' in self_attn_cfg:  # TODO
@@ -535,7 +537,8 @@ def __init__(self,
                  ),
                  norm_cfg=dict(type='LN'),
                  init_cfg=None,
-                 batch_first=False):
+                 batch_first=False,
+                 **kwargs):

         super().__init__(init_cfg=init_cfg)
         for attn_cfg in (self_attn_cfg, cross_attn_cfg):
@@ -635,7 +638,8 @@ def __init__(self,
                  with_proj=True,
                  act_cfg=dict(type='ReLU', inplace=True),
                  norm_cfg=dict(type='LN'),
-                 init_cfg=None):
+                 init_cfg=None,
+                 **kwargs):
         super(DynamicConv, self).__init__(init_cfg)
         self.in_channels = in_channels
         self.feat_channels = feat_channels
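The change is the same in every hunk: each __init__ gains a trailing **kwargs. The practical effect is that unexpected keyword arguments no longer raise a TypeError; they are absorbed and ignored, which allows these modules to be built from config dicts that also carry keys intended for other components. Below is a minimal, self-contained sketch of that behavior; the Encoder class is a stand-in for illustration, not the actual mmdet DetrTransformerEncoder.

# Stand-in class illustrating the pattern this commit applies; the real
# modules live in mmdet/models/layers/transformer.py.
class Encoder:

    def __init__(self, layer_cfg=None, num_layers=None, init_cfg=None, **kwargs):
        # Without **kwargs, an unexpected key such as 'batch_first' would
        # raise: TypeError: __init__() got an unexpected keyword argument.
        self.layer_cfg = layer_cfg
        self.num_layers = num_layers
        self.init_cfg = init_cfg  # extra keys in kwargs are silently dropped


cfg = dict(
    layer_cfg=dict(type='DetrTransformerEncoderLayer'),
    num_layers=6,
    batch_first=True)  # hypothetical extra key meant for another component

encoder = Encoder(**cfg)
print(encoder.num_layers)  # -> 6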
