remove unused is_decoder parameter in DetrAttention (#24226)
* #24161: remove unused is_decoder parameter in DetrAttention

* #24161: fix check_repository_consistency failure
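The second message refers to the repository-consistency tooling: MaskFormer and Table Transformer keep local copies of the DETR attention class marked with "# Copied from" comments, and the consistency check compares them against the DETR source, which is why the same one-line deletion is repeated across several files below. A minimal sketch of that convention, assuming the usual transformers comment format (the exact comment text in each file may differ):

```python
from torch import nn


# Copied from transformers.models.detr.modeling_detr.DetrAttention
class DetrAttention(nn.Module):
    # The body of a "Copied from" class must stay identical to its source;
    # the repo-consistency check (utils/check_copies.py) fails otherwise,
    # so removing `is_decoder` from DETR also means removing it from every copy.
    ...
```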
JayL0321 authored Jun 15, 2023
1 parent 33196b4 commit a611ac9
Showing 4 changed files with 0 additions and 10 deletions.
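Taken together, the hunks below converge on the same constructor signature. Reconstructed from the diff (parameter list only; the rest of the class body is unchanged and omitted here), it looks roughly like this:

```python
from torch import nn


class DetrAttention(nn.Module):
    # Signature after this commit: `is_decoder` is no longer accepted.
    def __init__(
        self,
        embed_dim: int,
        num_heads: int,
        dropout: float = 0.0,
        bias: bool = True,
    ):
        super().__init__()
        # ... attention projections etc. follow, unchanged by this commit
```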
@@ -534,7 +534,6 @@ def __init__(
         embed_dim: int,
         num_heads: int,
         dropout: float = 0.0,
-        is_decoder: bool = False,
         bias: bool = True,
     ):
         super().__init__()
3 changes: 0 additions & 3 deletions src/transformers/models/detr/modeling_detr.py
@@ -499,7 +499,6 @@ def __init__(
         embed_dim: int,
         num_heads: int,
         dropout: float = 0.0,
-        is_decoder: bool = False,
         bias: bool = True,
     ):
         super().__init__()
@@ -697,7 +696,6 @@ def __init__(self, config: DetrConfig):
             embed_dim=self.embed_dim,
             num_heads=config.decoder_attention_heads,
             dropout=config.attention_dropout,
-            is_decoder=True,
         )
         self.dropout = config.dropout
         self.activation_fn = ACT2FN[config.activation_function]
@@ -708,7 +706,6 @@ def __init__(self, config: DetrConfig):
             self.embed_dim,
             config.decoder_attention_heads,
             dropout=config.attention_dropout,
-            is_decoder=True,
         )
         self.encoder_attn_layer_norm = nn.LayerNorm(self.embed_dim)
         self.fc1 = nn.Linear(self.embed_dim, config.decoder_ffn_dim)
3 changes: 0 additions & 3 deletions src/transformers/models/maskformer/modeling_maskformer.py
@@ -416,7 +416,6 @@ def __init__(
         embed_dim: int,
         num_heads: int,
         dropout: float = 0.0,
-        is_decoder: bool = False,
         bias: bool = True,
     ):
         super().__init__()
@@ -545,7 +544,6 @@ def __init__(self, config: DetrConfig):
             embed_dim=self.embed_dim,
             num_heads=config.decoder_attention_heads,
             dropout=config.attention_dropout,
-            is_decoder=True,
         )
         self.dropout = config.dropout
         self.activation_fn = ACT2FN[config.activation_function]
@@ -556,7 +554,6 @@ def __init__(self, config: DetrConfig):
             self.embed_dim,
             config.decoder_attention_heads,
             dropout=config.attention_dropout,
-            is_decoder=True,
         )
         self.encoder_attn_layer_norm = nn.LayerNorm(self.embed_dim)
         self.fc1 = nn.Linear(self.embed_dim, config.decoder_ffn_dim)
@@ -440,7 +440,6 @@ def __init__(
         embed_dim: int,
         num_heads: int,
         dropout: float = 0.0,
-        is_decoder: bool = False,
         bias: bool = True,
     ):
         super().__init__()
@@ -642,7 +641,6 @@ def __init__(self, config: TableTransformerConfig):
             embed_dim=self.embed_dim,
             num_heads=config.decoder_attention_heads,
             dropout=config.attention_dropout,
-            is_decoder=True,
         )
         self.dropout = config.dropout
         self.activation_fn = ACT2FN[config.activation_function]
@@ -653,7 +651,6 @@ def __init__(self, config: TableTransformerConfig):
             self.embed_dim,
             config.decoder_attention_heads,
             dropout=config.attention_dropout,
-            is_decoder=True,
         )
         self.encoder_attn_layer_norm = nn.LayerNorm(self.embed_dim)
         self.fc1 = nn.Linear(self.embed_dim, config.decoder_ffn_dim)
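None of these classes ever read the parameter, so the deletion changes no behavior; the only observable difference is that a call site still passing the keyword now fails at construction time. A hypothetical usage sketch (illustrative values, not taken from the repository):

```python
from transformers.models.detr.modeling_detr import DetrAttention

# Fine after this commit: the remaining keyword arguments are unchanged.
attn = DetrAttention(embed_dim=256, num_heads=8, dropout=0.0, bias=True)

# A stale call site that still passed the removed argument would now raise
#   TypeError: __init__() got an unexpected keyword argument 'is_decoder'
# attn = DetrAttention(embed_dim=256, num_heads=8, is_decoder=True)
```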
