Skip to content

Commit

Permalink
remove unused activation dropout (huggingface#18842)
Browse files Browse the repository at this point in the history
  • Loading branch information
shijie-wu authored and oneraghavan committed Sep 26, 2022
1 parent 72cdede commit 1eabbe1
Show file tree
Hide file tree
Showing 2 changed files with 0 additions and 6 deletions.
4 changes: 0 additions & 4 deletions src/transformers/models/opt/configuration_opt.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,8 +67,6 @@ class OPTConfig(PretrainedConfig):
The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
attention_dropout (`float`, *optional*, defaults to 0.0):
The dropout ratio for the attention probabilities.
activation_dropout (`float`, *optional*, defaults to 0.0):
The dropout ratio for activations inside the fully connected layer.
layerdrop (`float`, *optional*, defaults to 0.0):
The LayerDrop probability. See the [LayerDrop paper](https://arxiv.org/abs/1909.11556) for more
details.
Expand Down Expand Up @@ -106,7 +104,6 @@ def __init__(
word_embed_proj_dim=None,
dropout=0.1,
attention_dropout=0.0,
activation_dropout=0.0,
num_attention_heads=12,
activation_function="relu",
layerdrop=0.0,
Expand All @@ -132,7 +129,6 @@ def __init__(
self.num_hidden_layers = num_hidden_layers
self.dropout = dropout
self.attention_dropout = attention_dropout
self.activation_dropout = activation_dropout
self.activation_function = activation_function
self.init_std = init_std
self.layerdrop = layerdrop
Expand Down
2 changes: 0 additions & 2 deletions src/transformers/models/opt/modeling_opt.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,8 +281,6 @@ def __init__(self, config: OPTConfig):
self.dropout = config.dropout
self.activation_fn = ACT2FN[config.activation_function]

self.activation_dropout = config.activation_dropout

self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim)
self.fc1 = nn.Linear(self.embed_dim, config.ffn_dim)
self.fc2 = nn.Linear(config.ffn_dim, self.embed_dim)
Expand Down

0 comments on commit 1eabbe1

Please sign in to comment.