
Commit 62ab76e
make fixup
Rocketknight1 committed Dec 4, 2023
1 parent 4492de7 commit 62ab76e
Showing 1 changed file with 3 additions and 1 deletion.
4 changes: 3 additions & 1 deletion src/transformers/models/t5/modeling_tf_t5.py
@@ -641,7 +641,9 @@ def __init__(self, config, embed_tokens=None, **kwargs):
             TFT5Block(config, has_relative_attention_bias=bool(i == 0), name=f"block_._{i}")
             for i in range(config.num_layers)
         ]
-        self.final_layer_norm = TFT5LayerNorm(config.d_model, epsilon=config.layer_norm_epsilon, name="final_layer_norm")
+        self.final_layer_norm = TFT5LayerNorm(
+            config.d_model, epsilon=config.layer_norm_epsilon, name="final_layer_norm"
+        )
         self.dropout = tf.keras.layers.Dropout(config.dropout_rate)

     def _prune_heads(self, heads_to_prune):
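Context for the change: in the transformers repo, `make fixup` runs the project's automated formatting and consistency checks on modified files, so the reflow of the `TFT5LayerNorm` call above is purely cosmetic (the original line exceeded the style line-length limit) and behavior is unchanged. As a side note on what the two arguments configure: T5 uses an RMS-style layer norm (no mean subtraction, no bias term), so the constructor only needs the hidden size (`config.d_model`) and a numerical-stability epsilon (`config.layer_norm_epsilon`). Below is a minimal sketch of such a layer; the class name `T5StyleLayerNorm` and the hidden size in the usage line are illustrative, not the library's implementation:

```python
import tensorflow as tf


class T5StyleLayerNorm(tf.keras.layers.Layer):
    """Hypothetical RMS-style layer norm: scales by the root mean square of
    the activations, with no mean subtraction and no bias term."""

    def __init__(self, hidden_size, epsilon=1e-6, **kwargs):
        super().__init__(**kwargs)
        self.epsilon = epsilon
        # Learnable per-channel scale, initialized to ones.
        self.weight = self.add_weight(name="weight", shape=(hidden_size,), initializer="ones")

    def call(self, hidden_states):
        # Mean of squares over the hidden dimension, then rescale by 1/rms.
        variance = tf.math.reduce_mean(tf.math.square(hidden_states), axis=-1, keepdims=True)
        return self.weight * hidden_states * tf.math.rsqrt(variance + self.epsilon)


# Usage mirroring the diffed call, with an illustrative hidden size of 512:
layer_norm = T5StyleLayerNorm(512, epsilon=1e-6, name="final_layer_norm")
print(layer_norm(tf.random.normal((2, 8, 512))).shape)  # (2, 8, 512)
```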
