Add type hints for M2M (#18998)
* added type hints

* fixed typo
daspartho authored Sep 13, 2022
1 parent 4bd36f1 commit 2848c9c
Showing 1 changed file: src/transformers/models/m2m_100/modeling_m2m_100.py (20 additions, 20 deletions)
@@ -17,7 +17,7 @@

 import math
 import random
-from typing import Optional, Tuple, Union
+from typing import List, Optional, Tuple, Union

 import torch
 from torch import nn
@@ -712,13 +712,13 @@ def __init__(self, config: M2M100Config, embed_tokens: Optional[nn.Embedding] =

     def forward(
         self,
-        input_ids=None,
-        attention_mask=None,
-        head_mask=None,
-        inputs_embeds=None,
-        output_attentions=None,
-        output_hidden_states=None,
-        return_dict=None,
+        input_ids: Optional[torch.Tensor] = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        head_mask: Optional[torch.Tensor] = None,
+        inputs_embeds: Optional[torch.Tensor] = None,
+        output_attentions: Optional[bool] = None,
+        output_hidden_states: Optional[bool] = None,
+        return_dict: Optional[bool] = None,
     ):
         r"""
         Args:
@@ -887,18 +887,18 @@ def __init__(self, config: M2M100Config, embed_tokens: Optional[nn.Embedding] =

     def forward(
         self,
-        input_ids=None,
-        attention_mask=None,
-        encoder_hidden_states=None,
-        encoder_attention_mask=None,
-        head_mask=None,
-        cross_attn_head_mask=None,
-        past_key_values=None,
-        inputs_embeds=None,
-        use_cache=None,
-        output_attentions=None,
-        output_hidden_states=None,
-        return_dict=None,
+        input_ids: Optional[torch.Tensor] = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        encoder_hidden_states: Optional[torch.Tensor] = None,
+        encoder_attention_mask: Optional[torch.Tensor] = None,
+        head_mask: Optional[torch.Tensor] = None,
+        cross_attn_head_mask: Optional[torch.Tensor] = None,
+        past_key_values: Optional[List[torch.FloatTensor]] = None,
+        inputs_embeds: Optional[torch.Tensor] = None,
+        use_cache: Optional[bool] = None,
+        output_attentions: Optional[bool] = None,
+        output_hidden_states: Optional[bool] = None,
+        return_dict: Optional[bool] = None,
     ):
         r"""
         Args:
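The pattern applied throughout this diff is the standard typing annotation of keyword arguments that default to None, which is also why List joins the imports: it types the cached past_key_values. A minimal, self-contained sketch of the same pattern, with a hypothetical function name and body that are not taken from the repository:

from typing import List, Optional

import torch


def forward_stub(
    input_ids: Optional[torch.Tensor] = None,
    past_key_values: Optional[List[torch.FloatTensor]] = None,
    use_cache: Optional[bool] = None,
) -> torch.Tensor:
    # Optional[X] means "X or None", so None must be handled explicitly.
    if input_ids is None:
        raise ValueError("input_ids is required in this sketch")
    # Echo the input; a real forward pass would run the model here.
    return input_ids


print(forward_stub(input_ids=torch.tensor([[2, 5, 7]])))

With annotations like these in place, a static checker such as mypy can flag a call like forward_stub(input_ids="not a tensor") before the code runs, which is the practical benefit of the hints added in this commit.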
