Super tiny fix 12 typos about "with with" (#29926)
* with with

* style
fzyzcjy authored and Ita Zaporozhets committed May 14, 2024
1 parent 9d26e3b commit d1f8067
Showing 12 changed files with 13 additions and 21 deletions.
2 changes: 1 addition & 1 deletion docs/source/en/model_doc/fastspeech2_conformer.md
@@ -24,7 +24,7 @@ This model was contributed by [Connor Henderson](https://huggingface.co/connor-h


## 🤗 Model Architecture
- FastSpeech2's general structure with a Mel-spectrogram decoder was implemented, and the traditional transformer blocks were replaced with with conformer blocks as done in the ESPnet library.
+ FastSpeech2's general structure with a Mel-spectrogram decoder was implemented, and the traditional transformer blocks were replaced with conformer blocks as done in the ESPnet library.

#### FastSpeech2 Model Architecture
![FastSpeech2 Model Architecture](https://www.microsoft.com/en-us/research/uploads/prod/2021/04/fastspeech2-1.png)
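For orientation, here is a minimal text-to-spectrogram sketch using this model's classes. It assumes the ESPnet-converted checkpoints on the Hub (`espnet/fastspeech2_conformer` and `espnet/fastspeech2_conformer_hifigan`); treat the checkpoint names and output keys as assumptions rather than guarantees.

```python
from transformers import (
    FastSpeech2ConformerTokenizer,
    FastSpeech2ConformerModel,
    FastSpeech2ConformerHifiGan,
)

# Text -> phoneme ids
tokenizer = FastSpeech2ConformerTokenizer.from_pretrained("espnet/fastspeech2_conformer")
inputs = tokenizer("Hello, my dog is cute.", return_tensors="pt")

# Phoneme ids -> Mel-spectrogram (conformer blocks inside, as described above)
model = FastSpeech2ConformerModel.from_pretrained("espnet/fastspeech2_conformer")
spectrogram = model(inputs["input_ids"], return_dict=True)["spectrogram"]

# Mel-spectrogram -> waveform with the companion HiFi-GAN vocoder
vocoder = FastSpeech2ConformerHifiGan.from_pretrained("espnet/fastspeech2_conformer_hifigan")
waveform = vocoder(spectrogram)
```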
4 changes: 2 additions & 2 deletions src/transformers/models/align/modeling_align.py
@@ -400,7 +400,7 @@ def forward(self, hidden_states: torch.FloatTensor) -> torch.Tensor:
return hidden_states


- # Copied from transformers.models.efficientnet.modeling_efficientnet.EfficientNetDepthwiseLayer with with EfficientNet->AlignVision
+ # Copied from transformers.models.efficientnet.modeling_efficientnet.EfficientNetDepthwiseLayer with EfficientNet->AlignVision
class AlignVisionDepthwiseLayer(nn.Module):
r"""
This corresponds to the depthwise convolution phase of each block in the original implementation.
@@ -440,7 +440,7 @@ def forward(self, hidden_states: torch.FloatTensor) -> torch.Tensor:
return hidden_states


- # Copied from transformers.models.efficientnet.modeling_efficientnet.EfficientNetSqueezeExciteLayer with with EfficientNet->AlignVision
+ # Copied from transformers.models.efficientnet.modeling_efficientnet.EfficientNetSqueezeExciteLayer with EfficientNet->AlignVision
class AlignVisionSqueezeExciteLayer(nn.Module):
r"""
This corresponds to the Squeeze and Excitement phase of each block in the original implementation.
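A brief note on the comments fixed in this file: in transformers, a `# Copied from <module>.<Class> with A->B` marker indicates that the class below it is kept as an automatically checked copy of the referenced class, with the identifier substitution `A->B` applied when copying; consistency is enforced by the repository's copy-checking tooling (e.g. `make fix-copies`). The duplicated "with" here sits in the marker text itself, not in the copied code.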
2 changes: 1 addition & 1 deletion src/transformers/models/bart/modeling_bart.py
@@ -2093,7 +2093,7 @@ def forward(self, *args, **kwargs):

@add_start_docstrings(
"""
- BART decoder with with a language modeling head on top (linear layer with weights tied to the input embeddings).
+ BART decoder with a language modeling head on top (linear layer with weights tied to the input embeddings).
""",
BART_START_DOCSTRING,
)
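The docstring above describes a language modeling head whose weights are tied to the input embeddings. As a stand-alone illustration of that tying pattern (a sketch, not the actual BartForCausalLM code):

```python
import torch.nn as nn

# Hypothetical sizes, for illustration only.
vocab_size, hidden_size = 50265, 768

embed_tokens = nn.Embedding(vocab_size, hidden_size)      # input embeddings
lm_head = nn.Linear(hidden_size, vocab_size, bias=False)  # LM head projecting hidden states to vocab logits
lm_head.weight = embed_tokens.weight                       # "tied": both modules share one parameter matrix
```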
@@ -154,8 +154,7 @@ def _rope_scaling_validation(self):

if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
raise ValueError(
- "`rope_scaling` must be a dictionary with with two fields, `type` and `factor`, "
- f"got {self.rope_scaling}"
+ "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, " f"got {self.rope_scaling}"
)
rope_scaling_type = self.rope_scaling.get("type", None)
rope_scaling_factor = self.rope_scaling.get("factor", None)
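For context, a minimal sketch of the `_rope_scaling_validation` helper that this and the following hunks touch, reconstructed from the surrounding lines (the set of allowed `type` values and the exact wording vary slightly per model, so treat the details below as illustrative):

```python
def _rope_scaling_validation(self):
    """Validate the `rope_scaling` configuration dictionary."""
    if self.rope_scaling is None:
        return

    if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
        raise ValueError(
            "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, " f"got {self.rope_scaling}"
        )
    rope_scaling_type = self.rope_scaling.get("type", None)
    rope_scaling_factor = self.rope_scaling.get("factor", None)
    if rope_scaling_type is None or rope_scaling_type not in ["linear", "dynamic"]:
        raise ValueError(f"`rope_scaling`'s type field must be one of ['linear', 'dynamic'], got {rope_scaling_type}")
    if rope_scaling_factor is None or not isinstance(rope_scaling_factor, float) or rope_scaling_factor <= 1.0:
        raise ValueError(f"`rope_scaling`'s factor field must be a float > 1, got {rope_scaling_factor}")
```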
3 changes: 1 addition & 2 deletions src/transformers/models/falcon/configuration_falcon.py
@@ -177,8 +177,7 @@ def _rope_scaling_validation(self):

if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
raise ValueError(
- "`rope_scaling` must be a dictionary with with two fields, `type` and `factor`, "
- f"got {self.rope_scaling}"
+ "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, " f"got {self.rope_scaling}"
)
rope_scaling_type = self.rope_scaling.get("type", None)
rope_scaling_factor = self.rope_scaling.get("factor", None)
3 changes: 1 addition & 2 deletions src/transformers/models/fuyu/configuration_fuyu.py
@@ -199,8 +199,7 @@ def _rope_scaling_validation(self):

if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
raise ValueError(
- "`rope_scaling` must be a dictionary with with two fields, `type` and `factor`, "
- f"got {self.rope_scaling}"
+ "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, " f"got {self.rope_scaling}"
)
rope_scaling_type = self.rope_scaling.get("type", None)
rope_scaling_factor = self.rope_scaling.get("factor", None)
3 changes: 1 addition & 2 deletions src/transformers/models/gpt_neox/configuration_gpt_neox.py
@@ -167,8 +167,7 @@ def _rope_scaling_validation(self):

if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
raise ValueError(
- "`rope_scaling` must be a dictionary with with two fields, `type` and `factor`, "
- f"got {self.rope_scaling}"
+ "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, " f"got {self.rope_scaling}"
)
rope_scaling_type = self.rope_scaling.get("type", None)
rope_scaling_factor = self.rope_scaling.get("factor", None)
3 changes: 1 addition & 2 deletions src/transformers/models/llama/configuration_llama.py
@@ -179,8 +179,7 @@ def _rope_scaling_validation(self):

if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
raise ValueError(
- "`rope_scaling` must be a dictionary with with two fields, `type` and `factor`, "
- f"got {self.rope_scaling}"
+ "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, " f"got {self.rope_scaling}"
)
rope_scaling_type = self.rope_scaling.get("type", None)
rope_scaling_factor = self.rope_scaling.get("factor", None)
3 changes: 1 addition & 2 deletions src/transformers/models/persimmon/configuration_persimmon.py
@@ -151,8 +151,7 @@ def _rope_scaling_validation(self):

if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
raise ValueError(
- "`rope_scaling` must be a dictionary with with two fields, `type` and `factor`, "
- f"got {self.rope_scaling}"
+ "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, " f"got {self.rope_scaling}"
)
rope_scaling_type = self.rope_scaling.get("type", None)
rope_scaling_factor = self.rope_scaling.get("factor", None)
3 changes: 1 addition & 2 deletions src/transformers/models/phi/configuration_phi.py
@@ -179,8 +179,7 @@ def _rope_scaling_validation(self):

if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
raise ValueError(
- "`rope_scaling` must be a dictionary with with two fields, `type` and `factor`, "
- f"got {self.rope_scaling}"
+ "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, " f"got {self.rope_scaling}"
)
rope_scaling_type = self.rope_scaling.get("type", None)
rope_scaling_factor = self.rope_scaling.get("factor", None)
3 changes: 1 addition & 2 deletions src/transformers/models/stablelm/configuration_stablelm.py
@@ -168,8 +168,7 @@ def _rope_scaling_validation(self):

if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
raise ValueError(
- "`rope_scaling` must be a dictionary with with two fields, `type` and `factor`, "
- f"got {self.rope_scaling}"
+ "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, " f"got {self.rope_scaling}"
)
rope_scaling_type = self.rope_scaling.get("type", None)
rope_scaling_factor = self.rope_scaling.get("factor", None)
2 changes: 1 addition & 1 deletion src/transformers/models/whisper/modeling_whisper.py
@@ -1861,7 +1861,7 @@ def forward(self, *args, **kwargs):

@add_start_docstrings(
"""
- Whisper decoder with with a language modeling head on top (linear layer with weights tied to the input embeddings).
+ Whisper decoder with a language modeling head on top (linear layer with weights tied to the input embeddings).
""",
WHISPER_START_DOCSTRING,
)