Add validation for maximum sequence length in modeling_whisper.py #33196

Merged 24 commits on Sep 6, 2024
Commits
3056095
Add validation for maximum sequence length in modeling_whisper.py
AmirMohammadFakhimi Aug 29, 2024
4330cc3
Change exception message in src/transformers/models/whisper/modeling_…
AmirMohammadFakhimi Sep 4, 2024
f12a3c2
Change 448 to config.max_target_positions in src/transformers/models/…
AmirMohammadFakhimi Sep 4, 2024
264cd5b
Change method's documentation in src/transformers/models/whisper/mode…
AmirMohammadFakhimi Sep 4, 2024
cd4db32
Add test for maximum label's sequence length in test_modeling_whisper.py
AmirMohammadFakhimi Sep 4, 2024
03db15f
Merge branch 'huggingface:main' into patch-1
AmirMohammadFakhimi Sep 4, 2024
53dea49
Add self to modeling_whisper.py
AmirMohammadFakhimi Sep 4, 2024
933a058
Update test_modeling_whisper.py with respect to automatic validations
AmirMohammadFakhimi Sep 4, 2024
2be9b27
Update modeling_whisper.py with respect to ci/circleci: check_code_qu…
AmirMohammadFakhimi Sep 4, 2024
9c15e68
Update test_modeling_whisper.py with respect to ci/circleci: check_co…
AmirMohammadFakhimi Sep 4, 2024
81d704e
Update test_modeling_whisper.py with respect to ci/circleci: tests_ge…
AmirMohammadFakhimi Sep 4, 2024
f5ae7ec
Update test_modeling_whisper.py with respect to ci/circleci: tests_ge…
AmirMohammadFakhimi Sep 4, 2024
b586686
Update test_modeling_whisper.py with respect to ci/circleci: check_co…
AmirMohammadFakhimi Sep 4, 2024
8aa4832
Separate test_labels_sequence_max_length tests in test_modeling_whisp…
AmirMohammadFakhimi Sep 4, 2024
8bf582f
Update test_modeling_whisper.py with respect to ci/circleci: check_co…
AmirMohammadFakhimi Sep 4, 2024
13baea3
Remove assert from test_modeling_whisper.py
AmirMohammadFakhimi Sep 4, 2024
be3d4d5
Add max_target_positions to WhisperModelTester in test_modeling_whisp…
AmirMohammadFakhimi Sep 4, 2024
6cf4d8d
Update test_modeling_whisper.py with respect to ci/circleci: check_co…
AmirMohammadFakhimi Sep 4, 2024
930a933
Update test_modeling_whisper.py with respect to ci/circleci: tests_ge…
AmirMohammadFakhimi Sep 4, 2024
fb1cd5e
Update test_modeling_whisper.py
AmirMohammadFakhimi Sep 4, 2024
88cd92f
Change test_labels_sequence_max_length_error_after_changing_config in…
AmirMohammadFakhimi Sep 4, 2024
4b677d5
Change self.config.max_target_positions to self.max_target_positions …
AmirMohammadFakhimi Sep 4, 2024
bab1098
Add new tests in test_modeling_whisper.py
AmirMohammadFakhimi Sep 4, 2024
de273c0
Update test_modeling_whisper.py
AmirMohammadFakhimi Sep 4, 2024
7 changes: 6 additions & 1 deletion src/transformers/models/whisper/modeling_whisper.py
@@ -1671,6 +1671,7 @@ def __init__(self, config: WhisperConfig):
         super().__init__(config)
         self.model = WhisperModel(config)
         self.proj_out = nn.Linear(config.d_model, config.vocab_size, bias=False)
+        self.max_target_positions = config.max_target_positions

         # Initialize weights and apply final processing
         self.post_init()
@@ -1723,7 +1724,7 @@ def forward(
         labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
             Labels for computing the language modeling loss. Indices should either be in `[0, ..., config.vocab_size]`
             or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored (masked), the loss is
-            only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
+            only computed for the tokens with labels in `[0, ..., config.vocab_size]`. `sequence_length` should be smaller than or equal to `config.max_target_positions`.

         Returns:

@@ -1751,6 +1752,10 @@
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict

         if labels is not None:
+            if labels.shape[1] > self.max_target_positions:
+                raise ValueError(
+                    f"Labels' sequence length {labels.shape[1]} cannot exceed the maximum allowed length of {self.max_target_positions} tokens."
+                )
             if decoder_input_ids is None and decoder_inputs_embeds is None:
                 decoder_input_ids = shift_tokens_right(
                     labels, self.config.pad_token_id, self.config.decoder_start_token_id
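For reference, a minimal sketch of the new check in action, assuming a randomly initialized model built from the default WhisperConfig (whose max_target_positions is 448) and Whisper's standard 80-bin, 3000-frame mel input:

import torch
from transformers import WhisperConfig, WhisperForConditionalGeneration

config = WhisperConfig()  # default max_target_positions == 448
model = WhisperForConditionalGeneration(config)

# Whisper's feature extractor produces inputs of shape (batch, num_mel_bins, 3000).
input_features = torch.randn(1, config.num_mel_bins, 3000)

# One token over the limit: forward() now fails fast with a clear ValueError
# instead of surfacing a less obvious indexing error deeper in the decoder.
labels = torch.ones(1, config.max_target_positions + 1, dtype=torch.long)
try:
    model(input_features=input_features, labels=labels)
except ValueError as err:
    print(err)  # Labels' sequence length 449 cannot exceed the maximum allowed length of 448 tokens.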
57 changes: 57 additions & 0 deletions tests/models/whisper/test_modeling_whisper.py
@@ -1676,6 +1676,63 @@ def test_flash_attn_2_generate_reuse_cache(self):
                past_key_values=past_key_values,
            )

+    def test_labels_sequence_max_length_correct(self):
+        config, input_dict = self.model_tester.prepare_config_and_inputs_for_common()
+
+        for model_class in self.all_generative_model_classes:
+            input_features = input_dict["input_features"]
+
+            labels_length = config.max_target_positions
+            labels = torch.ones(1, labels_length, dtype=torch.int64)
+
+            model = model_class(config)
+            model(input_features=input_features, labels=labels)
+
+    def test_labels_sequence_max_length_correct_after_changing_config(self):
+        config, input_dict = self.model_tester.prepare_config_and_inputs_for_common()
+
+        for model_class in self.all_generative_model_classes:
+            input_features = input_dict["input_features"]
+
+            config.max_target_positions += 100
+
+            labels_length = config.max_target_positions
+            labels = torch.ones(1, labels_length, dtype=torch.int64)
+
+            model = model_class(config)
+            model(input_features=input_features, labels=labels)
+
+    def test_labels_sequence_max_length_error(self):
+        config, input_dict = self.model_tester.prepare_config_and_inputs_for_common()
+
+        for model_class in self.all_generative_model_classes:
+            input_features = input_dict["input_features"]
+
+            labels_length = config.max_target_positions + 1
+            labels = torch.ones(1, labels_length, dtype=torch.int64)
+
+            model = model_class(config)
+            with self.assertRaises(ValueError):
+                model(input_features=input_features, labels=labels)
+
+    def test_labels_sequence_max_length_error_after_changing_config(self):
+        config, input_dict = self.model_tester.prepare_config_and_inputs_for_common()
+
+        for model_class in self.all_generative_model_classes:
+            model = model_class(config)
+            input_features = input_dict["input_features"]
+
+            labels_length = config.max_target_positions + 1
+            labels = torch.ones(1, labels_length, dtype=torch.int64)
+
+            new_max_length = config.max_target_positions + 100
+            model.config.max_length = new_max_length
+            model.generation_config.max_length = new_max_length
+            config.max_target_positions = new_max_length
+
+            with self.assertRaises(ValueError):
+                model(input_features=input_features, labels=labels)


@require_torch
@require_torchaudio
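A note on the last test above: the limit that forward() checks is cached on the module at __init__ (the new self.max_target_positions attribute in the diff), so mutating the config after construction does not relax the check. A minimal sketch, again assuming a randomly initialized model from the default WhisperConfig:

import torch
from transformers import WhisperConfig, WhisperForConditionalGeneration

config = WhisperConfig()
model = WhisperForConditionalGeneration(config)

# Raising the config value after construction is not picked up by forward(),
# because the model cached config.max_target_positions in __init__.
model.config.max_target_positions += 100

input_features = torch.randn(1, config.num_mel_bins, 3000)
labels = torch.ones(1, 448 + 1, dtype=torch.long)  # one past the cached limit of 448

try:
    model(input_features=input_features, labels=labels)
except ValueError:
    print("still raises: the limit was cached at construction time")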