Commit

fix saving model w/o default config
ylacombe committed Sep 18, 2024
1 parent 016d538 commit 34b6e24
Showing 2 changed files with 3 additions and 0 deletions.
src/transformers/configuration_utils.py (1 addition, 0 deletions)
@@ -1033,6 +1033,7 @@ def _get_non_default_generation_parameters(self) -> Dict[str, Any]:
         if decoder_config is not self:
             default_config = decoder_config.__class__()
         else:
+            default_config = None
             decoder_config = None

         # If it is a composite model, we want to check the subconfig that will be used for generation
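The one-line addition above matters because `default_config` is read later in the helper regardless of which branch ran; before this commit, the `else` branch left it unassigned. Below is a minimal, self-contained sketch of the failure mode the fix addresses; the function and class names are illustrative stand-ins, not the actual transformers implementation.

# Illustrative reduction of the bug, not the library's real code: when a
# name is assigned in only one branch, reading it after the other branch
# raises UnboundLocalError.
def get_non_default_parameters(decoder_config, own_config):
    if decoder_config is not own_config:
        default_config = decoder_config.__class__()
    else:
        default_config = None  # the line this commit adds
        decoder_config = None

    # later code reads `default_config` in both cases
    return default_config


class DummyConfig:
    pass


cfg = DummyConfig()
# The decoder config *is* the model's own config, so the `else` branch runs.
# Without the added assignment, the `return` above would raise UnboundLocalError.
print(get_non_default_parameters(cfg, cfg))  # prints: None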
src/transformers/models/moshi/convert_moshi_transformers.py (2 additions, 0 deletions)
@@ -223,6 +223,8 @@ def convert_checkpoint(
     original_checkpoint.update({f"audio_encoder.{key}": value for (key, value) in audio_checkpoint.items()})

     model = _convert_model(original_checkpoint, model, convert_list, device, config)

+    # TODO: set generation config
+
     model.save_pretrained(pytorch_dump_folder_path)

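The added `# TODO: set generation config` marks that the conversion script still calls save_pretrained without attaching a generation config. A hedged sketch of how that TODO might later be resolved, reusing the script's own `model` and `pytorch_dump_folder_path`; the sampling values are placeholders, not Moshi's actual defaults.

from transformers import GenerationConfig

# Possible follow-up to the TODO above (placeholder values, not Moshi's real
# defaults): attach a generation config so generate() picks up these settings
# once the converted checkpoint is reloaded.
model.generation_config = GenerationConfig(
    do_sample=True,
    temperature=0.8,
    top_k=250,
)
model.save_pretrained(pytorch_dump_folder_path)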
