Skip to content

Commit

Permalink
Nit: fix the Whisper tests
Browse files Browse the repository at this point in the history
  • Loading branch information
ArthurZucker committed Sep 25, 2024
1 parent c798599 commit 3d8405d
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion src/transformers/cache_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -1430,7 +1430,7 @@ def get_seq_length(self, layer_idx: Optional[int] = 0) -> int:
# check if empty list because in case of static cache it will be a tensors and we can't check `if not torch.Tensor`
if self.self_attention_cache.key_cache == []:
return 0
if len(self.self_attention_cache.key_cache) > 1 and self.key_cache[layer_idx] == []:
if len(self.self_attention_cache.key_cache) > 1 and self.self_attention_cache.key_cache[layer_idx] == []:
return 0
return (self.self_attention_cache.key_cache[layer_idx][0, 0].any(dim=-1)).sum()

Expand Down

0 comments on commit 3d8405d

Please sign in to comment.