1 parent 3fb17d2 commit 60c872d
docs/source/models/supported_models.md
@@ -101,7 +101,7 @@ class MyAttention(nn.Module):
     def forward(self, hidden_states, **kwargs): # <- kwargs are required
         ...
-        attention_interface = attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
+        attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
         attn_output, attn_weights = attention_interface(
             self,
             query_states,
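
For context, below is a minimal sketch of the pattern this documentation snippet illustrates: a Transformers-style attention module that dispatches to the configured backend through `ALL_ATTENTION_FUNCTIONS`. The projection layers, head sizes, and config attributes are illustrative assumptions added here, not part of the original diff.

```python
import torch
from torch import nn
from transformers.modeling_utils import ALL_ATTENTION_FUNCTIONS


class MyAttention(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.head_dim = config.hidden_size // config.num_attention_heads
        self.scaling = self.head_dim ** -0.5
        # Hypothetical projections for illustration; a real model defines its own.
        self.q_proj = nn.Linear(config.hidden_size, config.hidden_size)
        self.k_proj = nn.Linear(config.hidden_size, config.hidden_size)
        self.v_proj = nn.Linear(config.hidden_size, config.hidden_size)
        self.o_proj = nn.Linear(config.hidden_size, config.hidden_size)

    def forward(self, hidden_states, attention_mask=None, **kwargs):  # <- kwargs are required
        bsz, seq_len, _ = hidden_states.shape
        # Project and reshape to (batch, heads, seq, head_dim).
        query_states = self.q_proj(hidden_states).view(bsz, seq_len, -1, self.head_dim).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(bsz, seq_len, -1, self.head_dim).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(bsz, seq_len, -1, self.head_dim).transpose(1, 2)

        # Look up the configured backend ("eager", "sdpa", "flash_attention_2", ...)
        # exactly once; the diff above removes an accidental duplicate of this assignment.
        attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(bsz, seq_len, -1)
        return self.o_proj(attn_output), attn_weights
```

Routing the call through `ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]` lets the same module run under eager, SDPA, or FlashAttention-2 depending on how the model was loaded, which is why the documentation keeps the lookup as a single assignment.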