We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent c3a722f · commit 87efc68 (Copy full SHA for 87efc68)
vllm/model_executor/layers/rotary_embedding/llama4_vision_rope.py
@@ -78,3 +78,10 @@ def forward_cuda( # type: ignore[override]
78
key: torch.Tensor | None = None,
79
) -> tuple[torch.Tensor, torch.Tensor | None]:
80
return self.forward_native(query, key)
81
+
82
+ def forward_hip( # type: ignore[override]
83
+ self,
84
+ query: torch.Tensor,
85
+ key: torch.Tensor | None = None,
86
+ ) -> tuple[torch.Tensor, torch.Tensor | None]:
87
+ return self.forward_native(query, key)
0 commit comments