We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 77835fd — commit 1277999 (Copy full SHA for 1277999)
tests/compile/test_fusion_attn.py
@@ -1,7 +1,6 @@
1
# SPDX-License-Identifier: Apache-2.0
2
# SPDX-FileCopyrightText: Copyright contributors to the vLLM project
3
import copy
4
-from typing import Optional
5
6
import pytest
7
import torch._dynamo
@@ -39,10 +38,6 @@
39
38
FP8_DTYPE = current_platform.fp8_dtype()
40
FP4_DTYPE = torch.uint8
41
42
-# globals needed for string-import custom Dynamo backend field
43
-backend: Optional[TestBackend] = None
44
-backend_unfused: Optional[TestBackend] = None
45
-
46
47
class AttentionQuantPatternModel(torch.nn.Module):
48
"""Base model for AttentionQuantPattern fusion."""
0 commit comments