3 files changed: +15 −5 lines

@@ -818,14 +818,15 @@ def create_new_process_for_each_test(
 
     Args:
         method: The process creation method. Can be either "spawn" or "fork".
-            If not specified,
-            it defaults to "spawn" on ROCm platforms and "fork" otherwise.
+            If not specified, it defaults to "spawn" on ROCm and XPU
+            platforms and "fork" otherwise.
 
     Returns:
         A decorator to run test functions in separate processes.
     """
     if method is None:
-        method = "spawn" if current_platform.is_rocm() else "fork"
+        use_spawn = current_platform.is_rocm() or current_platform.is_xpu()
+        method = "spawn" if use_spawn else "fork"
 
     assert method in ["spawn",
                       "fork"], "Method must be either 'spawn' or 'fork'"
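For context, a minimal sketch of how a decorator factory like this can run each test in a fresh process. The body below is illustrative, not the actual vLLM implementation; the platform checks are stubbed out so the sketch is self-contained:

import multiprocessing
from functools import wraps


def create_new_process_for_each_test_sketch(method=None):
    """Return a decorator that runs the wrapped test in its own process."""
    if method is None:
        # In vLLM this consults current_platform.is_rocm()/is_xpu();
        # stubbed to False here for a self-contained example.
        use_spawn = False
        method = "spawn" if use_spawn else "fork"
    assert method in ["spawn", "fork"]

    def decorator(test_fn):
        @wraps(test_fn)
        def wrapper(*args, **kwargs):
            ctx = multiprocessing.get_context(method)
            proc = ctx.Process(target=test_fn, args=args, kwargs=kwargs)
            proc.start()
            proc.join()
            assert proc.exitcode == 0, (
                f"test subprocess exited with code {proc.exitcode}")
        return wrapper

    return decorator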
 
 from vllm import LLM, SamplingParams
 
-from ...utils import fork_new_process_for_each_test
+from ...utils import create_new_process_for_each_test
 
 
-@fork_new_process_for_each_test
+@create_new_process_for_each_test()
 @pytest.mark.parametrize("attn_backend",
                          ["FLASH_ATTN_VLLM_V1", "FLASHINFER_VLLM_V1"])
 def test_cascade_attention(example_system_message, monkeypatch, attn_backend):
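Note the call-site change: fork_new_process_for_each_test was applied directly, while create_new_process_for_each_test is a decorator factory and must be invoked. A hedged usage sketch (passing "spawn" explicitly is assumed from the docstring's method argument):

@create_new_process_for_each_test()         # platform-dependent default
def test_uses_default_method():
    ...


@create_new_process_for_each_test("spawn")  # force spawn on any platform
def test_forces_spawn():
    ...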
@@ -1535,6 +1535,13 @@ def cuda_is_initialized() -> bool:
     return torch.cuda.is_initialized()
 
 
+def xpu_is_initialized() -> bool:
+    """Check if XPU is initialized."""
+    if not torch.xpu._is_compiled():
+        return False
+    return torch.xpu.is_initialized()
+
+
 def cuda_get_device_properties(device,
                                names: Sequence[str],
                                init_cuda=False) -> tuple[Any, ...]:
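The _is_compiled() guard is what keeps the helper safe on PyTorch builds without XPU support, where querying torch.xpu.is_initialized() directly could fail. The same pattern generalizes across backends; the helper below is an illustrative sketch, not part of this change:

import torch


def accelerator_is_initialized(backend: str) -> bool:
    # backend is a torch submodule name such as "cuda" or "xpu".
    mod = getattr(torch, backend, None)
    if mod is None or not mod._is_compiled():
        # This build lacks the backend entirely, so it cannot be initialized.
        return False
    return mod.is_initialized()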
@@ -2848,6 +2855,8 @@ def _maybe_force_spawn():
     reason = None
     if cuda_is_initialized():
         reason = "CUDA is initialized"
+    elif xpu_is_initialized():
+        reason = "XPU is initialized"
     elif is_in_ray_actor():
         # even if we choose to spawn, we need to pass the ray address
         # to the subprocess so that it knows how to connect to the ray cluster.
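Once a reason is set, _maybe_force_spawn presumably switches the worker start method to "spawn", since forking after CUDA or XPU has been initialized can deadlock or corrupt device state in the child. A hedged sketch of that consumption step (the env var name and warning text are assumptions, not shown in this diff):

import os
import logging

logger = logging.getLogger(__name__)


def _force_spawn_if_needed(reason):
    # Sketch: forking after a device runtime (CUDA/XPU) is initialized is
    # unsafe, so fall back to the "spawn" start method for workers.
    if reason is not None:
        os.environ["VLLM_WORKER_MULTIPROC_METHOD"] = "spawn"  # assumed knob
        logger.warning(
            "Switching worker multiprocessing method to 'spawn' because %s.",
            reason)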