We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 40bee1e · commit 489937c
vllm/model_executor/layers/fused_moe/layer.py
@@ -542,7 +542,7 @@ def forward_xpu(
542
logical_to_physical_map is not None or \
543
logical_replica_count is not None:
544
raise NotImplementedError("Expert load balancing is not supported "
545
- "for CPU.")
+ "for XPU.")
546
assert custom_routing_function is None
547
return layer.ipex_fusion(
548
x,
0 commit comments