1 parent 1aa162e · commit c091c0a
vllm/model_executor/models/transformers.py
@@ -229,7 +229,10 @@ def tensor_parallel(self):
         Apply the model's tensor parallelization plan.
         Currently only supports linear layers.
         """
-        if self.tp_size > 1 and self.config.base_model_tp_plan is None:
+        if not self.model.supports_tp_plan:
+            if self.tp_size <= 1:
+                return
+
             raise ValueError(
                 f"{type(self.model)} does not support tensor parallel yet!")
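For readability, here is a minimal sketch of how the control flow reads after this commit, reconstructed only from the hunk above. Compared with the old check on config.base_model_tp_plan, the new flow first asks the model whether it has a tensor-parallel plan at all, returns early on single-GPU runs, and only raises when tensor parallelism is actually requested. The wrapper class shown here, and the supports_tp_plan flag on the wrapped Hugging Face model, are assumptions taken from the names in the diff, not the full vLLM implementation.

    # Sketch reconstructed from the hunk above; the surrounding class and the
    # `supports_tp_plan` attribute are assumptions based on the names in the diff.
    class _TransformersModelSketch:

        def __init__(self, model, tp_size: int):
            self.model = model      # wrapped Hugging Face model
            self.tp_size = tp_size  # tensor-parallel world size

        def tensor_parallel(self):
            """
            Apply the model's tensor parallelization plan.
            Currently only supports linear layers.
            """
            if not self.model.supports_tp_plan:
                # No TP plan: a single-GPU run needs no sharding, so return
                # early; a multi-GPU run cannot be sharded, so fail loudly.
                if self.tp_size <= 1:
                    return

                raise ValueError(
                    f"{type(self.model)} does not support tensor parallel yet!")
            # The remainder of the method (actually applying the plan) is
            # unchanged and not part of this hunk.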