Skip to content

Commit f82ba3c

Browse files
committed
fix: qwq-plus only supports streaming
1 parent 0e29ce2 commit f82ba3c

File tree

1 file changed

+1
-1
lines changed
  • apps/setting/models_provider/impl/aliyun_bai_lian_model_provider/model

1 file changed

+1
-1
lines changed

apps/setting/models_provider/impl/aliyun_bai_lian_model_provider/model/llm.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ def is_cache_model():
1414
@staticmethod
1515
def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
1616
optional_params = MaxKBBaseModel.filter_optional_params(model_kwargs)
17-
if 'qwen-omni-turbo' in model_name:
17+
if 'qwen-omni-turbo' in model_name or 'qwq-plus' in model_name:
1818
optional_params['streaming'] = True
1919
return BaiLianChatModel(
2020
model=model_name,

0 commit comments

Comments
 (0)