         # [Encoder-only]
         pytest.param("BAAI/bge-base-en-v1.5"),
         pytest.param("sentence-transformers/all-MiniLM-L12-v2"),
-        pytest.param("intfloat/multilingual-e5-small"),
-        pytest.param("iic/gte-Qwen2-7B-instruct"),
-        # # [Decoder-only]
-        pytest.param("BAAI/bge-multilingual-gemma2"),
-        pytest.param("intfloat/e5-mistral-7b-instruct"),
-        pytest.param("iic/gte-Qwen2-1.5B-instruct"),
-        pytest.param("QwenCollection/Qwen2-7B-Instruct-embed-base"),
+        # pytest.param("intfloat/multilingual-e5-small"),
+        # pytest.param("iic/gte-Qwen2-7B-instruct"),
+        # # # [Decoder-only]
+        # pytest.param("BAAI/bge-multilingual-gemma2"),
+        # pytest.param("intfloat/e5-mistral-7b-instruct"),
+        # pytest.param("iic/gte-Qwen2-1.5B-instruct"),
+        # pytest.param("QwenCollection/Qwen2-7B-Instruct-embed-base"),
         # # [Cross-Encoder]
         # pytest.param("sentence-transformers/stsb-roberta-base-v2"),
     ],
@@ -63,6 +63,7 @@ def test_models(
 ) -> None:
     with monkeypatch.context() as m:
         m.setenv("VLLM_USE_MODELSCOPE", "True")
+        m.setenv("PYTORCH_NPU_ALLOC_CONF", "max_split_size_mb:256")
         vllm_extra_kwargs: Dict[str, Any] = {}

         # The example_prompts has ending "\n", for example:
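For context on the added m.setenv("PYTORCH_NPU_ALLOC_CONF", ...) line: pytest's monkeypatch.context() undoes any environment changes when the with block exits, so the allocator setting only applies to this test. A minimal sketch of that scoping (hypothetical test name, assuming the variable is not already set in the surrounding environment):

    import os

    def test_npu_alloc_conf_is_scoped(monkeypatch):
        with monkeypatch.context() as m:
            # Applies only inside this block.
            m.setenv("PYTORCH_NPU_ALLOC_CONF", "max_split_size_mb:256")
            assert os.environ["PYTORCH_NPU_ALLOC_CONF"] == "max_split_size_mb:256"
        # On leaving the context, monkeypatch restores the previous state,
        # so the setting does not leak into other tests.
        assert "PYTORCH_NPU_ALLOC_CONF" not in os.environ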