
Commit f13cd39 (parent: f40b372)

Commit message: + tests

2 files changed (+9, -9 lines)


tests/models/language/pooling_mteb_test/mteb_utils.py

Lines changed: 6 additions & 3 deletions

@@ -212,10 +212,13 @@ def mteb_test_embed_models(hf_runner,
         vllm_dtype = vllm_model.llm.llm_engine.model_config.dtype
         head_dtype = model_config.head_dtype
 
-        # Test embed_dims, isnan and whether to use normalize
+        # Test embedding_size, isnan and whether to use normalize
         vllm_outputs = vllm_model.embed(example_prompts,
                                         truncate_prompt_tokens=-1)
-        assert not torch.any(torch.isnan(torch.tensor(vllm_outputs)))
+        outputs_tensor = torch.tensor(vllm_outputs)
+        assert not torch.any(torch.isnan(outputs_tensor))
+        embedding_size = model_config.embedding_size
+        assert torch.tensor(vllm_outputs).shape[-1] == embedding_size
 
         # Accelerate mteb test by setting
         # SentenceTransformers mteb score to a constant
@@ -233,7 +236,7 @@ def mteb_test_embed_models(hf_runner,
         st_main_score = run_mteb_embed_task(hf_model, MTEB_EMBED_TASKS)
         st_dtype = next(hf_model.model.parameters()).dtype
 
-        # Test embed_dims and whether to use normalize
+        # Check embeddings close to hf outputs
         hf_outputs = hf_model.encode(example_prompts)
         check_embeddings_close(
             embeddings_0_lst=hf_outputs,
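As context for the new assertions, here is a minimal standalone sketch of the check they add; the helper name check_embedding_outputs is hypothetical, and it assumes embeddings arrive as a list of float lists with model_config.embedding_size as the expected width, as in the diff above:

import torch

def check_embedding_outputs(vllm_outputs: list[list[float]],
                            embedding_size: int) -> None:
    # Stack the per-prompt embeddings into a single 2-D tensor.
    outputs_tensor = torch.tensor(vllm_outputs)
    # The embeddings must contain no NaNs ...
    assert not torch.any(torch.isnan(outputs_tensor))
    # ... and their last dimension must match the configured size.
    assert outputs_tensor.shape[-1] == embedding_size

Note that reusing outputs_tensor for both checks avoids rebuilding the tensor, which the committed code still does in its second assert.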

vllm/transformers_utils/config.py

Lines changed: 3 additions & 6 deletions

@@ -1003,12 +1003,11 @@ def try_get_tokenizer_config(
 
 @cache
 def try_get_dense_modules(
-    pretrained_model_name_or_path: Union[str, os.PathLike],
+    model: Union[str, Path],
     revision: Optional[str] = None,
 ) -> Optional[list[dict[str, Any]]]:
     try:
-        modules = get_hf_file_to_dict("modules.json",
-                                      pretrained_model_name_or_path, revision)
+        modules = get_hf_file_to_dict("modules.json", model, revision)
         if not modules:
             return None
 
@@ -1027,9 +1026,7 @@ def try_get_dense_modules(
             folder = module.get("path", "")
 
             config_path = f"{folder}/config.json" if folder else "config.json"
-            layer_config = get_hf_file_to_dict(config_path,
-                                               pretrained_model_name_or_path,
-                                               revision)
+            layer_config = get_hf_file_to_dict(config_path, model, revision)
             if not layer_config:
                 continue
             layer_config["folder"] = folder
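For reference, a rough local-filesystem sketch of what try_get_dense_modules does with these files. The real helper resolves files through get_hf_file_to_dict, which can also fetch from the Hugging Face Hub; the helper name local_dense_modules is hypothetical, and the filter on the Sentence Transformers Dense module type is an assumption inferred from the function's name, since that part of the function lies outside this diff:

import json
from pathlib import Path
from typing import Any, Optional

def local_dense_modules(model_dir: str) -> Optional[list[dict[str, Any]]]:
    # modules.json is the Sentence Transformers index of pipeline modules.
    modules_path = Path(model_dir) / "modules.json"
    if not modules_path.is_file():
        return None
    modules = json.loads(modules_path.read_text())

    layer_configs: list[dict[str, Any]] = []
    for module in modules:
        # Assumption: only Dense projection modules are collected here.
        if module.get("type") != "sentence_transformers.models.Dense":
            continue
        folder = module.get("path", "")
        # Each module keeps its own config.json inside its folder.
        rel_path = f"{folder}/config.json" if folder else "config.json"
        config_path = Path(model_dir) / rel_path
        if not config_path.is_file():
            continue
        layer_config = json.loads(config_path.read_text())
        layer_config["folder"] = folder
        layer_configs.append(layer_config)
    return layer_configs or None

Renaming the parameter to model: Union[str, Path] lets the two get_hf_file_to_dict calls fit on one line each, which is all this part of the commit changes.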
