-
Notifications
You must be signed in to change notification settings - Fork 1.6k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[TESTING] Introduce testing util to manage models
This PR introduces a new env var, MLC_TEST_MODEL_PATH, which allows a list of model paths to be specified for test-model search purposes. If the model is not found, an error message appears and the test is automatically skipped, in both pytest and normal running settings. The path defaults to the cached HF path, so as long as we have run mlc_llm chat the model can be found. However, we do not automatically download models, to avoid excessive networking in CI settings. A follow-up PR is needed for the remaining test cases.
- Loading branch information
Showing
5 changed files
with
76 additions
and
7 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,3 +1,4 @@ | ||
""" | ||
Test and debug tools for MLC LLM | ||
""" | ||
from .pytest_utils import require_test_model |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,55 @@ | ||
"""Extra utilities to mark tests""" | ||
import functools | ||
import warnings | ||
from pathlib import Path | ||
from typing import Callable | ||
|
||
import pytest | ||
|
||
from mlc_llm.support.constants import MLC_TEST_MODEL_PATH | ||
|
||
|
||
def require_test_model(model: str): | ||
"""Testcase decorator to require a model | ||
Examples | ||
-------- | ||
.. code:: | ||
@require_test_model("Llama-2-7b-chat-hf-q4f16_1-MLC") | ||
def test_reload_reset_unload(model): | ||
# model now points to the right path | ||
# specified by MLC_TEST_MODEL_PATH | ||
engine = mlc_llm.MLCEngine(model) | ||
# test code follows | ||
Parameters | ||
---------- | ||
model : str | ||
The model dir name | ||
""" | ||
model_path = None | ||
for base_path in MLC_TEST_MODEL_PATH: | ||
if (base_path / model / "mlc-chat-config.json").is_file(): | ||
model_path = base_path / model | ||
missing_model = model_path is None | ||
message = ( | ||
f"Model {model} does not exist in candidate paths {[str(p) for p in MLC_TEST_MODEL_PATH]}," | ||
" if you set MLC_TEST_MODEL_PATH env var, please ensure model paths are in the right location," | ||
" by default we reuse cache with mlc_llm chat, try to run mlc_llm chat to download right set of models." | ||
) | ||
|
||
def _decorator(func: Callable[[str], None]): | ||
wrapped = functools.partial(func, str(model_path)) | ||
wrapped.__name__ = func.__name__ | ||
|
||
@functools.wraps(wrapped) | ||
def wrapper(*args, **kwargs): | ||
if missing_model: | ||
print(f"{message} skipping...") | ||
return | ||
return wrapped(*args, **kwargs) | ||
|
||
return pytest.mark.skipif(missing_model, reason=message)(wrapper) | ||
|
||
return _decorator |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters