diff --git a/tests/test_case.py b/tests/test_case.py
index 07ce601..f656d20 100644
--- a/tests/test_case.py
+++ b/tests/test_case.py
@@ -115,7 +115,7 @@ def setUpClass(cls):
             print(f"Found {model_save_dir}. Skipping download.")
         # Model loading using vllm
         cls.tokenizer = AutoTokenizer.from_pretrained(model_save_dir)
-        cls.llm = LLM(model_save_dir, tokenizer=model_save_dir, gpu_memory_utilization=0.3)
+        cls.llm = LLM(model_save_dir, tokenizer=model_save_dir, gpu_memory_utilization=0.9)
 
         # Load dataset
         data_save_dir = os.path.join(args.cache_dir, args.data_dir)
diff --git a/tests/unit_test.py b/tests/unit_test.py
index ffc1fa0..c761dd4 100644
--- a/tests/unit_test.py
+++ b/tests/unit_test.py
@@ -25,7 +25,7 @@ def model_setup(custom_args):
         print(f"Found {model_save_dir}. Skipping download.")
 
     tokenizer = AutoTokenizer.from_pretrained(model_save_dir)
-    llm = LLM(model_save_dir, tokenizer=model_save_dir, gpu_memory_utilization=0.3)
+    llm = LLM(model_save_dir, tokenizer=model_save_dir, gpu_memory_utilization=0.9)
 
     data_save_dir = os.path.join(args.cache_dir, args.data_dir)
     dataset = load_dataset(args.data_dir, split='train')