Skip to content

Commit

Permalink
FIX Use trl version of tiny random llama (#1681)
Browse files Browse the repository at this point in the history
Using the version from HuggingFaceM4 broke our tests because it was
updated. Although that update has been reverted, it is still better to switch
to the trl-internal-testing version, which is explicitly intended for testing
and should remain stable.
  • Loading branch information
BenjaminBossan authored Apr 25, 2024
1 parent 3d9529d commit f0d3c6b
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 5 deletions.
6 changes: 3 additions & 3 deletions tests/test_common_gpu.py
Original file line number Diff line number Diff line change
Expand Up @@ -469,7 +469,7 @@ def test_lora_seq2seq_lm_multi_gpu_inference(self):
@require_bitsandbytes
def test_adaption_prompt_8bit(self):
model = LlamaForCausalLM.from_pretrained(
"HuggingFaceM4/tiny-random-LlamaForCausalLM",
"trl-internal-testing/tiny-random-LlamaForCausalLM",
quantization_config=BitsAndBytesConfig(load_in_8bit=True),
torch_dtype=torch.float16,
device_map="auto",
Expand All @@ -492,7 +492,7 @@ def test_adaption_prompt_8bit(self):
@require_bitsandbytes
def test_adaption_prompt_4bit(self):
model = LlamaForCausalLM.from_pretrained(
"HuggingFaceM4/tiny-random-LlamaForCausalLM",
"trl-internal-testing/tiny-random-LlamaForCausalLM",
quantization_config=BitsAndBytesConfig(load_in_4bit=True),
torch_dtype=torch.float16,
device_map="auto",
Expand Down Expand Up @@ -982,7 +982,7 @@ def test_4bit_dora_merging(self):
bnb_4bit_compute_dtype=torch.float32,
)
model = AutoModelForCausalLM.from_pretrained(
"HuggingFaceM4/tiny-random-LlamaForCausalLM",
"trl-internal-testing/tiny-random-LlamaForCausalLM",
quantization_config=bnb_config,
torch_dtype=torch.float32,
).eval()
Expand Down
4 changes: 2 additions & 2 deletions tests/test_decoder_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
"hf-internal-testing/tiny-random-gpt_neo",
"hf-internal-testing/tiny-random-GPTJForCausalLM",
"hf-internal-testing/tiny-random-GPTBigCodeForCausalLM",
"HuggingFaceM4/tiny-random-LlamaForCausalLM",
"trl-internal-testing/tiny-random-LlamaForCausalLM",
]

FULL_GRID = {
Expand Down Expand Up @@ -340,7 +340,7 @@ def test_passing_input_embeds_works(self, test_name, model_id, config_cls, confi
self._test_passing_input_embeds_works(test_name, model_id, config_cls, config_kwargs)

def test_lora_layer_replication(self):
model_id = "HuggingFaceM4/tiny-random-LlamaForCausalLM"
model_id = "trl-internal-testing/tiny-random-LlamaForCausalLM"
config_kwargs = {
"target_modules": ["down_proj", "up_proj"],
"task_type": "CAUSAL_LM",
Expand Down

0 comments on commit f0d3c6b

Please sign in to comment.