
Commit c3a722f

Authored by mgoin
[CI Failure] Fix tests with missing TinyLlama-1.1B-Chat-v1.0-FP8-e2e (#26816)
Signed-off-by: mgoin <mgoin64@gmail.com>
1 parent: aba48f7

File tree

3 files changed: +3 −3 lines

tests/compile/test_async_tp.py
tests/compile/test_fusion_all_reduce.py
tests/compile/test_sequence_parallelism.py

tests/compile/test_async_tp.py

Lines changed: 1 addition & 1 deletion

@@ -332,7 +332,7 @@ def async_tp_pass_on_test_model(
 
     # this is a fake model name to construct the model config
     # in the vllm_config, it's not really used.
-    model_name = "nm-testing/TinyLlama-1.1B-Chat-v1.0-FP8-e2e"
+    model_name = "RedHatAI/Llama-3.2-1B-Instruct-FP8"
     vllm_config.model_config = ModelConfig(
         model=model_name, trust_remote_code=True, dtype=dtype, seed=42
     )

tests/compile/test_fusion_all_reduce.py

Lines changed: 1 addition & 1 deletion

@@ -229,7 +229,7 @@ def all_reduce_fusion_pass_on_test_model(
 
     # this is a fake model name to construct the model config
     # in the vllm_config, it's not really used.
-    model_name = "nm-testing/TinyLlama-1.1B-Chat-v1.0-FP8-e2e"
+    model_name = "RedHatAI/Llama-3.2-1B-Instruct-FP8"
     vllm_config.model_config = ModelConfig(
         model=model_name, trust_remote_code=True, dtype=dtype, seed=42
     )

tests/compile/test_sequence_parallelism.py

Lines changed: 1 addition & 1 deletion

@@ -278,7 +278,7 @@ def sequence_parallelism_pass_on_test_model(
 
     # this is a fake model name to construct the model config
     # in the vllm_config, it's not really used.
-    model_name = "nm-testing/TinyLlama-1.1B-Chat-v1.0-FP8-e2e"
+    model_name = "RedHatAI/Llama-3.2-1B-Instruct-FP8"
     vllm_config.model_config = ModelConfig(
         model=model_name, trust_remote_code=True, dtype=dtype, seed=42
     )
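For context, all three tests share the same setup: the model name only has to point at a real FP8 checkpoint on the Hugging Face Hub so that ModelConfig can resolve its configuration and quantization metadata; as the in-code comment notes, the model itself is not really used and no weights are loaded. Below is a minimal sketch of that construction, assuming a recent vLLM where ModelConfig accepts the keyword arguments shown in the hunks above and assuming huggingface.co is reachable; the literal "bfloat16" stands in for the dtype fixture the tests parametrize over.

    # Minimal sketch (assumptions: vLLM installed, Hugging Face Hub reachable;
    # "bfloat16" stands in for the tests' dtype fixture).
    from vllm.config import ModelConfig

    # Any available FP8 checkpoint works; only its config is fetched, not its weights.
    model_name = "RedHatAI/Llama-3.2-1B-Instruct-FP8"

    model_config = ModelConfig(
        model=model_name, trust_remote_code=True, dtype="bfloat16", seed=42
    )

    # The tests then attach this config onto their vllm_config:
    # vllm_config.model_config = model_config
    print(model_config.model)  # -> RedHatAI/Llama-3.2-1B-Instruct-FP8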
