diff --git a/tests/models/blip_2/test_modeling_blip_2.py b/tests/models/blip_2/test_modeling_blip_2.py
index ccf3051a170fca..984d432a3604b3 100644
--- a/tests/models/blip_2/test_modeling_blip_2.py
+++ b/tests/models/blip_2/test_modeling_blip_2.py
@@ -992,7 +992,7 @@ def test_inference_t5_multi_accelerator(self):
 
         # prepare image
         image = prepare_img()
-        inputs = processor(images=image, return_tensors="pt").to(0, dtype=torch.float16)
+        inputs = processor(images=image, return_tensors="pt").to(f"{torch_device}:0", dtype=torch.float16)
 
         predictions = model.generate(**inputs)
         generated_text = processor.batch_decode(predictions, skip_special_tokens=True)[0].strip()
@@ -1003,7 +1003,7 @@ def test_inference_t5_multi_accelerator(self):
 
         # image and context
         prompt = "Question: which city is this? Answer:"
-        inputs = processor(images=image, text=prompt, return_tensors="pt").to(0, dtype=torch.float16)
+        inputs = processor(images=image, text=prompt, return_tensors="pt").to(f"{torch_device}:0", dtype=torch.float16)
 
         predictions = model.generate(**inputs)
         generated_text = processor.batch_decode(predictions, skip_special_tokens=True)[0].strip()
diff --git a/tests/test_modeling_utils.py b/tests/test_modeling_utils.py
index 7f82d0dfcaf632..8454f867742019 100755
--- a/tests/test_modeling_utils.py
+++ b/tests/test_modeling_utils.py
@@ -756,7 +756,7 @@ def test_model_parallelism_gpt2(self):
 
         tokenizer = AutoTokenizer.from_pretrained("openai-community/gpt2")
         inputs = tokenizer("Hello, my name is", return_tensors="pt")
-        output = model.generate(inputs["input_ids"].to(0))
+        output = model.generate(inputs["input_ids"].to(f"{torch_device}:0"))
 
         text_output = tokenizer.decode(output[0].tolist())
         self.assertEqual(text_output, "Hello, my name is John. I'm a writer, and I'm a writer. I'm")