[FIX] offload_weight() takes from 3 to 4 positional arguments but 5 were given (huggingface#29457)

* use require_torch_gpu

* enable on XPU

* fix
faaany authored and astachowiczhabana committed May 9, 2024
1 parent f26e407 commit fc243e3
Showing 1 changed file with 2 additions and 2 deletions.
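The error in the title comes from accelerate's offload_weight helper, which accepts at most four positional arguments (weight, weight_name, offload_folder, index=None); passing a fifth positional argument raises the quoted TypeError. A minimal sketch of that failure mode, assuming accelerate and torch are installed (illustrative only, not the exact call site this commit addresses):

import tempfile

import torch
from accelerate.utils import offload_weight

with tempfile.TemporaryDirectory() as offload_folder:
    index = {}
    # Supported call: at most four positional arguments, the last being the offload index.
    index = offload_weight(torch.ones(2, 2), "layer.weight", offload_folder, index)
    print(index["layer.weight"])  # e.g. {'dtype': 'float32', 'shape': [2, 2]}

    try:
        # A fifth positional argument reproduces the error named in the commit title.
        offload_weight(torch.ones(2, 2), "layer.weight", offload_folder, index, "extra")
    except TypeError as err:
        print(err)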
tests/test_modeling_utils.py: 4 changes (2 additions & 2 deletions)
@@ -851,7 +851,7 @@ def test_model_parallelism_gpt2(self):

@require_accelerate
@mark.accelerate_tests
-@require_torch_accelerator
+@require_torch_gpu
def test_from_pretrained_disk_offload_task_model(self):
model = AutoModel.from_pretrained("hf-internal-testing/tiny-random-gpt2")
device_map = {
@@ -892,7 +892,7 @@ def test_from_pretrained_disk_offload_task_model(self):

@require_accelerate
@mark.accelerate_tests
-@require_torch_accelerator
+@require_torch_gpu
def test_from_pretrained_disk_offload_derived_to_base_model(self):
derived_model = AutoModelForCausalLM.from_pretrained("hf-internal-testing/tiny-random-gpt2")

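For context on the decorator swap: in transformers.testing_utils, require_torch_gpu restricts a test to CUDA devices, while require_torch_accelerator also allows other backends such as Intel XPU. A simplified sketch of that distinction (an approximation for illustration, not the actual transformers implementation):

import unittest

import torch


def sketch_require_torch_gpu(test_case):
    # Skip the test unless a CUDA device is present.
    return unittest.skipUnless(torch.cuda.is_available(), "test requires CUDA")(test_case)


def sketch_require_torch_accelerator(test_case):
    # Skip the test unless some supported accelerator (CUDA, XPU, ...) is present.
    has_accelerator = torch.cuda.is_available() or (
        hasattr(torch, "xpu") and torch.xpu.is_available()
    )
    return unittest.skipUnless(has_accelerator, "test requires an accelerator")(test_case)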
