Skip to content

Commit a49e6f5

Browse files
committed
Fix outdated version checks of accelerator
Signed-off-by: Yuanyuan Chen <cyyever@outlook.com>
1 parent 3e1a279 commit a49e6f5

File tree

1 file changed

+1
-13
lines changed

1 file changed

+1
-13
lines changed

tests/fsdp/test_fsdp.py

Lines changed: 1 addition & 13 deletions
Original file line number | Diff line number | Diff line change
@@ -88,22 +88,11 @@ def get_master_port(real_launcher=False):
8888

8989

9090
if is_torch_available():
91-
from tests.trainer.test_trainer import ( # noqa
92-
RegressionModelConfig,
93-
RegressionPreTrainedModel,
94-
)
95-
9691
# hack to restore original logging level pre #21700
9792
get_regression_trainer = partial(tests.trainer.test_trainer.get_regression_trainer, log_level="info")
9893

99-
require_fsdp_version = require_fsdp
10094
if is_accelerate_available():
101-
from accelerate.utils.constants import (
102-
FSDP_PYTORCH_VERSION,
103-
FSDP_SHARDING_STRATEGY,
104-
)
105-
106-
require_fsdp_version = partial(require_fsdp, min_version=FSDP_PYTORCH_VERSION)
95+
from accelerate.utils.constants import FSDP_SHARDING_STRATEGY
10796

10897

10998
FSDP2_ACCELERATE_VERSION = "1.6.0"
@@ -142,7 +131,6 @@ def _parameterized_custom_name_func(func, param_num, param):
142131

143132
@require_accelerate
144133
@require_torch_accelerator
145-
@require_fsdp_version
146134
class TrainerIntegrationFSDP(TestCasePlus, TrainerIntegrationCommon):
147135
def setUp(self):
148136
super().setUp()

0 commit comments

Comments (0)