
Commit

disable dev FSDP support for 1.x until Lightning-AI/pytorch-lightning#18230 resolved
speediedan committed Aug 6, 2023
1 parent e9edc88 commit b2f07ea
Showing 2 changed files with 4 additions and 5 deletions.
5 changes: 2 additions & 3 deletions src/finetuning_scheduler/strategy_adapters/fsdp.py
@@ -59,9 +59,8 @@
 )
 from torch.distributed.fsdp.wrap import _ConfigAutoWrap, _or_policy, lambda_auto_wrap_policy, wrap
 
-# TODO: at least temporarily disabling FSDP support < 2.0 due to upstream issues, re-assess before 2.1 release
+# TODO: disabling FSDP support < 2.0 until https://github.com/Lightning-AI/lightning/issues/18230 resolved
 if _TORCH_GREATER_EQUAL_2_0:
-    # TODO: alias can be dropped if disabling FSDP support < 2.0 in 2.1 release
     from torch.distributed.fsdp._common_utils import _get_param_to_fqns as _get_params_to_fqns
     from torch.distributed.fsdp._common_utils import _is_fsdp_flattened
     from torch.distributed.fsdp.wrap import _FSDPPolicy
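The hunk above keeps every pre-2.0 alias behind the `_TORCH_GREATER_EQUAL_2_0` gate, so on torch 1.x none of these private helpers are imported at all. A minimal sketch of how such a version flag is commonly derived, assuming it is computed from `torch.__version__` (the project imports its real flag from elsewhere, so this stand-in is illustrative only):

import torch
from packaging.version import Version

# Hypothetical stand-in for the imported _TORCH_GREATER_EQUAL_2_0 flag, for illustration only.
_TORCH_GREATER_EQUAL_2_0 = Version(torch.__version__).release >= (2, 0)

if _TORCH_GREATER_EQUAL_2_0:
    # Private helpers that only exist in this form on torch >= 2.0.
    from torch.distributed.fsdp._common_utils import _get_param_to_fqns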
@@ -825,7 +824,7 @@ def _enable_name_based_overrides(self) -> Generator:
         name_based_override_or_policy: Union[NameDrivenPolicy, Callable]
         if _TORCH_GREATER_EQUAL_2_0:
             name_based_override_or_policy = NameDrivenPolicy(auto_wrap_policy_handle, override_ids=override_ids)
-        # TODO: at least temporarily disabling FSDP support < 2.0 due to upstream issues, re-assess before 2.1 release
+        # TODO: disabling FSDP support < 2.0 until https://github.com/Lightning-AI/lightning/issues/18230 resolved
         # else:
         #     name_driven_policy = partial(lambda_auto_wrap_policy, lambda_fn=lambda m: id(m) in override_ids)
         #     name_based_override_or_policy = partial(_or_policy, policies=[auto_wrap_policy_handle, name_driven_policy])
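The commented-out branch above is the pre-2.0 path this commit keeps disabled: without `_FSDPPolicy` available, name-driven overrides were composed from plain callables via `functools.partial`. A minimal sketch of that composition, mirroring the commented lines (the function name below is a local illustration, not the adapter's API):

from functools import partial

from torch.distributed.fsdp.wrap import _or_policy, lambda_auto_wrap_policy

def compose_name_overrides(auto_wrap_policy_handle, override_ids):
    # Wrap a module when either the user's auto-wrap policy matches it or its id()
    # was explicitly targeted by a name-based override.
    name_driven_policy = partial(lambda_auto_wrap_policy, lambda_fn=lambda m: id(m) in override_ids)
    return partial(_or_policy, policies=[auto_wrap_policy_handle, name_driven_policy])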
4 changes: 2 additions & 2 deletions tests/test_fsdp.py
@@ -74,7 +74,7 @@
     "Please use torch.distributed.all_gather_into_tensor", # can be removed once PyTorch stops using internally,
     "Please use torch.distributed.reduce_scatter_tensor", # can be removed once PyTorch stops using internally,
     "when logging on epoch level in distributed", # validating FTS handling in this scenario
-    "Deallocating Tensor that still has live", # TODO: investigate the occasional occurance of this warning
+    "Deallocating Tensor that still has live",
     "Conversion of an array with ndim > 0 to", # warning caused by deprecated behavior of tensorboard
 ]
 EXPECTED_WARNS.extend(additional_fsdp_warns)
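`EXPECTED_WARNS` is the suite's allow-list of warning messages tolerated during the FSDP tests. A minimal sketch of one way such an allow-list can be consumed, assuming substring matching against warnings recorded by the test run (the helper below is hypothetical, not the suite's actual utility):

import warnings
from typing import List

def unexpected_warnings(recorded: List[warnings.WarningMessage], expected: List[str]) -> List[str]:
    # Return the messages of recorded warnings that no expected substring accounts for.
    return [str(w.message) for w in recorded if not any(s in str(w.message) for s in expected)]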
@@ -729,7 +729,7 @@ def policy(self):
         "min2_0",
         (path_default, *nones(3)),
     ),
-    # TODO: re-assess FSDP < 2.0 support before 2.1 release
+    # TODO: disabling FSDP support < 2.0 until https://github.com/Lightning-AI/lightning/issues/18230 resolved
     # "cust_awp_prec_pt1x": (
     #     (base_model, cust_awp, True, 0, unwrap_7_mp, *nones(4)),
     #     "max2_0",
