Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix available modules #11526

Merged
merged 6 commits into from
Feb 3, 2022
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -429,6 +429,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Disabled sampler replacement when using `IterableDataset` ([#11507](https://github.com/PyTorchLightning/pytorch-lightning/pull/11507))


- Fixed check for available modules ([#11526](https://github.com/PyTorchLightning/pytorch-lightning/pull/11526))


## [1.5.8] - 2022-01-05

### Fixed
Expand Down
56 changes: 42 additions & 14 deletions pytorch_lightning/utilities/imports.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,18 +25,46 @@
from pkg_resources import DistributionNotFound


def _package_available(package_name: str) -> bool:
"""Check if a package is available in your environment.

>>> _package_available('os')
True
>>> _package_available('bla')
False
"""
try:
return find_spec(package_name) is not None
except AttributeError:
# Python 3.6
return False
except (ImportError, ModuleNotFoundError):
# Python 3.7+
return False
Borda marked this conversation as resolved.
Show resolved Hide resolved


def _module_available(module_path: str) -> bool:
    """Check if a module path is available in your environment.

    The first segment of the dotted path must be an importable package; the
    remaining segments are resolved as attributes on it, so this also returns
    ``True`` for non-module attributes (e.g. ``torch.distributed.group``).

    NOTE(review): submodules that the parent package does not import eagerly in
    its ``__init__`` will not be found by the ``hasattr`` walk — confirm this is
    the intended semantics for all call sites.

    >>> _module_available('os')
    True
    >>> _module_available('os.bla')
    False
    >>> _module_available('bla.bla')
    False
    """
    module_names = module_path.split(".")
    # cheap existence check first: avoids importing anything when the
    # top-level package is not installed at all
    if not _package_available(module_names[0]):
        return False
    try:
        module = importlib.import_module(module_names[0])
    except ImportError:
        # the package exists but fails to import (broken install, missing deps)
        return False
    # walk the remaining dotted path as attributes of the imported package
    for name in module_names[1:]:
        if not hasattr(module, name):
            return False
        module = getattr(module, name)
    return True


def _compare_version(package: str, op: Callable, version: str, use_base_version: bool = False) -> bool:
Expand Down Expand Up @@ -72,26 +100,26 @@ def _compare_version(package: str, op: Callable, version: str, use_base_version:
# _TORCH_GREATER_EQUAL_DEV_1_11 = _compare_version("torch", operator.ge, "1.11.0", use_base_version=True)

# Availability flags for optional integrations.
# _package_available is used for plain top-level packages; _module_available is
# kept where a dotted path (submodule or attribute) must be resolved.
_APEX_AVAILABLE = _module_available("apex.amp")
_DEEPSPEED_AVAILABLE = _package_available("deepspeed")
_FAIRSCALE_AVAILABLE = not _IS_WINDOWS and _module_available("fairscale.nn")
_FAIRSCALE_OSS_FP16_BROADCAST_AVAILABLE = _FAIRSCALE_AVAILABLE and _compare_version("fairscale", operator.ge, "0.3.3")
_FAIRSCALE_FULLY_SHARDED_AVAILABLE = _FAIRSCALE_AVAILABLE and _compare_version("fairscale", operator.ge, "0.3.4")
_GROUP_AVAILABLE = not _IS_WINDOWS and _module_available("torch.distributed.group")
_HOROVOD_AVAILABLE = _package_available("horovod.torch")
_HYDRA_AVAILABLE = _package_available("hydra")
_HYDRA_EXPERIMENTAL_AVAILABLE = _module_available("hydra.experimental")
_JSONARGPARSE_AVAILABLE = _package_available("jsonargparse") and _compare_version("jsonargparse", operator.ge, "4.0.0")
_KINETO_AVAILABLE = _TORCH_GREATER_EQUAL_1_8_1 and torch.profiler.kineto_available()
_NEPTUNE_AVAILABLE = _package_available("neptune")
_NEPTUNE_GREATER_EQUAL_0_9 = _NEPTUNE_AVAILABLE and _compare_version("neptune", operator.ge, "0.9.0")
_OMEGACONF_AVAILABLE = _package_available("omegaconf")
_POPTORCH_AVAILABLE = _package_available("poptorch")
_RICH_AVAILABLE = _package_available("rich") and _compare_version("rich", operator.ge, "10.2.2")
_TORCH_QUANTIZE_AVAILABLE = bool([eg for eg in torch.backends.quantized.supported_engines if eg != "none"])
_TORCHTEXT_AVAILABLE = _package_available("torchtext")
_TORCHTEXT_LEGACY: bool = _TORCHTEXT_AVAILABLE and _compare_version("torchtext", operator.lt, "0.11.0")
_TORCHVISION_AVAILABLE = _package_available("torchvision")
_XLA_AVAILABLE: bool = _package_available("torch_xla")

from pytorch_lightning.utilities.xla_device import XLADeviceUtils # noqa: E402

Expand Down