Skip to content

Commit

Permalink
fix available modules (#11526)
Browse files Browse the repository at this point in the history
  • Loading branch information
Borda authored and lexierule committed Feb 9, 2022
1 parent 88830aa commit 114ac41
Show file tree
Hide file tree
Showing 2 changed files with 38 additions and 14 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Skip testing with PyTorch 1.7 and Python 3.9 on Ubuntu ([#11217](https://github.com/PyTorchLightning/pytorch-lightning/pull/11217))
- Fixed type promotion when tensors of higher category than float are logged ([#11401](https://github.com/PyTorchLightning/pytorch-lightning/pull/11401))
- Fixed the format of the configuration saved automatically by the CLI's `SaveConfigCallback` ([#11532](https://github.com/PyTorchLightning/pytorch-lightning/pull/11532))
- Fixed check for available modules ([#11526](https://github.com/PyTorchLightning/pytorch-lightning/pull/11526))

### Changed

Expand Down
51 changes: 37 additions & 14 deletions pytorch_lightning/utilities/imports.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,22 +26,45 @@
import importlib

from pkg_resources import DistributionNotFound


def _package_available(package_name: str) -> bool:
"""Check if a package is available in your environment.
>>> _package_available('os')
True
>>> _package_available('bla')
False
"""
try:
return find_spec(package_name) is not None
except ModuleNotFoundError:
return False


def _module_available(module_path: str) -> bool:
    """Check if a (possibly dotted) module path is available in your environment.

    >>> _module_available('os')
    True
    >>> _module_available('os.bla')
    False
    >>> _module_available('bla.bla')
    False
    """
    module_names = module_path.split(".")
    # Cheap, import-free check of the top-level package before importing anything.
    if not _package_available(module_names[0]):
        return False
    try:
        # Import only the root package; deeper names are resolved as attributes
        # below, so a broken/heavy submodule import is not triggered here.
        module = importlib.import_module(module_names[0])
    except ImportError:
        return False
    # Walk the remaining dotted path via attribute access on the imported root.
    for name in module_names[1:]:
        if not hasattr(module, name):
            return False
        module = getattr(module, name)
    return True


def _compare_version(package: str, op: Callable, version: str, use_base_version: bool = False) -> bool:
Expand Down Expand Up @@ -78,25 +101,25 @@ def _compare_version(package: str, op: Callable, version: str, use_base_version:
# _TORCH_GREATER_EQUAL_DEV_1_11 = _compare_version("torch", operator.ge, "1.11.0", use_base_version=True)

# Availability flags for optional integrations.
# `_package_available` checks only that the top-level distribution is installed
# (cheap, no import); `_module_available` additionally resolves dotted submodule
# paths. NOTE(review): the rendered diff left both the old `_module_available`
# and new `_package_available` assignments in place for several flags — only the
# post-commit (deduplicated) assignments are kept here.
_APEX_AVAILABLE = _module_available("apex.amp")
_DEEPSPEED_AVAILABLE = _package_available("deepspeed")
_FAIRSCALE_AVAILABLE = not _IS_WINDOWS and _module_available("fairscale.nn")
_FAIRSCALE_OSS_FP16_BROADCAST_AVAILABLE = _FAIRSCALE_AVAILABLE and _compare_version("fairscale", operator.ge, "0.3.3")
_FAIRSCALE_FULLY_SHARDED_AVAILABLE = _FAIRSCALE_AVAILABLE and _compare_version("fairscale", operator.ge, "0.3.4")
_GROUP_AVAILABLE = not _IS_WINDOWS and _module_available("torch.distributed.group")
_HOROVOD_AVAILABLE = _module_available("horovod.torch")
_HYDRA_AVAILABLE = _package_available("hydra")
_HYDRA_EXPERIMENTAL_AVAILABLE = _module_available("hydra.experimental")
_JSONARGPARSE_AVAILABLE = _package_available("jsonargparse")
_KINETO_AVAILABLE = _TORCH_GREATER_EQUAL_1_8_1 and torch.profiler.kineto_available()
_NEPTUNE_AVAILABLE = _package_available("neptune")
_NEPTUNE_GREATER_EQUAL_0_9 = _NEPTUNE_AVAILABLE and _compare_version("neptune", operator.ge, "0.9.0")
_OMEGACONF_AVAILABLE = _package_available("omegaconf")
_POPTORCH_AVAILABLE = _package_available("poptorch")
_RICH_AVAILABLE = _package_available("rich") and _compare_version("rich", operator.ge, "10.2.2")
# True if at least one real quantization engine (anything but "none") is built in.
_TORCH_QUANTIZE_AVAILABLE = bool([eg for eg in torch.backends.quantized.supported_engines if eg != "none"])
_TORCHTEXT_AVAILABLE = _package_available("torchtext")
_TORCHVISION_AVAILABLE = _package_available("torchvision")
_XLA_AVAILABLE: bool = _package_available("torch_xla")

from pytorch_lightning.utilities.xla_device import XLADeviceUtils # noqa: E402

Expand Down

0 comments on commit 114ac41

Please sign in to comment.