Skip to content

Commit

Permalink
build(deps): update pytorch-lightning requirement from <2.0.0,>=1.5 to >=1.5,<3.0.0 in /requirements (#1636)
Browse files Browse the repository at this point in the history

* build(deps): update pytorch-lightning requirement in /requirements

Updates the requirements on [pytorch-lightning](https://github.com/Lightning-AI/lightning) to permit the latest version.
- [Release notes](https://github.com/Lightning-AI/lightning/releases)
- [Commits](https://github.com/Lightning-AI/pytorch-lightning/compare/1.5.0...2.0.0)

---
updated-dependencies:
- dependency-name: pytorch-lightning
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>

* update
* 1.6
* .0
* pl
* lai

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Nicki Skafte Detlefsen <skaftenicki@gmail.com>
Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com>
Co-authored-by: Jirka <jirka.borovec@seznam.cz>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Jirka Borovec <6035284+Borda@users.noreply.github.com>
  • Loading branch information
6 people authored Apr 13, 2023
1 parent f2b85bb commit d0a8452
Show file tree
Hide file tree
Showing 7 changed files with 25 additions and 15 deletions.
1 change: 0 additions & 1 deletion docs/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -301,7 +301,6 @@ def package_list_from_file(file):
# define mapping from PyPI names to python imports
PACKAGE_MAPPING = {
"PyYAML": "yaml",
"pytorch-lightning": "pytorch_lightning",
}
MOCK_PACKAGES = []
if SPHINX_MOCK_REQUIREMENTS:
Expand Down
2 changes: 1 addition & 1 deletion docs/source/pages/lightning.rst
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import torch
from torch.nn import Module
from pytorch_lightning.core.lightning import LightningModule
from lightning import LightningModule
from torchmetrics import Metric

#################################
Expand Down
4 changes: 2 additions & 2 deletions docs/source/pages/overview.rst
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
.. testsetup:: *

import torch
from pytorch_lightning.core.lightning import LightningModule
from lightning import LightningModule

##################
Structure Overview
Expand Down Expand Up @@ -96,7 +96,7 @@ be moved to the same device as the input of the metric:
print(out.device) # cuda:0
However, when **properly defined** inside a :class:`~torch.nn.Module` or
:class:`~pytorch_lightning.core.lightning.LightningModule` the metric will be automatically moved
:class:`~lightning.LightningModule` the metric will be automatically moved
to the same device as the module when using ``.to(device)``. Being
**properly defined** means that the metric is correctly identified as a child module of the
model (check ``.children()`` attribute of the model). Therefore, metrics cannot be placed
Expand Down
2 changes: 2 additions & 0 deletions requirements/docs.txt
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ sphinx-paramlinks>=0.5.1
sphinx-togglebutton>=0.2
sphinx-copybutton>=0.3

lightning>=1.8.0, <3.0.0

# integrations
-r integrate.txt
-r visual.txt
Expand Down
2 changes: 1 addition & 1 deletion requirements/integrate.txt
Original file line number Diff line number Diff line change
@@ -1 +1 @@
pytorch-lightning>=1.6.0, <2.0.0
pytorch-lightning >=1.6.0, <3.0.0
7 changes: 6 additions & 1 deletion tests/integrations/lightning/boring_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,14 @@
# limitations under the License.

import torch
from pytorch_lightning import LightningModule
from lightning_utilities import module_available
from torch.utils.data import Dataset

if module_available("lightning"):
from lightning import LightningModule
else:
from pytorch_lightning import LightningModule


class RandomDictStringDataset(Dataset):
"""Class for creating a dictionary of random strings."""
Expand Down
22 changes: 13 additions & 9 deletions tests/integrations/test_lightning.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,17 @@
from unittest import mock

import torch
from pytorch_lightning import LightningModule, Trainer
from lightning_utilities import module_available
from torch import tensor
from torch.nn import Linear
from torch.utils.data import DataLoader

if module_available("lightning"):
from lightning import LightningModule, Trainer
else:
from pytorch_lightning import LightningModule, Trainer

from integrations.helpers import no_warning_call
from integrations.lightning.boring_model import BoringModel, RandomDataset
from integrations.lightning.boring_model import BoringModel
from torchmetrics import MetricCollection, SumMetric
from torchmetrics.classification import BinaryAccuracy, BinaryAveragePrecision

Expand All @@ -40,7 +44,7 @@ class TestModel(BoringModel):
def __init__(self) -> None:
super().__init__()
self.metric = SumMetric()
self.sum = 0.0
self.register_buffer("sum", torch.tensor(0.0))

def training_step(self, batch, batch_idx):
x = batch
Expand Down Expand Up @@ -176,11 +180,11 @@ def __init__(self) -> None:
super().__init__()
self.metric_step = SumMetric()
self.metric_epoch = SumMetric()
self.sum = torch.tensor(0.0)
self.register_buffer("sum", torch.tensor(0.0))
self.outs = []

def on_train_epoch_start(self):
self.sum = torch.tensor(0.0)
self.sum = torch.tensor(0.0, device=self.sum.device)

def training_step(self, batch, batch_idx):
x = batch
Expand Down Expand Up @@ -221,8 +225,8 @@ class TestModel(BoringModel):
def __init__(self) -> None:
super().__init__()
self.metric = MetricCollection([SumMetric(), DiffMetric()])
self.sum = torch.tensor(0.0)
self.diff = torch.tensor(0.0)
self.register_buffer("sum", torch.tensor(0.0))
self.register_buffer("diff", torch.tensor(0.0))

def training_step(self, batch, batch_idx):
x = batch
Expand Down Expand Up @@ -265,7 +269,7 @@ def __init__(self) -> None:
# the metric is not used in the module's `forward`
# so the module should be exportable to TorchScript
self.metric = SumMetric()
self.sum = torch.tensor(0.0)
self.register_buffer("sum", torch.tensor(0.0))

def training_step(self, batch, batch_idx):
x = batch
Expand Down

0 comments on commit d0a8452

Please sign in to comment.