From 38ceb8943ef9b858abead1fbba43ea9a9b4cd93b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Adrian=20W=C3=A4lchli?=
Date: Wed, 18 Aug 2021 12:33:42 +0200
Subject: [PATCH] add docs (#8952)

---
 pytorch_lightning/trainer/supporters.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/pytorch_lightning/trainer/supporters.py b/pytorch_lightning/trainer/supporters.py
index 21f64e0780644..0e747a9e4857d 100644
--- a/pytorch_lightning/trainer/supporters.py
+++ b/pytorch_lightning/trainer/supporters.py
@@ -112,6 +112,12 @@ def _agg_memory(self, how: str):
 
 @dataclass
 class SharedCycleIteratorState:
+    """A state shared between all CycleIterators in a CombinedLoader.
+
+    With a shared state, the iterators can decide to terminate based on the state of all others.
+    If the mode is *max_size_cycle*, all iterators need to have finished before the combined loading is considered
+    finished; otherwise, any iterator finishing early will lead to all iterators ending early.
+    """
 
     mode: str = "max_size_cycle"
     dataloaders: List[DataLoader] = field(default_factory=lambda: [])
@@ -126,7 +132,7 @@ def reset(self) -> None:
     @property
     def done(self) -> bool:
         if not self.has_reset:
-            raise MisconfigurationException("Please, call reset once all dataloaders have been added.")
+            raise MisconfigurationException("Please call reset once all dataloaders have been added.")
         if len(self.dataloaders) == 1:
             return False
         decision_fn = all if self.mode == "max_size_cycle" else any
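
For readers skimming the patch, here is a minimal self-contained sketch of the termination logic the new docstring describes. The SharedState class, the has_finished mapping, and the combined_iterate driver below are illustrative stand-ins, not the library's API; only the all/any decision mirrors the decision_fn line visible in the second hunk, and the real SharedCycleIteratorState in pytorch_lightning/trainer/supporters.py additionally tracks the actual DataLoader objects and a has_reset flag.

from dataclasses import dataclass, field
from typing import Dict, List


@dataclass
class SharedState:
    # Illustrative stand-in for SharedCycleIteratorState: one instance is
    # shared by every iterator participating in the combined loading.
    mode: str = "max_size_cycle"
    # Hypothetical field: maps an iterator index to "has it been exhausted?".
    has_finished: Dict[int, bool] = field(default_factory=dict)

    @property
    def done(self) -> bool:
        # "max_size_cycle": done only when *all* iterators have finished once;
        # any other mode: done as soon as *any* iterator finishes.
        decision_fn = all if self.mode == "max_size_cycle" else any
        return decision_fn(self.has_finished.values())


def combined_iterate(sequences: List[list], mode: str = "max_size_cycle"):
    # Hypothetical driver: yields one item from each sequence per step,
    # restarting (cycling) any exhausted sequence until the shared state says done.
    state = SharedState(mode=mode, has_finished={i: False for i in range(len(sequences))})
    iterators = [iter(seq) for seq in sequences]
    while True:
        batch = []
        for i, seq in enumerate(sequences):
            try:
                batch.append(next(iterators[i]))
            except StopIteration:
                state.has_finished[i] = True
                if state.done:  # terminate based on the state of all the others
                    return
                iterators[i] = iter(seq)  # the "cycle" part: restart and draw again
                batch.append(next(iterators[i]))
        yield batch


# mode="max_size_cycle": the short sequence cycles until the long one finishes:
#   [[1, 'a'], [2, 'b'], [3, 'a']]
# any other mode ("min_size"): stops as soon as the short one finishes:
#   [[1, 'a'], [2, 'b']]
print(list(combined_iterate([[1, 2, 3], ["a", "b"]], mode="max_size_cycle")))
print(list(combined_iterate([[1, 2, 3], ["a", "b"]], mode="min_size")))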