Skip to content

Commit

Permalink
add docs (#8952)
Browse files Browse the repository at this point in the history
  • Loading branch information
awaelchli authored Aug 18, 2021
1 parent 413f7b2 commit 38ceb89
Showing 1 changed file with 7 additions and 1 deletion.
8 changes: 7 additions & 1 deletion pytorch_lightning/trainer/supporters.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,12 @@ def _agg_memory(self, how: str):

@dataclass
class SharedCycleIteratorState:
"""A state shared between all CycleIterators in a CombinedLoader.
With a shared state, the iterators can decide to terminate based on the state of all others.
If the mode is *max_size_cycle*, all iterators need to have finished before the combined loading is considered
finished, and otherwise any iterator finishing early will lead to all iterators ending early.
"""

mode: str = "max_size_cycle"
dataloaders: List[DataLoader] = field(default_factory=lambda: [])
Expand All @@ -126,7 +132,7 @@ def reset(self) -> None:
@property
def done(self) -> bool:
if not self.has_reset:
raise MisconfigurationException("Please, call reset once all dataloaders have been added.")
raise MisconfigurationException("Please call reset once all dataloaders have been added.")
if len(self.dataloaders) == 1:
return False
decision_fn = all if self.mode == "max_size_cycle" else any
Expand Down

0 comments on commit 38ceb89

Please sign in to comment.