Commit 729930a
Updated logic for disabling automatic stopping
EricWiener committed Aug 27, 2021
1 parent dc96793 commit 729930a
Showing 2 changed files with 8 additions and 8 deletions.
pytorch_lightning/loops/fit_loop.py: 8 changes (2 additions, 6 deletions)
@@ -129,13 +129,9 @@ def done(self) -> bool:
         Returns True if trainer.should_stop was set (e.g. by early stopping)
         or if the maximum number of steps or epochs is reached.
         """
-        if self.max_epochs < 0 and self.max_steps is None:
-            # If max_epochs is negative and max_steps is not defined, disable automatic stopping
-            return False
-
         # TODO(@awaelchli): Move track steps inside training loop and move part of these condition inside training loop
-        stop_steps = self.max_steps is not None and self.global_step >= self.max_steps
-        stop_epochs = self.max_epochs is not None and self.current_epoch >= self.max_epochs
+        stop_steps = self.max_steps not in [None, -1] and self.global_step >= self.max_steps
+        stop_epochs = self.max_epochs not in [None, -1] and self.current_epoch >= self.max_epochs
 
         should_stop = False
         if self.trainer.should_stop:
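To make the effect of the new check concrete, here is a small standalone sketch (not code from the repository; the parameter names simply mirror the FitLoop attributes in the diff above). With the updated condition, a limit of ``None`` or ``-1`` is treated as "no limit", so that term can never trigger a stop on its own:

```python
def _stopping_conditions(max_steps, max_epochs, global_step, current_epoch):
    # None or -1 means "no limit": the comparison is skipped and the term stays False.
    stop_steps = max_steps not in [None, -1] and global_step >= max_steps
    stop_epochs = max_epochs not in [None, -1] and current_epoch >= max_epochs
    return stop_steps or stop_epochs

# With both limits disabled, the loop never stops on its own:
assert _stopping_conditions(max_steps=-1, max_epochs=-1, global_step=10_000, current_epoch=500) is False
# With a step limit set, it stops once global_step reaches it:
assert _stopping_conditions(max_steps=100, max_epochs=-1, global_step=100, current_epoch=0) is True
```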
pytorch_lightning/trainer/trainer.py: 8 changes (6 additions, 2 deletions)
@@ -265,12 +265,15 @@ def __init__(
             max_epochs: Stop training once this number of epochs is reached. Disabled by default (None).
                 If both max_epochs and max_steps are not specified, defaults to ``max_epochs`` = 1000.
-                To disable automatic stopping, specify a negative integer (and leave max_steps = None).
+                To disable automatic stopping, you can set ``max_epochs = -1`` and set ``max_steps`` as ``None``
+                or ``-1``. Note that if the ``max_time`` limit is specified, it will still be observed.
 
             min_epochs: Force training for at least these many epochs. Disabled by default (None).
                 If both min_epochs and min_steps are not specified, defaults to ``min_epochs`` = 1.
 
-            max_steps: Stop training after this number of steps. Disabled by default (None).
+            max_steps: Stop training after this number of steps. Disabled by default (None). If ``max_steps = None``
+                and ``max_epochs = None``, will default to ``max_epochs = 1000``. To override this
+                behavior, see ``max_epochs``.
 
             min_steps: Force training for at least these number of steps. Disabled by default (None).
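A usage sketch of what the updated docstring describes (a plausible call, not an example taken from the commit): passing ``max_epochs=-1`` together with ``max_steps=None`` or ``max_steps=-1`` disables automatic stopping, while a ``max_time`` limit, if given, is still enforced.

```python
from pytorch_lightning import Trainer

# Train until interrupted manually or stopped by a callback such as early stopping;
# neither an epoch limit nor a step limit will end training automatically.
trainer = Trainer(max_epochs=-1, max_steps=-1)

# If a max_time limit were also passed, it would still be observed despite the settings above.
```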
@@ -378,6 +381,7 @@ def __init__(
         self.slurm_connector = SLURMConnector(self)
         self.tuner = Tuner(self)
 
+        # max_epochs won't default to 1000 if max_steps/max_time are specified (including being set to -1).
         fit_loop = FitLoop(
             min_epochs=(1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs),
             max_epochs=(1000 if (max_epochs is None and max_steps is None and max_time is None) else max_epochs),
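The defaulting behaviour described in the new comment can be read off the two conditional expressions above. The helpers below are a hypothetical restatement of that logic (no such functions exist in the codebase): ``max_epochs`` only falls back to 1000, and ``min_epochs`` to 1, when none of the related limits were specified.

```python
def resolve_max_epochs(max_epochs, max_steps, max_time):
    # Falls back to 1000 only when max_epochs, max_steps and max_time are all unset;
    # any explicit value, including -1, is passed through unchanged.
    return 1000 if (max_epochs is None and max_steps is None and max_time is None) else max_epochs


def resolve_min_epochs(min_epochs, min_steps, max_time):
    # Mirrors the min_epochs expression: the default of 1 applies only when nothing related is set.
    return 1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs


assert resolve_max_epochs(None, None, None) == 1000   # nothing specified: default kicks in
assert resolve_max_epochs(-1, None, None) == -1       # explicit -1 disables the epoch limit
assert resolve_max_epochs(None, 500, None) is None    # a step limit alone suppresses the default
```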
