From e3bae584db468a912d79158db32214cacbbf9cc4 Mon Sep 17 00:00:00 2001
From: basicv8vc
Date: Mon, 6 Jun 2022 09:49:16 +0800
Subject: [PATCH 1/3] update PolynomialLR doc, current_batch = min(decay_batch, current_batch)

---
 python/oneflow/nn/optimizer/polynomial_lr.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/oneflow/nn/optimizer/polynomial_lr.py b/python/oneflow/nn/optimizer/polynomial_lr.py
index 8b986203586..8ab23d0f426 100644
--- a/python/oneflow/nn/optimizer/polynomial_lr.py
+++ b/python/oneflow/nn/optimizer/polynomial_lr.py
@@ -36,7 +36,7 @@ class PolynomialLR(LRScheduler):
     .. math::
 
         \begin{aligned}
-            & decay\_batch = min(decay\_batch, current\_batch) \\
+            & current\_batch = min(decay\_batch, current\_batch) \\
             & learning\_rate = (base\_lr-end\_lr)*(1-\frac{current\_batch}{decay\_batch})^{power}+end\_lr
         \end{aligned}
 

From 6f7bcf18ec66ac1f0e2a94db619fd8033016b6f9 Mon Sep 17 00:00:00 2001
From: basicv8vc
Date: Mon, 6 Jun 2022 09:49:16 +0800
Subject: [PATCH 2/3] * update PolynomialLR doc, current_batch = min(decay_batch, current_batch)

* rename the steps to decay_batch in parameters

---
 python/oneflow/nn/optimizer/polynomial_lr.py | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/python/oneflow/nn/optimizer/polynomial_lr.py b/python/oneflow/nn/optimizer/polynomial_lr.py
index 8b986203586..a9fa85f8132 100644
--- a/python/oneflow/nn/optimizer/polynomial_lr.py
+++ b/python/oneflow/nn/optimizer/polynomial_lr.py
@@ -36,13 +36,13 @@ class PolynomialLR(LRScheduler):
     .. math::
 
         \begin{aligned}
-            & decay\_batch = min(decay\_batch, current\_batch) \\
+            & current\_batch = min(decay\_batch, current\_batch) \\
             & learning\_rate = (base\_lr-end\_lr)*(1-\frac{current\_batch}{decay\_batch})^{power}+end\_lr
         \end{aligned}
 
     Args:
         optimizer (Optimizer): Wrapper optimizer.
-        steps (int): The decayed steps.
+        decay_batch (int): The decayed steps.
         end_learning_rate (float, optional): The final learning rate. Defaults to 0.0001.
         power (float, optional): The power of polynomial. Defaults to 1.0.
         cycle (bool, optional): If cycle is True, the scheduler will decay the learning rate every decay steps. Defaults to False.
@@ -55,7 +55,7 @@ class PolynomialLR(LRScheduler):
         ...
 
         polynomial_scheduler = flow.optim.lr_scheduler.PolynomialLR(
-            optimizer, steps=5, end_learning_rate=0.00001, power=2
+            optimizer, decay_batch=5, end_learning_rate=0.00001, power=2
         )
 
         for epoch in range(num_epoch):
@@ -66,15 +66,17 @@ class PolynomialLR(LRScheduler):
     def __init__(
         self,
         optimizer,
-        steps: int,
+        decay_batch: int,
         end_learning_rate: float = 0.0001,
         power: float = 1.0,
         cycle: bool = False,
         last_step: int = -1,
         verbose: bool = False,
     ):
-        assert steps > 0, f"steps must greater than zero, but got {steps}"
-        self.max_decay_steps = steps
+        assert (
+            decay_batch > 0
+        ), f"decay_batch must greater than zero, but got {decay_batch}"
+        self.max_decay_steps = decay_batch
         self.end_learning_rate = end_learning_rate
         self.power = power
         self.cycle = cycle

From 74425a26bdeb4cc702fe55de9d6d79f5dafcb405 Mon Sep 17 00:00:00 2001
From: basicv8vc
Date: Sun, 19 Jun 2022 11:38:32 +0800
Subject: [PATCH 3/3] update PolynomialLR test case

---
 python/oneflow/test/graph/test_graph_lr_scheduler.py | 4 ++--
 python/oneflow/test/graph/test_graph_lrs.py          | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/python/oneflow/test/graph/test_graph_lr_scheduler.py b/python/oneflow/test/graph/test_graph_lr_scheduler.py
index 6ced90334ad..dbb13e561fa 100644
--- a/python/oneflow/test/graph/test_graph_lr_scheduler.py
+++ b/python/oneflow/test/graph/test_graph_lr_scheduler.py
@@ -181,7 +181,7 @@ def test_polynomial_lr(self):
             base_lr=0.1,
             iters=20,
             lr_scheduler=flow.optim.lr_scheduler.PolynomialLR,
-            steps=20,
+            decay_batch=20,
             end_learning_rate=1e-5,
             power=2.0,
             atol=1e-5,
@@ -191,7 +191,7 @@ def test_polynomial_lr(self):
             base_lr=0.01,
             iters=20,
             lr_scheduler=flow.optim.lr_scheduler.PolynomialLR,
-            steps=20,
+            decay_batch=20,
             end_learning_rate=1e-4,
             power=1.0,
             cycle=True,
diff --git a/python/oneflow/test/graph/test_graph_lrs.py b/python/oneflow/test/graph/test_graph_lrs.py
index adedb2205a7..76fcd4c60bc 100644
--- a/python/oneflow/test/graph/test_graph_lrs.py
+++ b/python/oneflow/test/graph/test_graph_lrs.py
@@ -183,7 +183,7 @@ def _lr_fn(parameters):
         of_sgd = flow.optim.SGD(parameters, lr=0.001)
 
         lr = flow.optim.lr_scheduler.PolynomialLR(
-            of_sgd, steps=10, end_learning_rate=0.00001, power=2, cycle=True
+            of_sgd, decay_batch=10, end_learning_rate=0.00001, power=2, cycle=True
         )
         return of_sgd, lr
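
Note: the decay rule documented above is easy to sanity-check with a small
standalone sketch. The function below is plain Python written for this note,
not OneFlow's implementation; the `cycle=True` branch assumes the common
TF-style convention of stretching the decay horizon to the next multiple of
`decay_batch`, and the `base_lr=0.001` in the demo loop is likewise an
assumed value (the docstring example does not show it).

.. code-block:: python

    import math

    def polynomial_lr(base_lr, end_lr, decay_batch, current_batch,
                      power=1.0, cycle=False):
        # Documented rule (after PATCH 1/3's fix):
        #   current_batch = min(decay_batch, current_batch)
        #   lr = (base_lr - end_lr) * (1 - current_batch / decay_batch) ** power + end_lr
        if cycle:
            # Assumption: restart the schedule by growing the horizon,
            # rather than holding the learning rate at end_lr.
            decay_batch *= max(1, math.ceil(current_batch / decay_batch))
        else:
            # Clamp current_batch (not decay_batch) so lr bottoms out at end_lr.
            current_batch = min(decay_batch, current_batch)
        return (base_lr - end_lr) * (1 - current_batch / decay_batch) ** power + end_lr

    # Mirrors the docstring example (decay_batch=5, power=2):
    for step in range(7):
        print(step, polynomial_lr(0.001, 0.00001, decay_batch=5,
                                  current_batch=step, power=2))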