diff --git a/python/paddle/optimizer/__init__.py b/python/paddle/optimizer/__init__.py
index e75fbb2f20b35..756bf35486bf8 100644
--- a/python/paddle/optimizer/__init__.py
+++ b/python/paddle/optimizer/__init__.py
@@ -13,14 +13,10 @@
 # limitations under the License.
 
 __all__ = [
-    'Adadelta', 'Adam', 'Adamax', 'AdamW', 'Momentum', 'MomentumOptimizer',
-    'RMSProp', 'SGD', 'SGDOptimizer', 'Optimizer', '_LRScheduler', 'NoamLR',
-    'PiecewiseLR', 'NaturalExpLR', 'InverseTimeLR', 'PolynomialLR',
-    'LinearLrWarmup', 'ExponentialLR', 'MultiStepLR', 'StepLR', 'LambdaLR',
-    'ReduceLROnPlateau', 'CosineAnnealingLR'
+    'Optimizer', 'Adagrad', 'Adam', 'AdamW', 'Adamax', 'RMSProp', 'Adadelta',
+    'SGD', 'Momentum', 'lr'
 ]
-
 from .optimizer import Optimizer
 from .adagrad import Adagrad
 from .adam import Adam
@@ -30,5 +26,4 @@
 from .adadelta import Adadelta
 from .sgd import SGD
 from .momentum import Momentum
-
 from . import lr
diff --git a/python/paddle/optimizer/lr.py b/python/paddle/optimizer/lr.py
index ab2c0fe905bfd..b430e089d2b6b 100644
--- a/python/paddle/optimizer/lr.py
+++ b/python/paddle/optimizer/lr.py
@@ -30,7 +30,7 @@ class LRScheduler(object):
 
     LRScheduler Base class. Define the common interface of a learning rate scheduler.
 
-    User can import it by ``form paddle.optimizer.lr import LRScheduler`` ,
+    User can import it by ``from paddle.optimizer.lr import LRScheduler`` ,
 
     then overload it for your subclass and have a custom implementation of ``get_lr()`` .
 
@@ -50,7 +50,7 @@ class LRScheduler(object):
         .. code-block:: python
 
             import paddle
-            form paddle.optimizer.lr import LRScheduler
+            from paddle.optimizer.lr import LRScheduler
 
             class StepDecay(LRScheduler):
                 def __init__(self,
diff --git a/tools/wlist.json b/tools/wlist.json
index 22bab658464cb..9844fa486cc04 100644
--- a/tools/wlist.json
+++ b/tools/wlist.json
@@ -24,6 +24,8 @@
         }
     ],
     "wlist_temp_api":[
+        "LRScheduler",
+        "ReduceOnPlateau",
         "append_LARS",
         "BuildStrategy.debug_graphviz_path",
         "BuildStrategy.enable_sequential_execution",
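
Note: the docstring touched by the lr.py hunks tells users to import LRScheduler from paddle.optimizer.lr, subclass it, and override get_lr(). A minimal sketch of that pattern, assuming the paddle 2.0 scheduler API this diff documents (LRScheduler(learning_rate, last_epoch, verbose) storing base_lr and last_epoch, with step() advancing the epoch); the CustomStepDecay name and its step_size parameter are hypothetical, chosen to mirror the StepDecay example visible in the diff:

    import paddle
    from paddle.optimizer.lr import LRScheduler

    class CustomStepDecay(LRScheduler):
        # Hypothetical subclass: halve the learning rate every `step_size` epochs.
        def __init__(self, learning_rate, step_size, last_epoch=-1, verbose=False):
            # Set custom attributes before calling super().__init__, since the
            # base constructor invokes get_lr() via step().
            self.step_size = step_size
            super(CustomStepDecay, self).__init__(learning_rate, last_epoch, verbose)

        def get_lr(self):
            # base_lr is stored by LRScheduler.__init__ from `learning_rate`;
            # last_epoch is advanced by scheduler.step().
            return self.base_lr * (0.5 ** (self.last_epoch // self.step_size))

    linear = paddle.nn.Linear(10, 10)
    scheduler = CustomStepDecay(learning_rate=0.1, step_size=10)
    sgd = paddle.optimizer.SGD(learning_rate=scheduler, parameters=linear.parameters())
    # After each epoch: sgd.step(); sgd.clear_grad(); scheduler.step()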