update change logic, test=allcases
FeixLiu committed Aug 3, 2022
1 parent: 6a50a42 · commit: ae6b7ca
Showing 1 changed file with 3 additions and 1 deletion.
python/paddle/optimizer/optimizer.py (4 changes: 3 additions & 1 deletion)
@@ -388,7 +388,9 @@ def _create_global_learning_rate(self):
         # lr var can't be float16, for pure fp16 training, should extra handle the dtype for lr
         _lr_dtype = paddle.get_default_dtype(
         ) if self._dtype is None else self._dtype
-        _lr_dtype = paddle.float32 if _lr_dtype == paddle.float16 else _lr_dtype
+        _lr_dtype = paddle.float32 if (
+            paddle.get_default_dtype() != paddle.float16
+            and _lr_dtype == paddle.float16) else _lr_dtype
         if isinstance(self._learning_rate, LRScheduler):
             lr_var = self._global_learning_rate()
             # only create global lr_var once
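
In effect, the new condition skips the float16-to-float32 promotion when the global default dtype is itself float16 (pure fp16 training), so the lr var keeps its float16 dtype there; in every other case a float16 lr dtype is still promoted to float32, as before. A minimal standalone sketch of that decision follows, using dtype strings for simplicity; the helper name and the assertions are illustrative, not part of the patch or the Paddle API:

import paddle

def resolve_lr_dtype(optimizer_dtype=None):
    # Illustrative helper mirroring the patched logic; not actual Paddle API.
    # Fall back to the global default dtype when the optimizer has none.
    lr_dtype = (paddle.get_default_dtype()
                if optimizer_dtype is None else optimizer_dtype)
    # Promote float16 to float32 only when training is NOT pure fp16,
    # i.e. the global default dtype is something other than float16.
    if paddle.get_default_dtype() != 'float16' and lr_dtype == 'float16':
        lr_dtype = 'float32'
    return lr_dtype

# Pure fp16 training: default dtype is float16, so the lr dtype stays float16.
paddle.set_default_dtype('float16')
assert resolve_lr_dtype() == 'float16'

# Otherwise a float16 lr dtype is still promoted to float32, as before.
paddle.set_default_dtype('float32')
assert resolve_lr_dtype('float16') == 'float32'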
