Skip to content

Commit

Permalink
Fix: skip uninitialized parameters during the optimizer update
Browse files Browse the repository at this point in the history
  • Loading branch information
LiYuRio committed Dec 25, 2023
1 parent d0389be commit 3a829f4
Showing 1 changed file with 11 additions and 2 deletions.
13 changes: 11 additions & 2 deletions python/paddle/optimizer/optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -1193,15 +1193,24 @@ def _create_optimization_pass(
self._set_auxiliary_var('found_inf', False)
if isinstance(parameters_and_grads, list):
for param_and_grad in parameters_and_grads:
if param_and_grad[1] is None:
                        # Parameters can be uninitialized in the pipeline-parallel mode of semi-auto parallel.
                        # Since gradient clipping and parameter updates are mixed into one interface,
                        # we need to filter again here.
if (
param_and_grad[1] is None
or not param_and_grad[0]._is_initialized()
):
continue
if param_and_grad[0].stop_gradient is False:
self._append_optimize_op(
target_block, param_and_grad
)
else:
for param_and_grad in parameters_and_grads['params']:
if param_and_grad[1] is None:
if (
param_and_grad[1] is None
or not param_and_grad[0]._is_initialized()
):
continue
if param_and_grad[0].stop_gradient is False:
param_grad_dict = {}
Expand Down

0 comments on commit 3a829f4

Please sign in to comment.