Commit fd4a3b8
rm useless print
levi131 committed Apr 18, 2022
1 parent dc4318e · commit fd4a3b8

Showing 1 changed file with 0 additions and 3 deletions:
python/paddle/autograd/new_adam_optimizer.py (3 changes: 0 additions, 3 deletions)
@@ -889,7 +889,6 @@ def backward(self,
         Examples:
             See examples in ``apply_gradients``.
         """
-        print("in new class backward")
         act_no_grad_set = None
         if framework._non_static_mode():
             pass
@@ -913,7 +912,6 @@ def backward(self,
                     grad_var = param._grad_ivar()
                     params_grads.append((param, grad_var))
         else:
-            print("in new backward else branch")
             if callbacks is None:
                 callbacks = [error_clip_callback]
             else:
@@ -1241,7 +1239,6 @@ def minimize(self,
         """
         assert isinstance(loss, Variable), "The loss should be an Variable."
 
-        print("in base minimize")
         parameter_list = parameter_list if parameter_list \
             else self._parameter_list
 
