
Commit

Merge pull request #1 from cxxly/science-add-rules-ut
enable paddle.static.gradients for supporting primitive autograd
levi131 authored May 12, 2022
2 parents bfdb991 + be0f4ef commit afdb3ee
Showing 4 changed files with 231 additions and 207 deletions.
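
For context, a minimal usage sketch (not part of this commit) of paddle.static.gradients with primitive autograd switched on; enable_prim and prim_enabled are the helpers imported by the updated test file below, and how the backward graph is actually lowered under prim mode is assumed here rather than verified.

# Hedged sketch, not code from this commit: exercising paddle.static.gradients
# with prim mode enabled; the lowering to primitive operators is assumed from
# the commit title.
import paddle
from paddle.incubate.autograd.utils import enable_prim, prim_enabled

paddle.enable_static()
enable_prim()
assert prim_enabled()

main = paddle.static.Program()
startup = paddle.static.Program()
with paddle.static.program_guard(main, startup):
    x = paddle.static.data('X', shape=[3, 3], dtype='float32')
    x.stop_gradient = False
    y = paddle.tanh(paddle.matmul(x, x))
    # Expected to build the backward pass from primitive ops when prim is on.
    x_grad, = paddle.static.gradients([y], [x])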
48 changes: 2 additions & 46 deletions python/paddle/fluid/optimizer.py
@@ -57,45 +57,6 @@
]


@framework.static_only
def append_backward_new(loss_list,
                        parameter_list=None,
                        no_grad_set=None,
                        callbacks=None,
                        checkpoints=None,
                        distop_context=None):
    from paddle.incubate.autograd.primx import orig2prim, Transform
    program = default_main_program()
    assert program.num_blocks == 1, "The append_backward_new interface is designed to process only one block."
    block = program.current_block()

    orig2prim(block)
    ad = Transform(block)
    if parameter_list is None:
        parameter_list = program.global_block().all_parameters()
    param_dot, loss_dot = ad.linearize(parameter_list, loss_list)
    loss_bar, param_bar = ad.transpose(loss_dot, param_dot)

    # remove param_dot and their constructor ops
    op_indexes = []
    for var in param_dot:
        if var is not None:
            op_index = block.ops.index(var.op)
            assert op_index >= 0
            op_indexes.append(op_index)

    ad.erase_ops(sorted(op_indexes))
    ad.erase_dots(param_dot)

    if len(parameter_list) == 1:
        params_and_grads = [(parameter_list, param_bar)]
    else:
        params_and_grads = []
        for i, param in enumerate(parameter_list):
            params_and_grads.append((param, param_bar[i]))
    return params_and_grads


class Optimizer(object):
"""Optimizer Base class.
@@ -954,13 +915,8 @@ def backward(self,
        parameter_list = parameter_list if parameter_list \
            else self._parameter_list
        with program_guard(program, startup_program):
            from paddle.incubate.autograd.utils import prim_enabled
            if prim_enabled():
                params_grads = append_backward_new(
                    [loss], parameter_list, act_no_grad_set, callbacks)
            else:
                params_grads = append_backward(loss, parameter_list,
                                               act_no_grad_set, callbacks)
            params_grads = append_backward(loss, parameter_list,
                                           act_no_grad_set, callbacks)
        return params_grads

    def _create_regularization_of_grad(self, param, grad, regularization=None):
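Note that Optimizer.backward above now always goes through append_backward; the prim_enabled branch and append_backward_new are gone. Assuming, per the commit title, that primitive autograd is instead reached through paddle.static.gradients, a hypothetical way to assemble the same (param, grad) pairs outside the optimizer could look like the sketch below; the pairing via zip is illustrative, not code from this commit.

# Hypothetical sketch: building params_and_grads with paddle.static.gradients
# under prim mode, in place of the removed append_backward_new path.
import paddle
from paddle.incubate.autograd.utils import enable_prim

paddle.enable_static()
enable_prim()

main = paddle.static.Program()
startup = paddle.static.Program()
with paddle.static.program_guard(main, startup):
    x = paddle.static.data('x', shape=[2, 20], dtype='float32')
    w = paddle.static.create_parameter(shape=[20, 2], dtype='float32', is_bias=False)
    bias = paddle.static.create_parameter(shape=[2], dtype='float32', is_bias=True)
    loss = paddle.norm(paddle.tanh(paddle.matmul(x, w) + bias), p=2)

    params = main.global_block().all_parameters()
    grads = paddle.static.gradients([loss], params)
    params_and_grads = list(zip(params, grads))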
99 changes: 1 addition & 98 deletions python/paddle/fluid/tests/unittests/autograd/test_primops.py
@@ -20,18 +20,7 @@
    transpose, split, concat, reduce, matmul, slice_select, slice_assign,
    gather, scatter_add, fill_const)
from paddle.incubate.autograd.primx import Transform, topo_path, orig2prim, prim2orig, _gradients
from paddle.incubate.autograd.utils import enable_prim


def prog1(x, y):
    t = paddle.matmul(x, y)
    return t


def prog2(x, y):
    t = paddle.multiply(x, x)
    z = paddle.norm(t, p=2)
    return z
from paddle.incubate.autograd.utils import enable_prim, disable_prim, prim_enabled


class TestPyPrimOps(unittest.TestCase):
@@ -154,92 +143,6 @@ def test_ops(self):
        self.assertEqual(set_value_1.shape, d.shape)
        self.assertEqual(set_value_1.dtype, d.dtype)

    def test_gradients_set1(self):
        main = paddle.static.Program()
        startup = paddle.static.Program()
        with paddle.static.program_guard(main, startup):
            x = paddle.static.data('X', shape=[100, 1, 2], dtype='float32')
            y = paddle.static.data('Y', shape=[100, 2, 5], dtype='float32')
            z = prog1(x, y)
            x_grad, y_grad = _gradients([z], [x, y])
            print(f'-------test_gradients_set1-------')
            print(f'x_grad : {x_grad}')
            print(f'y_grad : {y_grad}')
            print(x.block)

    def test_gradients_set2(self):
        main = paddle.static.Program()
        startup = paddle.static.Program()
        with paddle.static.program_guard(main, startup):
            x = paddle.static.data('X', shape=[3, 3], dtype='float32')
            y = paddle.static.data('Y', shape=[3, 3], dtype='float32')
            t = paddle.matmul(x, x)
            z = paddle.norm(t, p=2)
            x_grad, y_grad = _gradients([z], [x, y])
            print(x.block)

    def test_gradients_set3(self):
        main = paddle.static.Program()
        startup = paddle.static.Program()
        with paddle.static.program_guard(main, startup):
            x = paddle.static.data('X', shape=[3, 3], dtype='float32')
            y = paddle.static.data('Y', shape=[3, 3], dtype='float32')
            t = paddle.matmul(x, y)
            z = paddle.tanh(t)
            x_grad, y_grad = _gradients([z], [x, y])
            print(f'-------test_gradients_set3-------')
            print(x.block)

    def test_second_order_gradients_set1(self):
        main = paddle.static.Program()
        startup = paddle.static.Program()
        with paddle.static.program_guard(main, startup):
            x = paddle.static.data('X', shape=[3, 3], dtype='float32')
            y = paddle.static.data('Y', shape=[3, 3], dtype='float32')
            z = paddle.matmul(x, x) + x
            x_grad, = _gradients([z], [x])
            xx_grad, = _gradients(x_grad, [x])
            print(f'-------test_second_order_gradients_set1-------')
            print(f'x_grad: {x_grad.name}')
            print(f'xx_grad: {xx_grad.name}')
            print(x.block)

    def test_second_order_gradients_set2(self):
        main = paddle.static.Program()
        startup = paddle.static.Program()
        with paddle.static.program_guard(main, startup):
            x = paddle.static.data('x', shape=[121, 2], dtype='float32')
            x.stop_gradient = False
            w = paddle.static.create_parameter(
                shape=[2, 2], dtype='float32', is_bias=False)
            bias = paddle.static.create_parameter(
                shape=[2], dtype='float32', is_bias=True)
            y = paddle.matmul(x, w) + bias
            jac, = _gradients([y], [x])

    def test_minimize(self):
        enable_prim()
        place = paddle.CPUPlace()
        if paddle.device.is_compiled_with_cuda():
            place = paddle.CUDAPlace(0)
        exe = paddle.static.Executor(place)
        main = paddle.static.Program()
        startup = paddle.static.Program()
        with paddle.static.program_guard(main, startup):
            x = paddle.static.data('x', shape=[2, 20], dtype='float32')
            x.stop_gradient = False
            w = paddle.static.create_parameter(
                shape=[20, 2], dtype='float32', is_bias=False)
            bias = paddle.static.create_parameter(
                shape=[2], dtype='float32', is_bias=True)
            y = paddle.tanh(paddle.matmul(x, w) + bias)
            loss = paddle.norm(y, p=2)
            opt = paddle.fluid.optimizer.AdamOptimizer(0.01)
            opt.minimize(loss)

        print(f'-------test_minimize: orig-------')
        print(x.block)


if __name__ == '__main__':
    unittest.main()
