[PIR] add TensorArray ops create_array_like, add_n #60460

Merged 48 commits on Jan 3, 2024.
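For orientation, here is a minimal sketch (not from this PR) of the public TensorArray surface the change extends under PIR. The `create_array_like` op itself is internal, presumably used when building gradients for the array ops; the calls below are the existing `paddle.tensor` APIs, and the script assumes only stock Paddle in static mode:

```python
import paddle

paddle.enable_static()  # TensorArray ops are built into a static (PIR) program

main = paddle.static.Program()
with paddle.static.program_guard(main):
    x = paddle.full([2, 2], 1.0, dtype='float32')
    i = paddle.zeros([1], dtype='int64')
    arr = paddle.tensor.create_array(dtype='float32')  # an empty TensorArray
    arr = paddle.tensor.array_write(x, i, array=arr)   # arr[0] = x
    y = paddle.tensor.array_read(arr, i)               # y = arr[0]
    z = paddle.add_n([y, y])  # add_n over tensors; this PR adds a TensorArray variant
```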
Commits (48):
001d799 optimize backward (xiaoguoguo626807, Dec 8, 2023)
05ca298 Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into… (xiaoguoguo626807, Dec 11, 2023)
4fd113e Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into… (xiaoguoguo626807, Dec 12, 2023)
8f60538 Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into… (xiaoguoguo626807, Dec 13, 2023)
8854896 [PIR] add vjp interface for while op (winter-wang, Dec 12, 2023)
7e177f6 [PIR] fix ci error. (winter-wang, Dec 13, 2023)
11c8656 modify while stopgradient (xiaoguoguo626807, Dec 14, 2023)
d8c3936 merge (xiaoguoguo626807, Dec 14, 2023)
da62e16 merge (xiaoguoguo626807, Dec 15, 2023)
67ed811 merge (xiaoguoguo626807, Dec 15, 2023)
30bba32 modify while grad bug (xiaoguoguo626807, Dec 18, 2023)
53f2920 merge (xiaoguoguo626807, Dec 18, 2023)
fde161c modify while grad op (xiaoguoguo626807, Dec 18, 2023)
fdc12c7 modify (xiaoguoguo626807, Dec 18, 2023)
e3d19b9 increment vp (xiaoguoguo626807, Dec 19, 2023)
600d99c merge (xiaoguoguo626807, Dec 20, 2023)
0913436 [PIR] add get_used_external_value interface for block. (winter-wang, Dec 19, 2023)
63344b7 while case (xiaoguoguo626807, Dec 20, 2023)
59ad2fc delete print (xiaoguoguo626807, Dec 20, 2023)
f4eceb6 delete print (xiaoguoguo626807, Dec 20, 2023)
1c9eb96 Update python/paddle/autograd/ir_backward.py (xiaoguoguo626807, Dec 20, 2023)
4beaa79 Merge branch 'develop' into while_2 (xiaoguoguo626807, Dec 20, 2023)
df0b46a [PIR] add unit_test for get_used_external_value (winter-wang, Dec 20, 2023)
65083df modify while_loop (xiaoguoguo626807, Dec 21, 2023)
f2f4fa0 Merge branch 'while_2' of https://github.com/xiaoguoguo626807/Paddle … (xiaoguoguo626807, Dec 21, 2023)
f8e3ac4 Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into… (xiaoguoguo626807, Dec 21, 2023)
95bc3d7 code_style (xiaoguoguo626807, Dec 21, 2023)
37e807c modofy ci bug (xiaoguoguo626807, Dec 21, 2023)
52afa31 Merge branch 'develop', commit 'refs/pull/60159/head' of https://gith… (xiaoguoguo626807, Dec 21, 2023)
48de124 modify while api (xiaoguoguo626807, Dec 22, 2023)
a7f13c9 merge (xiaoguoguo626807, Dec 25, 2023)
adb627a modify ci (xiaoguoguo626807, Dec 25, 2023)
e90cd79 modify array (xiaoguoguo626807, Dec 26, 2023)
17e17d4 merge (xiaoguoguo626807, Dec 26, 2023)
1aa50c0 Update python/paddle/autograd/ir_backward.py (xiaoguoguo626807, Dec 26, 2023)
eef3e24 Update test/legacy_test/test_cond.py (xiaoguoguo626807, Dec 26, 2023)
d78b574 update (xiaoguoguo626807, Dec 26, 2023)
d404059 modify array_write grad info (xiaoguoguo626807, Dec 26, 2023)
fb8c52d merge (xiaoguoguo626807, Dec 26, 2023)
f3e09e5 Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into… (xiaoguoguo626807, Dec 26, 2023)
44d856f Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into… (xiaoguoguo626807, Dec 27, 2023)
39fcb4b merge (xiaoguoguo626807, Dec 27, 2023)
655482a add_n and createarraylike (xiaoguoguo626807, Dec 29, 2023)
ec43be4 merge (xiaoguoguo626807, Dec 29, 2023)
785d367 conflict (xiaoguoguo626807, Dec 29, 2023)
b6e2388 modify exe bug (xiaoguoguo626807, Dec 29, 2023)
5315369 modify kernel choose (xiaoguoguo626807, Jan 2, 2024)
5f60450 fix conflict (xiaoguoguo626807, Jan 2, 2024)
Changes shown below are from commit da62e16ec64b2d801671359509c26021c4955938 ("merge", committed by xiaoguoguo626807 on Dec 15, 2023).
paddle/fluid/pir/dialect/operator/ir/control_flow_op.cc (16 additions & 19 deletions)

```diff
@@ -48,6 +48,7 @@ void IfOp::Build(pir::Builder &builder,  // NOLINT
   argument.output_types.swap(output_types);
   argument.AddRegion().emplace_back();
   argument.AddRegion().emplace_back();
+  cond.set_attribute(kStopGradientAttrName, builder.bool_attr(true));
 }

 void IfOp::Build(pir::Builder &builder,  // NOLINT
@@ -256,8 +257,11 @@ void WhileOp::Build(pir::Builder &builder,  // NOLINT
   std::vector<pir::Attribute> outs_stop_gradient;
   for (auto val : inputs) {
     argument.AddOutput(val.type());
-    body.AddArgument(val.type());
+    auto arg = body.AddArgument(val.type());
+
     auto bool_attr = val.attribute<pir::BoolAttribute>(kStopGradientAttrName);
+    arg.set_attribute(kStopGradientAttrName,
+                      bool_attr ? bool_attr : builder.bool_attr(false));
     outs_stop_gradient.push_back(bool_attr ? bool_attr
                                            : builder.bool_attr(false));
   }
@@ -339,6 +343,14 @@ std::vector<std::vector<pir::OpResult>> WhileOp::Vjp(
                         "the outputs size is %d.",
                         inputs.size(),
                         outputs.size()));
+  PADDLE_ENFORCE_EQ(inputs.size(),
+                    out_grads.size() + 1,
+                    phi::errors::InvalidArgument(
+                        "while op's inputs' size should equal to "
+                        "output_grads' size, Now the inputs's size is %d ."
+                        "the output_grads size is %d.",
+                        inputs.size(),
+                        out_grads.size()));
   PADDLE_ENFORCE_EQ(stop_gradients[0][0],
                     true,
                     phi::errors::InvalidArgument(
@@ -350,25 +362,12 @@ std::vector<std::vector<pir::OpResult>> WhileOp::Vjp(
   std::vector<pir::Type> output_types;
   std::vector<pir::Value> loop_vars;

-  for (size_t index = 0; index < inputs.size(); ++index) {
+  for (size_t index = 0; index < out_grads.size(); ++index) {
     if (!stop_gradients[index + 1][0]) {
       loop_vars.push_back(out_grads[index][0]);
     }
   }
-  // for (++index; index < inputs.size(); ++index) {
-  //   if (!stop_gradients[index][0]) {
-  //     auto fwd_type = inputs[index][0].type().dyn_cast<DenseTensorType>();
-  //     PADDLE_ENFORCE_NE(
-  //         fwd_type,
-  //         pir::Type(),
-  //         phi::errors::InvalidArgument(
-  //             "The forward value type must be dense tensor type."));
-  //     auto shape = vectorize(fwd_type.dims());
-  //     auto dtype = TransToPhiDataType(fwd_type.dtype());
-  //     auto full_op = builder.Build<FullOp>(shape, 0.0, dtype,
-  //     phi::CPUPlace()); loop_vars.push_back(full_op.out());
-  //   }
-  // }

   auto while_grad = builder.Build<WhileOp>(cond_val, loop_vars);

   std::vector<std::vector<pir::OpResult>> res(inputs.size());
@@ -397,9 +396,7 @@ std::vector<std::vector<pir::OpResult>> TuplePushOpVjpInterfaceModel::Vjp(
   res[0].resize(1);
   for (size_t i = 1u; i < inputs.size(); ++i) {
     res[i].resize(1);
-    if (!stop_gradients[i][0]) {
-      res[i][0] = pop_op.result(i - 1);
-    }
+    res[i][0] = pop_op.result(i - 1);
   }
   return res;
 }
```
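The new `PADDLE_ENFORCE_EQ` in `WhileOp::Vjp` pins down the calling convention: a while op's inputs are the loop condition plus the loop variables, and only the loop variables carry output gradients. A schematic check of that invariant, with plain strings standing in for real `pir` values and the group counts mirroring `test_while_api.py` below:

```python
# Placeholder groups, not real pir values: the while inputs are
# [cond] + 3 loop vars, and each while output contributes one grad group.
inputs = [["cond"], ["x"], ["i"], ["ten"]]
out_grads = [["gx"], ["gi"], ["gten"]]
assert len(inputs) == len(out_grads) + 1  # the invariant WhileOp::Vjp enforces
```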
python/paddle/autograd/ir_backward.py (18 additions & 1 deletion)

```diff
@@ -449,6 +449,23 @@ def make_output_with_output_grad(op):
                 outputs.append(new_value)
                 output_grads.append(state.value_to_valuegrad[value][0])

+        if op.name() == "pd_op.while":
+            for i, input in enumerate(get_real_op_inputs(op)):
+                if i <= len(op.results()):
+                    continue
+                if (
+                    input in state.value_to_valuegrad
+                    and len(state.value_to_valuegrad[input]) > 1
+                ):
+                    append_add_n(input)
+
+                if (
+                    input not in state.value_to_valuegrad
+                    or state.value_to_valuegrad[input] == []
+                ):
+                    append_full_like(0.0, input, input, state, backward_ops)
+                output_grads.append(state.value_to_valuegrad[input][0])
+
         return zero_flag, outputs, output_grads

     def make_input_with_input_stopgradient(op):
@@ -576,7 +593,7 @@ def append_yield(block, base_inputs, base_inputs_grad):
     # [op4] (op4's inputs and outputs are not vectorType)

     # -----------------only for control flow-----------------#
-    # tuple_push value to pop value
+    # tuple_push value to tuple_pop value
     control_flow_value_to_copyvalue_map = {}

     if (
```
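The new `pd_op.while` branch in `make_output_with_output_grad` guarantees that every extra while input (index past the op's result count) contributes exactly one output grad: duplicate grad entries are folded with `append_add_n`, and missing ones are zero-filled via `append_full_like`. A self-contained sketch of that padding rule, with plain floats standing in for grad values and a hypothetical helper name:

```python
def pad_grads(value_to_valuegrad, extra_inputs):
    """Schematic stand-in (hypothetical) for the pd_op.while branch above."""
    grads = []
    for v in extra_inputs:
        entries = list(value_to_valuegrad.get(v, []))
        if len(entries) > 1:
            entries = [sum(entries)]  # stands in for append_add_n(input)
        if not entries:
            entries = [0.0]           # stands in for append_full_like(0.0, ...)
        grads.append(entries[0])
    return grads

print(pad_grads({"x": [1.0, 2.0]}, ["x", "y"]))  # -> [3.0, 0.0]
```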
test/cpp/pir/pass/pass_manager_test.cc (1 addition & 1 deletion)

```diff
@@ -226,7 +226,7 @@ TEST(pass_manager, PassManager) {
                                        true,
                                        true));

-  pm.EnablePassTiming(true);
+  // pm.EnablePassTiming(true);

   CHECK_EQ(pm.Run(&program), true);
 }
```
test/ir/pir/test_while_api.py (13 additions & 12 deletions)

```diff
@@ -72,8 +72,8 @@ def test_while_op_vjp_interface(self):
         ]
         self.assertEqual(len(while_input), 4)
         while_input_stop_graditents = [[True], [False], [True], [True]]
-        while_output = [while_op.results()]
-        while_output_grad = [[out_grad, out_grad]]
+        while_output = [[value] for value in while_op.results()]
+        while_output_grad = [[out_grad], [out_grad], [out_grad]]
         self.assertEqual(has_vjp(while_op), True)
         grad_outs = call_vjp(
             while_op,
@@ -90,16 +90,17 @@ def test_while_op_vjp_interface(self):
         while_grad_output = while_grad_op.results()
         self.assertEqual(len(while_grad_output), 1)

-    def test_while_base_backward(self):
-        main_program = self.construct_program_with_while()
-        full_op1 = main_program.global_block().ops[0]
-        while_op = main_program.global_block().ops[-1]
-        with paddle.pir.core.program_guard(main_program):
-            out = while_op.result(0) + 1
-            grad_outs = grad(
-                out,
-                [full_op1.result(0)],
-            )
+    def test_while_base_backward(self):
+        main_program = self.construct_program_with_while()
+        full_op1 = main_program.global_block().ops[0]
+        while_op = main_program.global_block().ops[-1]
+        with paddle.pir.core.program_guard(main_program):
+            out = while_op.result(0) + 1
+            grad_outs = grad(
+                out,
+                [full_op1.result(0)],
+            )
+        print(main_program)


 if __name__ == "__main__":
```
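For context, a hedged end-to-end sketch of what `test_while_base_backward` exercises: build a while loop in a PIR program and differentiate through it. The scaffolding names here (`paddle.pir_utils.IrGuard`, the `ir_backward.grad` import) are assumed from the Paddle tree of this era rather than taken from the diff:

```python
import paddle
from paddle.autograd.ir_backward import grad

paddle.enable_static()
with paddle.pir_utils.IrGuard():               # build under the PIR program IR
    main = paddle.static.Program()
    with paddle.static.program_guard(main):
        i = paddle.full([1], 0.0)
        ten = paddle.full([1], 10.0)
        i.stop_gradient = False
        out = paddle.static.nn.while_loop(     # while i < ten: i = i + 1
            lambda i: i < ten, lambda i: [i + 1], [i]
        )
        loss = out[0] + 1
        grads = grad(loss, [i])                # d(loss)/d(i), as in the test
```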