[NewIR][BugFix] fix empty_var_name problem (PaddlePaddle#55546)
* fix empty_var_name problem

* fix coverage ci

* fix coverage ci
kangguangli authored and wz1qqx committed Jul 31, 2023
1 parent 7007587 commit cabb36b
Showing 3 changed files with 36 additions and 29 deletions.
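At a glance, the fix inverts the old strategy: instead of filtering kEmptyVarName entries out of the legacy output list up front (which dropped optional outputs and shifted argument indices), GenerateOperationOutput now keeps every name and records a null-type placeholder for the empty ones. A condensed sketch of the new per-name handling, pieced together from the op_translator.cc hunks below (arg_to_idx, cur_output_idx, and the enclosing loop come from the surrounding code):

    const auto& legacy_output_vars = op_desc.Output(legacy_output_name);
    std::vector<ir::Type> types;
    for (const auto& var_name : legacy_output_vars) {
      if (var_name == kEmptyVarName) {
        // Optional output the legacy op left empty: keep a placeholder so
        // output arity and argument indices stay aligned.
        types.push_back(ir::Type(nullptr));
        arg_to_idx[var_name] = cur_output_idx;
        continue;
      }
      // Non-empty names resolve to a VarDesc and a concrete type as before.
      VarDesc* var = block->FindVarRecursive(var_name);
      // ... translate var into an ir::Type and record it ...
    }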
17 changes: 8 additions & 9 deletions paddle/fluid/ir_adaptor/translator/op_translator.cc
@@ -480,14 +480,7 @@ OpTranscriber::GenerateOperationOutput(ir::IrContext* ctx,
       continue;
     }
 
-    const auto& origin_legacy_output_vars = op_desc.Output(legacy_output_name);
-    std::vector<std::string> legacy_output_vars;
-    std::copy_if(
-        origin_legacy_output_vars.begin(),
-        origin_legacy_output_vars.end(),
-        std::back_inserter(legacy_output_vars),
-        [](const auto& var_name) { return var_name != kEmptyVarName; });
-
+    const auto& legacy_output_vars = op_desc.Output(legacy_output_name);
     bool is_vector = (info.type_name.find("VectorType") != std::string::npos);
 
     // Specially process TensorArray, this because we cannot distinguish it with
@@ -534,6 +527,11 @@ OpTranscriber::GenerateOperationOutput(ir::IrContext* ctx,
             << info.type_name << " " << legacy_output_name;
     std::vector<ir::Type> types;
     for (const auto& var_name : legacy_output_vars) {
+      if (var_name == kEmptyVarName) {
+        types.push_back(ir::Type(nullptr));
+        arg_to_idx[var_name] = cur_output_idx;
+        continue;
+      }
       VarDesc* var = block->FindVarRecursive(var_name);
       VLOG(10) << "[output translating]"
                << "[" << op_desc.Type() << "]" << info.name << " " << var_name
@@ -562,7 +560,8 @@ ir::AttributeMap OpTranscriber::TranslateOpAttribute(
   for (const auto& info : op_attr_infos) {
     auto legacy_attr_name =
         op_normalizer.GetLegacyAttrName(op_desc.Type(), info.name);
-
+    VLOG(10) << "[op: " << op_desc.Type()
+             << "][attr] from: " << legacy_attr_name << " to: " << info.name;
     if (op_desc.HasAttr(legacy_attr_name)) {
       paddle::framework::Attribute legacy_attr =
           op_desc.GetAttr(legacy_attr_name);
1 change: 1 addition & 0 deletions paddle/fluid/ir_adaptor/translator/program_translator.cc
@@ -179,6 +179,7 @@ void ProgramTranslator::SetParameterFromSingleBlock(const BlockDesc& block) {
     bool need_set_parameter_op = (parameter_name_mappings_.find(var_name) !=
                                   parameter_name_mappings_.end());
     need_set_parameter_op &= (parameter_visited_.count(var_name) == 0);
+    need_set_parameter_op &= (param_map_.count(var_name) != 0);
     if (need_set_parameter_op) {
       ir::OpResult defining_op_result = param_map_[var_name].value;
       ir::Operation* op = InsertSetParamaterOp(
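Why the new guard matters: a name can appear in parameter_name_mappings_ yet never receive a defining value during translation (for example, when the only op output carrying it was kEmptyVarName), in which case param_map_ has no entry for it, and indexing it anyway would fabricate a default-constructed one. A standalone sketch of the hazard, assuming map-like operator[] semantics for param_map_ (the real type is the translator's own context class, simplified here):

    #include <string>
    #include <unordered_map>

    struct DefiningInfo {
      int value = 0;  // stand-in for the real ir::OpResult
    };

    int main() {
      std::unordered_map<std::string, DefiningInfo> param_map;
      const std::string var_name = "fc_0.w_0";  // hypothetical parameter name
      // Without the count() guard, operator[] would silently insert a
      // default-constructed entry and hand back a meaningless value.
      if (param_map.count(var_name) != 0) {
        int defining = param_map[var_name].value;  // only reached if present
        (void)defining;
      }
      return 0;
    }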
47 changes: 27 additions & 20 deletions test/ir/new_ir/test_special_op_translator.py
@@ -33,10 +33,7 @@ def test_op(self):
                 x = paddle.to_tensor([2, 3, 4], 'float64')
                 y = paddle.cast(x, 'uint8')
 
-                default_job = core.Job("default")
-                type_to_program = {"default": main_program.desc}
-                plan = core.Plan([default_job], type_to_program)
-                new_exe = core.StandaloneExecutor(place, plan, new_scope)
+                _ = paddle.fluid.core.translate_newirprogram(main_program.desc)
 
 
 class TestEmbeddingOpTranscriber(unittest.TestCase):
@@ -53,10 +50,7 @@ def test_op(self):
                 )
                 output = embedding(x)
 
-                default_job = core.Job("default")
-                type_to_program = {"default": main_program.desc}
-                plan = core.Plan([default_job], type_to_program)
-                new_exe = core.StandaloneExecutor(place, plan, new_scope)
+                _ = paddle.fluid.core.translate_newirprogram(main_program.desc)
 
 
 class TestIncrementOpTranscriber(unittest.TestCase):
@@ -70,10 +64,7 @@ def test_op(self):
                 data = paddle.zeros(shape=[1], dtype='float32')
                 counter = paddle.increment(data)
 
-                default_job = core.Job("default")
-                type_to_program = {"default": main_program.desc}
-                plan = core.Plan([default_job], type_to_program)
-                new_exe = core.StandaloneExecutor(place, plan, new_scope)
+                _ = paddle.fluid.core.translate_newirprogram(main_program.desc)
 
 
 class TestAssignValueOpTranscriber(unittest.TestCase):
@@ -90,10 +81,7 @@ def test_op(self):
                     stop_gradient=False,
                 )
 
-                default_job = core.Job("default")
-                type_to_program = {"default": main_program.desc}
-                plan = core.Plan([default_job], type_to_program)
-                new_exe = core.StandaloneExecutor(place, plan, new_scope)
+                _ = paddle.fluid.core.translate_newirprogram(main_program.desc)
 
 
 class TestRnnOpTranscriber(unittest.TestCase):
@@ -110,10 +98,29 @@ def test_op(self):
                 cell = paddle.nn.SimpleRNNCell(16, 32)
                 y, h = cell(x, prev_h)
 
-                default_job = core.Job("default")
-                type_to_program = {"default": main_program.desc}
-                plan = core.Plan([default_job], type_to_program)
-                new_exe = core.StandaloneExecutor(place, plan, new_scope)
+                _ = paddle.fluid.core.translate_newirprogram(main_program.desc)
+
+
+class TestEmptyVarTranslate(unittest.TestCase):
+    def test_op(self):
+        place = core.Place()
+        place.set_place(paddle.CPUPlace())
+        new_scope = paddle.static.Scope()
+        main_program = paddle.static.Program()
+        with paddle.static.scope_guard(new_scope):
+            with paddle.static.program_guard(main_program):
+                x1 = paddle.rand(shape=[3, 3], dtype="float32")
+                x1.stop_gradient = False
+                weight = paddle.full(
+                    shape=[3, 3], fill_value="0.5", dtype="float32"
+                )
+                y = paddle.nn.functional.linear(x1, weight)
+                y.stop_gradient = True
+                out1 = paddle.concat(x=[x1, y], axis=1)
+                out2 = paddle.mean(out1)
+                sgd_optimizer = paddle.optimizer.SGD(learning_rate=0.1)
+                sgd_optimizer.minimize(out2)
+                _ = paddle.fluid.core.translate_newirprogram(main_program.desc)
 
 
 class TestOneHotOpTranscriber(unittest.TestCase):
