From cabb36b66b2c429532a867da0900439a49d03a76 Mon Sep 17 00:00:00 2001
From: kangguangli
Date: Fri, 21 Jul 2023 18:53:00 +0800
Subject: [PATCH] [NewIR][BugFix] fix empty_var_name problem (#55546)

* fix empty_var_name problem

* fix coverage ci

* fix coverage ci
---
 .../ir_adaptor/translator/op_translator.cc    | 17 ++++---
 .../translator/program_translator.cc          |  1 +
 test/ir/new_ir/test_special_op_translator.py  | 47 +++++++++++--------
 3 files changed, 36 insertions(+), 29 deletions(-)

diff --git a/paddle/fluid/ir_adaptor/translator/op_translator.cc b/paddle/fluid/ir_adaptor/translator/op_translator.cc
index dd59141d2d3191..0aab57af7998a4 100644
--- a/paddle/fluid/ir_adaptor/translator/op_translator.cc
+++ b/paddle/fluid/ir_adaptor/translator/op_translator.cc
@@ -480,14 +480,7 @@ OpTranscriber::GenerateOperationOutput(ir::IrContext* ctx,
       continue;
     }
 
-    const auto& origin_legacy_output_vars = op_desc.Output(legacy_output_name);
-    std::vector<std::string> legacy_output_vars;
-    std::copy_if(
-        origin_legacy_output_vars.begin(),
-        origin_legacy_output_vars.end(),
-        std::back_inserter(legacy_output_vars),
-        [](const auto& var_name) { return var_name != kEmptyVarName; });
-
+    const auto& legacy_output_vars = op_desc.Output(legacy_output_name);
     bool is_vector = (info.type_name.find("VectorType") != std::string::npos);
 
     // Specially process TensorArray, this because we cannot distinguish it with
@@ -534,6 +527,11 @@ OpTranscriber::GenerateOperationOutput(ir::IrContext* ctx,
             << info.type_name << " " << legacy_output_name;
     std::vector<ir::Type> types;
     for (const auto& var_name : legacy_output_vars) {
+      if (var_name == kEmptyVarName) {
+        types.push_back(ir::Type(nullptr));
+        arg_to_idx[var_name] = cur_output_idx;
+        continue;
+      }
       VarDesc* var = block->FindVarRecursive(var_name);
       VLOG(10) << "[output translating]"
                << "[" << op_desc.Type() << "]" << info.name << " " << var_name
@@ -562,7 +560,8 @@ ir::AttributeMap OpTranscriber::TranslateOpAttribute(
   for (const auto& info : op_attr_infos) {
     auto legacy_attr_name =
         op_normalizer.GetLegacyAttrName(op_desc.Type(), info.name);
-
+    VLOG(10) << "[op: " << op_desc.Type()
+             << "][attr] from: " << legacy_attr_name << " to: " << info.name;
     if (op_desc.HasAttr(legacy_attr_name)) {
       paddle::framework::Attribute legacy_attr =
           op_desc.GetAttr(legacy_attr_name);
diff --git a/paddle/fluid/ir_adaptor/translator/program_translator.cc b/paddle/fluid/ir_adaptor/translator/program_translator.cc
index 2a3e7cf6074144..b162e8198b9937 100644
--- a/paddle/fluid/ir_adaptor/translator/program_translator.cc
+++ b/paddle/fluid/ir_adaptor/translator/program_translator.cc
@@ -179,6 +179,7 @@ void ProgramTranslator::SetParameterFromSingleBlock(const BlockDesc& block) {
       bool need_set_parameter_op = (parameter_name_mappings_.find(var_name) !=
                                     parameter_name_mappings_.end());
       need_set_parameter_op &= (parameter_visited_.count(var_name) == 0);
+      need_set_parameter_op &= (param_map_.count(var_name) != 0);
       if (need_set_parameter_op) {
         ir::OpResult defining_op_result = param_map_[var_name].value;
         ir::Operation* op = InsertSetParamaterOp(
diff --git a/test/ir/new_ir/test_special_op_translator.py b/test/ir/new_ir/test_special_op_translator.py
index 5cfc2f9d875ec5..2ab4819d88a108 100644
--- a/test/ir/new_ir/test_special_op_translator.py
+++ b/test/ir/new_ir/test_special_op_translator.py
@@ -33,10 +33,7 @@ def test_op(self):
                 x = paddle.to_tensor([2, 3, 4], 'float64')
                 y = paddle.cast(x, 'uint8')
 
-        default_job = core.Job("default")
-        type_to_program = {"default": main_program.desc}
-        plan = core.Plan([default_job], type_to_program)
-        new_exe = core.StandaloneExecutor(place, plan, new_scope)
+        _ = paddle.fluid.core.translate_newirprogram(main_program.desc)
 
 
 class TestEmbeddingOpTranscriber(unittest.TestCase):
@@ -53,10 +50,7 @@ def test_op(self):
             )
             output = embedding(x)
 
-        default_job = core.Job("default")
-        type_to_program = {"default": main_program.desc}
-        plan = core.Plan([default_job], type_to_program)
-        new_exe = core.StandaloneExecutor(place, plan, new_scope)
+        _ = paddle.fluid.core.translate_newirprogram(main_program.desc)
 
 
 class TestIncrementOpTranscriber(unittest.TestCase):
@@ -70,10 +64,7 @@ def test_op(self):
             data = paddle.zeros(shape=[1], dtype='float32')
             counter = paddle.increment(data)
 
-        default_job = core.Job("default")
-        type_to_program = {"default": main_program.desc}
-        plan = core.Plan([default_job], type_to_program)
-        new_exe = core.StandaloneExecutor(place, plan, new_scope)
+        _ = paddle.fluid.core.translate_newirprogram(main_program.desc)
 
 
 class TestAssignValueOpTranscriber(unittest.TestCase):
@@ -90,10 +81,7 @@ def test_op(self):
                 stop_gradient=False,
             )
 
-        default_job = core.Job("default")
-        type_to_program = {"default": main_program.desc}
-        plan = core.Plan([default_job], type_to_program)
-        new_exe = core.StandaloneExecutor(place, plan, new_scope)
+        _ = paddle.fluid.core.translate_newirprogram(main_program.desc)
 
 
 class TestRnnOpTranscriber(unittest.TestCase):
@@ -110,10 +98,29 @@ def test_op(self):
             cell = paddle.nn.SimpleRNNCell(16, 32)
             y, h = cell(x, prev_h)
 
-        default_job = core.Job("default")
-        type_to_program = {"default": main_program.desc}
-        plan = core.Plan([default_job], type_to_program)
-        new_exe = core.StandaloneExecutor(place, plan, new_scope)
+        _ = paddle.fluid.core.translate_newirprogram(main_program.desc)
+
+
+class TestEmptyVarTranslate(unittest.TestCase):
+    def test_op(self):
+        place = core.Place()
+        place.set_place(paddle.CPUPlace())
+        new_scope = paddle.static.Scope()
+        main_program = paddle.static.Program()
+        with paddle.static.scope_guard(new_scope):
+            with paddle.static.program_guard(main_program):
+                x1 = paddle.rand(shape=[3, 3], dtype="float32")
+                x1.stop_gradient = False
+                weight = paddle.full(
+                    shape=[3, 3], fill_value="0.5", dtype="float32"
+                )
+                y = paddle.nn.functional.linear(x1, weight)
+                y.stop_gradient = True
+                out1 = paddle.concat(x=[x1, y], axis=1)
+                out2 = paddle.mean(out1)
+                sgd_optimizer = paddle.optimizer.SGD(learning_rate=0.1)
+                sgd_optimizer.minimize(out2)
+        _ = paddle.fluid.core.translate_newirprogram(main_program.desc)
 
 
 class TestOneHotOpTranscriber(unittest.TestCase):
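
Note on the fix (explanatory, not part of the patch): a legacy program can bind an op output slot to kEmptyVarName ("@EMPTY@" in the legacy framework) when that output is not needed, e.g. a grad op whose input gradient is skipped because of stop_gradient. The translator previously filtered those names out, so the translated operation could lose track of which result slot they occupied; the patch instead keeps them and records a null ir::Type placeholder, and SetParameterFromSingleBlock additionally skips variables that never made it into param_map_. The sketch below, assuming current paddle.static APIs (variable names are illustrative), shows how such "@EMPTY@" slots arise in a legacy ProgramDesc:

    # Minimal sketch: build a program shaped like the new TestEmptyVarTranslate
    # case, then look for grad-op output slots bound to "@EMPTY@".
    import paddle

    paddle.enable_static()
    main_program = paddle.static.Program()
    with paddle.static.program_guard(main_program):
        x = paddle.rand(shape=[3, 3], dtype="float32")
        x.stop_gradient = False
        w = paddle.full(shape=[3, 3], fill_value=0.5, dtype="float32")
        y = paddle.nn.functional.linear(x, w)
        y.stop_gradient = True  # gradients through this branch are not required
        loss = paddle.mean(paddle.concat(x=[x, y], axis=1))
        paddle.static.append_backward(loss)

    # Grad ops may leave unused output slots as "@EMPTY@"; these are the slots
    # the translator now maps to a null ir::Type instead of dropping them.
    for op in main_program.global_block().ops:
        for slot in op.output_names:
            if "@EMPTY@" in op.output(slot):
                print(op.type, slot, op.output(slot))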