From dc873eba17b849e33d4bc85d0968c8840d95fc37 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E7=9F=B3=E6=99=93=E4=BC=9F?= <39303645+Shixiaowei02@users.noreply.github.com>
Date: Tue, 16 Nov 2021 13:38:05 +0800
Subject: [PATCH] clean inference logs when config.DisableGlogInfo is
 triggered (#36356) (#37212)

Co-authored-by: Pei Yang
---
 paddle/fluid/framework/ir/fc_gru_fuse_pass.cc        |  6 +++---
 paddle/fluid/framework/ir/fc_lstm_fuse_pass.cc       |  6 +++---
 paddle/fluid/framework/ir/layer_norm_fuse_pass.cc    |  5 +++--
 .../ir/mkldnn/batch_norm_act_fuse_pass.cc            |  5 +++--
 .../ir/mkldnn/fc_act_mkldnn_fuse_pass.cc             |  9 +++++----
 .../mkldnn/matmul_transpose_reshape_fuse_pass.cc     | 10 ++++++----
 .../framework/ir/mkldnn/multi_gru_fuse_pass.cc       |  6 +++---
 .../ir/mkldnn/multi_gru_seq_fuse_pass.cc             |  6 +++---
 .../reshape_transpose_matmul_mkldnn_fuse_pass.cc     | 15 ++++++++-------
 .../framework/ir/mkldnn/scale_matmul_fuse_pass.cc    |  5 +++--
 paddle/fluid/inference/analysis/ir_pass_manager.cc   |  2 ++
 11 files changed, 42 insertions(+), 33 deletions(-)

diff --git a/paddle/fluid/framework/ir/fc_gru_fuse_pass.cc b/paddle/fluid/framework/ir/fc_gru_fuse_pass.cc
index 9a43edf40ef44..52e88c6408b0e 100644
--- a/paddle/fluid/framework/ir/fc_gru_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/fc_gru_fuse_pass.cc
@@ -335,9 +335,9 @@ void FCGRUFusePass::ApplyImpl(ir::Graph* graph) const {
       graph, name_scope_, param_scope(), true /*with_fc_bias*/);
 
   AddStatis(fusion_count);
-
-  string::PrettyLogDetail("--- fused %d pairs of fc gru patterns",
-                          fusion_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    string::PrettyLogDetail("--- fused %d pairs of fc gru patterns",
+                            fusion_count);
 }
 
 }  // namespace ir
diff --git a/paddle/fluid/framework/ir/fc_lstm_fuse_pass.cc b/paddle/fluid/framework/ir/fc_lstm_fuse_pass.cc
index 2e6ce1a0f7381..d72b626fc1ebc 100644
--- a/paddle/fluid/framework/ir/fc_lstm_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/fc_lstm_fuse_pass.cc
@@ -349,9 +349,9 @@ void FCLstmFusePass::ApplyImpl(ir::Graph* graph) const {
       BuildFusion(graph, name_scope_, param_scope(), true /*with_fc_bias*/);
 
   AddStatis(fusion_count);
-
-  string::PrettyLogDetail("--- fused %d pairs of fc lstm patterns",
-                          fusion_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    string::PrettyLogDetail("--- fused %d pairs of fc lstm patterns",
+                            fusion_count);
 }
 
 }  // namespace ir
diff --git a/paddle/fluid/framework/ir/layer_norm_fuse_pass.cc b/paddle/fluid/framework/ir/layer_norm_fuse_pass.cc
index 95d55834f823b..86191587e1849 100644
--- a/paddle/fluid/framework/ir/layer_norm_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/layer_norm_fuse_pass.cc
@@ -351,8 +351,9 @@ void LayerNormFusePass::ApplyImpl(Graph* graph) const {
 
   gpd(graph, handler);
   AddStatis(found_layer_norm_count);
-  PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.",
-                  found_layer_norm_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.",
+                    found_layer_norm_count);
 }
 
 }  // namespace ir
diff --git a/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc
index 3fdb87f254403..c5bb4bf0b2fc9 100644
--- a/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc
@@ -150,8 +150,9 @@ void FuseBatchNormActOneDNNPass::FuseBatchNormAct(
 
   gpd(graph, handler);
   AddStatis(found_bn_act_count);
-  PrettyLogDetail("--- fused %d batch norm with relu activation",
-                  found_bn_act_count);
(!Has("disable_logs") || !Get("disable_logs")) + PrettyLogDetail("--- fused %d batch norm with relu activation", + found_bn_act_count); } } // namespace ir diff --git a/paddle/fluid/framework/ir/mkldnn/fc_act_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/fc_act_mkldnn_fuse_pass.cc index 85d308c7eb30d..093fd5ec538db 100644 --- a/paddle/fluid/framework/ir/mkldnn/fc_act_mkldnn_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/fc_act_mkldnn_fuse_pass.cc @@ -68,9 +68,9 @@ void FuseFCActOneDNNPass::FuseFCAct(Graph *graph, bool approximate = BOOST_GET_CONST(bool, act_op->GetAttr("approximate")); std::string type = approximate ? "_tanh" : "_erf"; fc_op->SetAttr("activation_type", act_type + type); - } else + } else { fc_op->SetAttr("activation_type", act_type); - + } fc_op->SetAttr("use_mkldnn", true); fc_op->SetOutput("Out", {act_out->Name()}); @@ -82,8 +82,9 @@ void FuseFCActOneDNNPass::FuseFCAct(Graph *graph, gpd(graph, handler); AddStatis(found_fc_act_count); - PrettyLogDetail("--- fused %d fc with %s activation", found_fc_act_count, - act_type); + if (!Has("disable_logs") || !Get("disable_logs")) + PrettyLogDetail("--- fused %d fc with %s activation", found_fc_act_count, + act_type); } } // namespace ir diff --git a/paddle/fluid/framework/ir/mkldnn/matmul_transpose_reshape_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/matmul_transpose_reshape_fuse_pass.cc index e5bdb08fe4ab4..a61099b498674 100644 --- a/paddle/fluid/framework/ir/mkldnn/matmul_transpose_reshape_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/matmul_transpose_reshape_fuse_pass.cc @@ -149,10 +149,12 @@ void MatmulTransposeReshapeMKLDNNPass::ApplyImpl(ir::Graph *graph) const { gpd(graph, handler); AddStatis(found_matmul_transpose_reshape_count); - std::stringstream msg_ss; - msg_ss << "--- Fused " << found_matmul_transpose_reshape_count - << " MatmulTransposeReshape patterns"; - paddle::string::PrettyLogDetail(msg_ss.str().c_str()); + if (!Has("disable_logs") || !Get("disable_logs")) { + std::stringstream msg_ss; + msg_ss << "--- Fused " << found_matmul_transpose_reshape_count + << " MatmulTransposeReshape patterns"; + paddle::string::PrettyLogDetail(msg_ss.str().c_str()); + } } } // namespace ir } // namespace framework diff --git a/paddle/fluid/framework/ir/mkldnn/multi_gru_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/multi_gru_fuse_pass.cc index 43c9849d5bbe3..76a0c883c8923 100644 --- a/paddle/fluid/framework/ir/mkldnn/multi_gru_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/multi_gru_fuse_pass.cc @@ -111,9 +111,9 @@ void MultiGRUFusePass::ApplyImpl(ir::Graph* graph) const { }; gpd(graph, handler); AddStatis(fused_count); - - PrettyLogDetail("--- fused %d pairs of concatenated multi_gru ops", - fused_count); + if (!Has("disable_logs") || !Get("disable_logs")) + PrettyLogDetail("--- fused %d pairs of concatenated multi_gru ops", + fused_count); } } // namespace ir diff --git a/paddle/fluid/framework/ir/mkldnn/multi_gru_seq_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/multi_gru_seq_fuse_pass.cc index 17770d26d7de9..7821501cc4b23 100644 --- a/paddle/fluid/framework/ir/mkldnn/multi_gru_seq_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/multi_gru_seq_fuse_pass.cc @@ -126,9 +126,9 @@ void MultiGruSeqFusePass::ApplyImpl(ir::Graph* graph) const { }; gpd(graph, handler); AddStatis(fused_count); - - PrettyLogDetail("--- fused %d sequences of two multi_gru ops", - fused_count); + if (!Has("disable_logs") || !Get("disable_logs")) + PrettyLogDetail("--- fused %d sequences of two multi_gru ops", + 
+                    fused_count);
 }
 
 }  // namespace ir
diff --git a/paddle/fluid/framework/ir/mkldnn/reshape_transpose_matmul_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/reshape_transpose_matmul_mkldnn_fuse_pass.cc
index 26692849d977b..e408440f26f1c 100644
--- a/paddle/fluid/framework/ir/mkldnn/reshape_transpose_matmul_mkldnn_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/mkldnn/reshape_transpose_matmul_mkldnn_fuse_pass.cc
@@ -148,13 +148,14 @@ void ReshapeTransposeMatmulMkldnnFusePass::Fuse(
 
   gpd(graph, handler);
   AddStatis(found_reshape_transpose_matmul_count);
-
-  std::stringstream msg_ss;
-  msg_ss << "--- Fused " << found_reshape_transpose_matmul_count
-         << " ReshapeTransposeMatmulMkldnn patterns";
-  if (with_reshape_xshape) msg_ss << " with reshape's xshape";
-  if (with_transpose_xshape) msg_ss << " with transpose's xshape";
-  string::PrettyLogDetail(msg_ss.str().c_str());
+  if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
+    std::stringstream msg_ss;
+    msg_ss << "--- Fused " << found_reshape_transpose_matmul_count
+           << " ReshapeTransposeMatmulMkldnn patterns";
+    if (with_reshape_xshape) msg_ss << " with reshape's xshape";
+    if (with_transpose_xshape) msg_ss << " with transpose's xshape";
+    string::PrettyLogDetail(msg_ss.str().c_str());
+  }
 }
 
 void ReshapeTransposeMatmulMkldnnFusePass::ApplyImpl(ir::Graph *graph) const {
diff --git a/paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc
index 13f1fa50d080a..0fc458723ffe4 100644
--- a/paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc
@@ -129,8 +129,9 @@ void ScaleMatmulFusePass::ApplyImpl(ir::Graph* graph) const {
   };
   gpd(graph, handler);
   AddStatis(found_scale_matmul_fuse_count);
-  PrettyLogDetail("--- fused %d scale with matmul",
-                  found_scale_matmul_fuse_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d scale with matmul",
+                    found_scale_matmul_fuse_count);
 }
 
 }  // namespace ir
diff --git a/paddle/fluid/inference/analysis/ir_pass_manager.cc b/paddle/fluid/inference/analysis/ir_pass_manager.cc
index 3ee183a4aedcd..dcbbee97a772c 100644
--- a/paddle/fluid/inference/analysis/ir_pass_manager.cc
+++ b/paddle/fluid/inference/analysis/ir_pass_manager.cc
@@ -246,6 +246,8 @@ void IRPassManager::CreatePasses(Argument *argument,
       pass->Set("use_fc_padding", new bool(use_fc_padding));
     }
 
+    pass->Set("disable_logs", new bool(disable_logs_));
+
     pre_pass = pass_name;
 
     passes_.emplace_back(std::move(pass));
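
Usage note: a minimal sketch, not part of the diff above, of how this change is exercised end to end; the model path is a placeholder. AnalysisConfig::DisableGlogInfo() sets the disable_logs_ flag that IRPassManager::CreatePasses now forwards to every IR pass as the "disable_logs" attribute, so the "--- fused ..." summaries guarded above are silenced.

    #include "paddle/fluid/inference/api/paddle_inference_api.h"

    int main() {
      paddle::AnalysisConfig config;
      config.SetModel("/path/to/model_dir");  // placeholder model path
      // Suppresses GLOG INFO output; with this patch the IR fuse passes
      // also skip their PrettyLogDetail fusion summaries, because each
      // pass now checks the "disable_logs" attribute set by
      // IRPassManager::CreatePasses.
      config.DisableGlogInfo();
      auto predictor =
          paddle::CreatePaddlePredictor<paddle::AnalysisConfig>(config);
      return 0;
    }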