Commit dc873eb

clean inference logs when config.DisableGlogInfo is triggered (#36356) (#37212)
Co-authored-by: Pei Yang <peiyang@baidu.com>
Shixiaowei02 and cryoco authored Nov 16, 2021
1 parent 287ca7d commit dc873eb
Showing 11 changed files with 42 additions and 33 deletions.
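For context, the user-facing switch that feeds these changes is the inference Config's DisableGlogInfo() call named in the commit title: once informational logging is disabled there, the fuse-pass summary lines below are also silenced. A minimal usage sketch under that assumption (the model directory and header location are placeholders, not part of this commit):

#include "paddle_inference_api.h"  // header name/location depends on how the inference library is packaged

int main() {
  paddle_infer::Config config;
  config.SetModel("./model_dir");  // placeholder model directory
  // Suppress informational logs; with this commit, pass fusion summaries are suppressed too.
  config.DisableGlogInfo();
  auto predictor = paddle_infer::CreatePredictor(config);
  (void)predictor;
  return 0;
}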
6 changes: 3 additions & 3 deletions paddle/fluid/framework/ir/fc_gru_fuse_pass.cc
@@ -335,9 +335,9 @@ void FCGRUFusePass::ApplyImpl(ir::Graph* graph) const {
       graph, name_scope_, param_scope(), true /*with_fc_bias*/);
 
   AddStatis(fusion_count);
-
-  string::PrettyLogDetail("--- fused %d pairs of fc gru patterns",
-                          fusion_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    string::PrettyLogDetail("--- fused %d pairs of fc gru patterns",
+                            fusion_count);
 }
 
 }  // namespace ir
6 changes: 3 additions & 3 deletions paddle/fluid/framework/ir/fc_lstm_fuse_pass.cc
@@ -349,9 +349,9 @@ void FCLstmFusePass::ApplyImpl(ir::Graph* graph) const {
       BuildFusion(graph, name_scope_, param_scope(), true /*with_fc_bias*/);
 
   AddStatis(fusion_count);
-
-  string::PrettyLogDetail("--- fused %d pairs of fc lstm patterns",
-                          fusion_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    string::PrettyLogDetail("--- fused %d pairs of fc lstm patterns",
+                            fusion_count);
 }
 
 }  // namespace ir
5 changes: 3 additions & 2 deletions paddle/fluid/framework/ir/layer_norm_fuse_pass.cc
@@ -351,8 +351,9 @@ void LayerNormFusePass::ApplyImpl(Graph* graph) const {
 
   gpd(graph, handler);
   AddStatis(found_layer_norm_count);
-  PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.",
-                  found_layer_norm_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.",
+                    found_layer_norm_count);
 }
 
 }  // namespace ir
5 changes: 3 additions & 2 deletions paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc
@@ -150,8 +150,9 @@ void FuseBatchNormActOneDNNPass::FuseBatchNormAct(
 
   gpd(graph, handler);
   AddStatis(found_bn_act_count);
-  PrettyLogDetail("--- fused %d batch norm with relu activation",
-                  found_bn_act_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d batch norm with relu activation",
+                    found_bn_act_count);
 }
 
 }  // namespace ir
9 changes: 5 additions & 4 deletions paddle/fluid/framework/ir/mkldnn/fc_act_mkldnn_fuse_pass.cc
@@ -68,9 +68,9 @@ void FuseFCActOneDNNPass::FuseFCAct(Graph *graph,
       bool approximate = BOOST_GET_CONST(bool, act_op->GetAttr("approximate"));
       std::string type = approximate ? "_tanh" : "_erf";
       fc_op->SetAttr("activation_type", act_type + type);
-    } else
+    } else {
       fc_op->SetAttr("activation_type", act_type);
-
+    }
     fc_op->SetAttr("use_mkldnn", true);
 
     fc_op->SetOutput("Out", {act_out->Name()});
@@ -82,8 +82,9 @@ void FuseFCActOneDNNPass::FuseFCAct(Graph *graph,
 
   gpd(graph, handler);
   AddStatis(found_fc_act_count);
-  PrettyLogDetail("--- fused %d fc with %s activation", found_fc_act_count,
-                  act_type);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d fc with %s activation", found_fc_act_count,
+                    act_type);
 }
 
 }  // namespace ir
@@ -149,10 +149,12 @@ void MatmulTransposeReshapeMKLDNNPass::ApplyImpl(ir::Graph *graph) const {
 
   gpd(graph, handler);
   AddStatis(found_matmul_transpose_reshape_count);
-  std::stringstream msg_ss;
-  msg_ss << "--- Fused " << found_matmul_transpose_reshape_count
-         << " MatmulTransposeReshape patterns";
-  paddle::string::PrettyLogDetail(msg_ss.str().c_str());
+  if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
+    std::stringstream msg_ss;
+    msg_ss << "--- Fused " << found_matmul_transpose_reshape_count
+           << " MatmulTransposeReshape patterns";
+    paddle::string::PrettyLogDetail(msg_ss.str().c_str());
+  }
 }
 }  // namespace ir
 }  // namespace framework
6 changes: 3 additions & 3 deletions paddle/fluid/framework/ir/mkldnn/multi_gru_fuse_pass.cc
@@ -111,9 +111,9 @@ void MultiGRUFusePass::ApplyImpl(ir::Graph* graph) const {
   };
   gpd(graph, handler);
   AddStatis(fused_count);
-
-  PrettyLogDetail("--- fused %d pairs of concatenated multi_gru ops",
-                  fused_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d pairs of concatenated multi_gru ops",
+                    fused_count);
 }
 
 }  // namespace ir
6 changes: 3 additions & 3 deletions paddle/fluid/framework/ir/mkldnn/multi_gru_seq_fuse_pass.cc
@@ -126,9 +126,9 @@ void MultiGruSeqFusePass::ApplyImpl(ir::Graph* graph) const {
   };
   gpd(graph, handler);
   AddStatis(fused_count);
-
-  PrettyLogDetail("--- fused %d sequences of two multi_gru ops",
-                  fused_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d sequences of two multi_gru ops",
+                    fused_count);
 }
 
 }  // namespace ir
@@ -148,13 +148,14 @@ void ReshapeTransposeMatmulMkldnnFusePass::Fuse(
 
   gpd(graph, handler);
   AddStatis(found_reshape_transpose_matmul_count);
-
-  std::stringstream msg_ss;
-  msg_ss << "--- Fused " << found_reshape_transpose_matmul_count
-         << " ReshapeTransposeMatmulMkldnn patterns";
-  if (with_reshape_xshape) msg_ss << " with reshape's xshape";
-  if (with_transpose_xshape) msg_ss << " with transpose's xshape";
-  string::PrettyLogDetail(msg_ss.str().c_str());
+  if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
+    std::stringstream msg_ss;
+    msg_ss << "--- Fused " << found_reshape_transpose_matmul_count
+           << " ReshapeTransposeMatmulMkldnn patterns";
+    if (with_reshape_xshape) msg_ss << " with reshape's xshape";
+    if (with_transpose_xshape) msg_ss << " with transpose's xshape";
+    string::PrettyLogDetail(msg_ss.str().c_str());
+  }
 }
 
 void ReshapeTransposeMatmulMkldnnFusePass::ApplyImpl(ir::Graph *graph) const {
5 changes: 3 additions & 2 deletions paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc
@@ -129,8 +129,9 @@ void ScaleMatmulFusePass::ApplyImpl(ir::Graph* graph) const {
   };
   gpd(graph, handler);
   AddStatis(found_scale_matmul_fuse_count);
-  PrettyLogDetail("--- fused %d scale with matmul",
-                  found_scale_matmul_fuse_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d scale with matmul",
+                    found_scale_matmul_fuse_count);
 }
 
 }  // namespace ir
2 changes: 2 additions & 0 deletions paddle/fluid/inference/analysis/ir_pass_manager.cc
@@ -246,6 +246,8 @@ void IRPassManager::CreatePasses(Argument *argument,
       pass->Set("use_fc_padding", new bool(use_fc_padding));
     }
 
+    pass->Set("disable_logs", new bool(disable_logs_));
+
     pre_pass = pass_name;
 
     passes_.emplace_back(std::move(pass));
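Taken together, IRPassManager::CreatePasses now attaches a "disable_logs" attribute to every pass it builds, and each fuse pass prints its summary only when that attribute is absent or set to false, so passes constructed outside the manager keep logging as before. A minimal standalone sketch of that guard semantics (not Paddle code; the names below are illustrative only):

#include <cstdio>
#include <map>
#include <string>

// Toy stand-in for a pass's attribute store (models Has()/Get<bool>() on ir::Pass).
struct PassAttrs {
  std::map<std::string, bool> attrs;
  bool Has(const std::string& key) const { return attrs.count(key) > 0; }
  bool Get(const std::string& key) const { return attrs.at(key); }
};

// Mirrors the guard used by the fuse passes above: log unless the
// "disable_logs" attribute exists and is true.
void MaybeLogFusion(const PassAttrs& pass, int fusion_count) {
  if (!pass.Has("disable_logs") || !pass.Get("disable_logs"))
    std::printf("--- fused %d patterns\n", fusion_count);
}

int main() {
  PassAttrs standalone_pass;  // attribute never set -> still logs
  PassAttrs managed_pass;
  managed_pass.attrs["disable_logs"] = true;  // models pass->Set("disable_logs", new bool(true))
  MaybeLogFusion(standalone_pass, 3);  // prints
  MaybeLogFusion(managed_pass, 3);     // silent
  return 0;
}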
