From 1bec83f4cf8ee30b39c43973636e147fb45899cd Mon Sep 17 00:00:00 2001
From: Wangzheee <634486483@qq.com>
Date: Wed, 10 Aug 2022 19:35:42 +0800
Subject: [PATCH] disable_skip_layernorm_fp16 (#45041)

---
 .../tensorrt/convert/skip_layernorm.cc | 28 ++++++++++++-------
 1 file changed, 18 insertions(+), 10 deletions(-)

diff --git a/paddle/fluid/inference/tensorrt/convert/skip_layernorm.cc b/paddle/fluid/inference/tensorrt/convert/skip_layernorm.cc
index 831e117311771..d5252bb5a35ed 100644
--- a/paddle/fluid/inference/tensorrt/convert/skip_layernorm.cc
+++ b/paddle/fluid/inference/tensorrt/convert/skip_layernorm.cc
@@ -22,7 +22,8 @@ namespace tensorrt {
 class SkipLayerNormOpConverter : public OpConverter {
  public:
   void operator()(const framework::proto::OpDesc& op,
-                  const framework::Scope& scope, bool test_mode) override {
+                  const framework::Scope& scope,
+                  bool test_mode) override {
 #if IS_TRT_VERSION_GE(6000)
     VLOG(4) << "convert fused skip layernorm op to tensorrt layer";
     framework::OpDesc op_desc(op, nullptr);
@@ -63,7 +64,8 @@
         auto creator = GetPluginRegistry()->getPluginCreator(
             "CustomSkipLayerNormPluginDynamic", "3");
         PADDLE_ENFORCE_NE(
-            creator, nullptr,
+            creator,
+            nullptr,
             platform::errors::InvalidArgument(
                 "fail to get creator of CustomSkipLayerNormPluginDynamic"));
         const std::vector<nvinfer1::PluginField> fields{
@@ -85,7 +87,8 @@
         auto plugin_layer = engine_->network()->addPluginV2(
             inputs.data(), inputs.size(), *pluginObj);
         PADDLE_ENFORCE_NE(
-            plugin_layer, nullptr,
+            plugin_layer,
+            nullptr,
             platform::errors::InvalidArgument(
                 "fail to add CustomSkipLayerNormPluginDynamic layer"));
         layer = plugin_layer;
@@ -93,14 +96,16 @@
         auto creator = GetPluginRegistry()->getPluginCreator(
             "CustomSkipLayerNormPluginDynamic", "2");
         PADDLE_ENFORCE_NE(
-            creator, nullptr,
+            creator,
+            nullptr,
             platform::errors::InvalidArgument(
                 "fail to get creator of CustomSkipLayerNormPluginDynamic"));
         int type = static_cast<int>((engine_->WithFp16() == 1)
                                         ? nvinfer1::DataType::kHALF
                                         : nvinfer1::DataType::kFLOAT);
         int ld = input1->getDimensions().d[2];  // hidden dimension
-        PADDLE_ENFORCE_GT(ld, 0,
+        PADDLE_ENFORCE_GT(ld,
+                          0,
                           platform::errors::InvalidArgument(
                               "in CustomSkipLayerNormPluginDynamic hidden "
                               "dimension should > 0"));
@@ -128,18 +133,21 @@
         auto plugin_layer = engine_->network()->addPluginV2(
             inputs.data(), inputs.size(), *pluginObj);
         PADDLE_ENFORCE_NE(
-            plugin_layer, nullptr,
+            plugin_layer,
+            nullptr,
             platform::errors::InvalidArgument(
                 "fail to add CustomSkipLayerNormPluginDynamic layer"));
         layer = plugin_layer;
       }
     } else {
       float eps = BOOST_GET_CONST(float, op_desc.GetAttr("epsilon"));
-      bool with_fp16 =
-          engine_->WithFp16() && !engine_->disable_trt_plugin_fp16();
+      /* bool with_fp16 =
+          engine_->WithFp16() && !engine_->disable_trt_plugin_fp16();
+      */
+      bool with_fp16 = false;
       plugin::SkipLayerNormPluginDynamic* plugin =
-          new plugin::SkipLayerNormPluginDynamic(bias, scale, bias_size,
-                                                 scale_size, eps, with_fp16);
+          new plugin::SkipLayerNormPluginDynamic(
+              bias, scale, bias_size, scale_size, eps, with_fp16);
       layer = engine_->AddDynamicPlugin(inputs.data(), 2, plugin);
     }
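--
Reviewer note (not part of the commit): the one functional change above is in the
non-OSS branch, where with_fp16 is pinned to false, so SkipLayerNormPluginDynamic
is always built for fp32 even when the engine itself runs in fp16; the remaining
hunks are argument-per-line reformatting only. Below is a minimal standalone
sketch of that precision decision before and after the patch. Engine here is a
hypothetical stand-in for TensorRTEngine; only the two query names (WithFp16,
disable_trt_plugin_fp16) mirror the converter code above, everything else is
illustrative.

#include <iostream>

// Hypothetical stand-in for TensorRTEngine; only WithFp16() and
// disable_trt_plugin_fp16() are taken from the converter above.
struct Engine {
  bool fp16_mode = false;             // engine built with kHALF enabled
  bool plugin_fp16_disabled = false;  // plugins opted out of fp16
  bool WithFp16() const { return fp16_mode; }
  bool disable_trt_plugin_fp16() const { return plugin_fp16_disabled; }
};

// Before this patch: the plugin followed the engine's precision flags.
bool WithFp16Before(const Engine& e) {
  return e.WithFp16() && !e.disable_trt_plugin_fp16();
}

// After this patch: skip_layernorm's plugin is pinned to fp32.
bool WithFp16After(const Engine&) { return false; }

int main() {
  Engine fp16_engine{/*fp16_mode=*/true, /*plugin_fp16_disabled=*/false};
  std::cout << "before: " << WithFp16Before(fp16_engine)  // 1 -> fp16 kernel
            << ", after: " << WithFp16After(fp16_engine)  // 0 -> fp32 kernel
            << "\n";
  return 0;
}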