diff --git a/paddle/fluid/eager/api/manual/eager_manual/forwards/multiply_fwd_func.cc b/paddle/fluid/eager/api/manual/eager_manual/forwards/multiply_fwd_func.cc
index 2bd9213cae610..47509d025722d 100644
--- a/paddle/fluid/eager/api/manual/eager_manual/forwards/multiply_fwd_func.cc
+++ b/paddle/fluid/eager/api/manual/eager_manual/forwards/multiply_fwd_func.cc
@@ -61,8 +61,9 @@ paddle::Tensor multiply_ad_func(const paddle::Tensor& x,
   // Type promotion Logic
   if (phi::NeedTypePromotion(x.dtype(), y.dtype())) {
     VLOG(5) << "got different data type, run type protmotion automatically.";
-    LOG(WARNING) << "got different data type, run type protmotion "
-                    "automatically, this may cause data type been changed.";
+    LOG_FIRST_N(WARNING, 1)
+        << "got different data type, run type protmotion "
+           "automatically, this may cause data type been changed.";
     auto op_name = phi::TransToFluidOpName("multiply");
     auto promotion_type = phi::GetPromoteDtype(op_name, x.dtype(), y.dtype());
 
@@ -407,8 +408,9 @@ paddle::Tensor multiply_ad_func(const paddle::Tensor& x,
   // Type promotion Logic
   if (phi::NeedTypePromotion(x.dtype(), y.dtype())) {
     VLOG(5) << "got different data type, run type protmotion automatically.";
-    LOG(WARNING) << "got different data type, run type protmotion "
-                    "automatically, this may cause data type been changed.";
+    LOG_FIRST_N(WARNING, 1)
+        << "got different data type, run type protmotion "
+           "automatically, this may cause data type been changed.";
     auto op_name = phi::TransToFluidOpName("multiply");
     auto promotion_type = phi::GetPromoteDtype(op_name, x.dtype(), y.dtype());
 
diff --git a/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py b/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py
index 6001430546980..8555519778e3f 100644
--- a/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py
+++ b/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py
@@ -528,7 +528,7 @@ class {} : public egr::GradNodeBase {{
 TYPE_PROMOTION_LOGIC_TEMPLATE = """
   if (phi::NeedTypePromotion({x}.dtype(), {y}.dtype())) {{
     VLOG(5) << "got different data type, run type protmotion automatically.";
-    LOG(WARNING) << "got different data type, run type protmotion automatically, this may cause data type been changed.";
+    LOG_FIRST_N(WARNING, 1) << "got different data type, run type protmotion automatically, this may cause data type been changed.";
     {op_name}
     auto promotion_type = phi::GetPromoteDtype(op_name, {x}.dtype(), {y}.dtype());
 
diff --git a/python/paddle/base/layers/math_op_patch.py b/python/paddle/base/layers/math_op_patch.py
index d69c997816fcf..dbf23b5fff2ff 100644
--- a/python/paddle/base/layers/math_op_patch.py
+++ b/python/paddle/base/layers/math_op_patch.py
@@ -538,8 +538,10 @@ def __impl__(self, other_var):
                     op_type, lhs_dtype, rhs_dtype
                 )
                 warnings.warn(
-                    f"The input dtypes of OP {op_type} are {lhs_dtype} and {rhs_dtype}, "
-                    "the output will be auto-promoted to {common_dtype}"
+                    f"The input dtypes of OP {op_type} are {lhs_dtype} and {rhs_dtype}, the output will be auto-promoted to {common_dtype}"
+                )
+                warnings.filterwarnings(
+                    "ignore", message="The input dtypes of OP"
                 )
                 if rhs_dtype != common_dtype:
                     other_var = astype(other_var, common_dtype)
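
Note on the warn-once pattern used above: the C++ changes rely on glog's `LOG_FIRST_N(WARNING, 1)`, which emits the promotion warning only on its first hit per process, while the Python change emits the warning once and then registers an `ignore` filter for messages starting with "The input dtypes of OP". Below is a minimal, self-contained sketch of that Python-side pattern; the `warn_once` helper and the sample message are illustrative only and are not part of this patch.

```python
import warnings


def warn_once(message: str, prefix: str) -> None:
    """Emit `message`, then silence later warnings whose text starts with `prefix`.

    Illustrative helper only; it mirrors the filterwarnings("ignore", ...) call
    added in math_op_patch.py. `prefix` is treated as a regex matched against
    the start of the warning text, per warnings.filterwarnings semantics.
    """
    warnings.warn(message)
    warnings.filterwarnings("ignore", message=prefix)


if __name__ == "__main__":
    for _ in range(3):
        warn_once(
            "The input dtypes of OP elementwise_mul are float16 and float32, "
            "the output will be auto-promoted to float32",
            prefix="The input dtypes of OP",
        )
    # Only the first iteration prints a UserWarning; the filter installed
    # right after it suppresses the repeats.
```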