From b1c5820b3fe2361d416bc1cab95b457d3e300448 Mon Sep 17 00:00:00 2001
From: zhoukunsheng
Date: Fri, 12 Apr 2019 15:09:58 +0800
Subject: [PATCH] fix merge conflict

---
 paddle/fluid/operators/activation_op.cc | 2 --
 paddle/fluid/operators/activation_op.h  | 5 +++--
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/paddle/fluid/operators/activation_op.cc b/paddle/fluid/operators/activation_op.cc
index c53427b465bc3..348902c656cec 100644
--- a/paddle/fluid/operators/activation_op.cc
+++ b/paddle/fluid/operators/activation_op.cc
@@ -602,7 +602,6 @@ REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);
 
 namespace ops = paddle::operators;
 
-
 #define REGISTER_ACTIVATION_OP(KERNEL_TYPE, OP_NAME, functor, grad_functor) \
   REGISTER_OPERATOR(                                                        \
       KERNEL_TYPE, ops::ActivationOp, ops::OP_NAME##OpMaker,                \
@@ -619,7 +618,6 @@ namespace ops = paddle::operators;
 
 #define REGISTER_ACTIVATION_CPU_KERNEL(act_type, op_name, functor,        \
                                        grad_functor)                      \
-
   REGISTER_OP_CPU_KERNEL(                                                 \
       act_type, ops::ActivationKernel<paddle::platform::CPUDeviceContext, \
                                       ops::functor<float>>,               \
diff --git a/paddle/fluid/operators/activation_op.h b/paddle/fluid/operators/activation_op.h
index e50f3bf766d13..1732f61582f79 100644
--- a/paddle/fluid/operators/activation_op.h
+++ b/paddle/fluid/operators/activation_op.h
@@ -36,7 +36,6 @@ limitations under the License. */
 namespace paddle {
 namespace operators {
 
-
 enum ActBwdOpFwdDeps {
   kNoDeps = 0x00,  // Do not need any forward input/output
   kDepX = 0x01,    // Only need forward input X
@@ -528,6 +527,8 @@ struct RsqrtGradFunctor : public BaseActivationFunctor<T> {
   void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
     dx.device(d) = static_cast<T>(-0.5) * dout * out * out * out;
   }
+
+  static constexpr ActBwdOpFwdDeps FwdDeps() { return kDepOut; }
 };
 
 // ceil(x) = ceiling(x)
@@ -1200,7 +1201,6 @@ struct SwishGradFunctor : public BaseActivationFunctor<T> {
 }  // namespace operators
 }  // namespace paddle
 
-
 #define FOR_EACH_ACTIVATION_OP(__macro)                                      \
   __macro(sigmoid, Sigmoid, SigmoidFunctor, SigmoidGradFunctor);             \
   __macro(logsigmoid, LogSigmoid, LogSigmoidFunctor, LogSigmoidGradFunctor); \
@@ -1211,6 +1211,7 @@ struct SwishGradFunctor : public BaseActivationFunctor<T> {
   __macro(atan, Atan, AtanFunctor, AtanGradFunctor);                         \
   __macro(softshrink, SoftShrink, SoftShrinkFunctor, SoftShrinkGradFunctor); \
   __macro(sqrt, Sqrt, SqrtFunctor, SqrtGradFunctor);                         \
+  __macro(rsqrt, Rsqrt, RsqrtFunctor, RsqrtGradFunctor);                     \
   __macro(abs, Abs, AbsFunctor, AbsGradFunctor);                             \
   __macro(ceil, Ceil, CeilFunctor, ZeroGradFunctor);                         \
   __macro(floor, Floor, FloorFunctor, ZeroGradFunctor);                      \
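
Note on the RsqrtGradFunctor change above (not part of the patch): the backward
formula dx = -0.5 * dout * out^3 follows from out = rsqrt(x) = x^(-1/2), whose
derivative is -0.5 * x^(-3/2) = -0.5 * out^3. Since the gradient can be written
entirely in terms of the forward output, FwdDeps() returns kDepOut and the
forward input X is not needed. The standalone C++ snippet below is a minimal
sketch, independent of Paddle, that checks this identity against a central
finite difference:

#include <cassert>
#include <cmath>
#include <cstdio>

int main() {
  const double x = 3.7;
  const double out = 1.0 / std::sqrt(x);           // forward: rsqrt(x)
  const double analytic = -0.5 * out * out * out;  // gradient using only `out`
  const double eps = 1e-6;
  const double numeric =
      (1.0 / std::sqrt(x + eps) - 1.0 / std::sqrt(x - eps)) / (2.0 * eps);
  std::printf("analytic = %.9f, numeric = %.9f\n", analytic, numeric);
  assert(std::fabs(analytic - numeric) < 1e-6);  // the two should agree closely
  return 0;
}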