
Commit 5f1ec74
move activation sigmoid
YuanRisheng committed Mar 16, 2022
1 parent 2f5fb03 commit 5f1ec74
Showing 16 changed files with 563 additions and 608 deletions.
2 changes: 1 addition & 1 deletion paddle/fluid/eager/tests/task_tests/generated_test.cc
@@ -128,6 +128,6 @@ TEST(Generated, ElementwiseAdd) {
 
 } // namespace egr
 
-USE_OP(sigmoid);
+USE_OP_ITSELF(sigmoid);
 USE_OP_ITSELF(elementwise_add);
 USE_OP_ITSELF(matmul_v2);
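For context on the recurring one-line change above: in Paddle's fluid framework, USE_OP(op) forces the linker to keep both the operator definition and its fluid CPU kernel registration, while USE_OP_ITSELF(op) keeps only the operator itself. Because this commit moves the sigmoid kernels out of fluid (into the phi kernel library), the fluid kernel registration symbol no longer exists, so every test that referenced it must switch to USE_OP_ITSELF. Below is a minimal, self-contained C++ sketch of the symbol-touching idiom these macros rely on; all names are illustrative, not Paddle's actual macro expansion.

#include <iostream>

// --- In the op's translation unit ---
// A registrar object whose constructor runs at program start-up performs the
// actual registration; the Touch function gives other translation units a
// symbol to reference so the linker cannot discard this object file.
struct OpRegistrar {
  OpRegistrar() { std::cout << "sigmoid op registered\n"; }
};
static OpRegistrar g_sigmoid_registrar;
int TouchOpRegistrar_sigmoid() { return 0; }

// --- In the test's translation unit ---
// A USE_OP_ITSELF-style macro expands to roughly this: reference the Touch
// symbol so the operator registration is linked in, WITHOUT referencing any
// fluid kernel symbol (which, after this commit, no longer exists for sigmoid).
extern int TouchOpRegistrar_sigmoid();
static int g_use_op_itself_sigmoid = TouchOpRegistrar_sigmoid();

int main() { return g_use_op_itself_sigmoid; }

The remaining USE_OP → USE_OP_ITSELF hunks below apply the same mechanical change to other test files.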
@@ -255,6 +255,6 @@ TEST(Hook_intermidiate, Matmul_v2) {
 }
 } // namespace egr
 
-USE_OP(sigmoid);
+USE_OP_ITSELF(sigmoid);
 USE_OP_ITSELF(elementwise_add);
 USE_OP_ITSELF(matmul_v2);
@@ -31,7 +31,7 @@ USE_OP(slice);
 USE_OP(concat);
 USE_OP(matmul);
 USE_OP_ITSELF(elementwise_add);
-USE_OP(sigmoid);
+USE_OP_ITSELF(sigmoid);
 USE_OP_ITSELF(tanh);
 USE_OP(elementwise_mul);
 USE_OP(softmax_with_cross_entropy);
@@ -47,7 +47,7 @@ USE_OP(square);
 USE_OP(transpose2_grad);
 USE_OP(concat_grad);
 USE_OP_ITSELF(elementwise_mul_grad);
-USE_OP(sigmoid_grad);
+USE_OP_ITSELF(sigmoid_grad);
 USE_OP_ITSELF(tanh_grad);
 USE_OP(sum);
 USE_OP(slice_grad);
@@ -53,6 +53,6 @@ TEST(Relu6OpConverter, main) { test_activation("relu6"); }
 } // namespace paddle
 
 USE_OP_ITSELF(relu);
-USE_OP(sigmoid);
+USE_OP_ITSELF(sigmoid);
 USE_OP_ITSELF(tanh);
 USE_OP(relu6);
28 changes: 4 additions & 24 deletions paddle/fluid/operators/activation_op.cc
@@ -1492,6 +1492,10 @@ REGISTER_ACTIVATION_OP(softshrink, SoftShrink, SoftShrinkFunctor,
 REGISTER_ACTIVATION_OP(tanh_shrink, TanhShrink, TanhShrinkFunctor,
                        TanhShrinkGradFunctor);
 REGISTER_ACTIVATION_OP(silu, Silu, SiluFunctor, SiluGradFunctor);
+REGISTER_ACTIVATION_OP(hard_sigmoid, HardSigmoid, HardSigmoidFunctor,
+                       HardSigmoidGradFunctor);
+REGISTER_ACTIVATION_OP(logsigmoid, LogSigmoid, LogSigmoidFunctor,
+                       LogSigmoidGradFunctor);
 
 /* ========================== sigmoid register =============================
  */
@@ -1526,30 +1530,6 @@ REGISTER_OPERATOR(sigmoid_triple_grad,
                       ops::SigmoidTripleGradFunctor<float>::FwdDeps()>,
     ops::ActivationTripleGradOpInplaceInferer);
 
-// Register Sigmoid/GradSigmoid Kernels
-REGISTER_ACTIVATION_CPU_KERNEL(sigmoid, Sigmoid, SigmoidFunctor,
-                               SigmoidGradFunctor);
-
-// Register DoubleGrad Kernel
-REGISTER_OP_CPU_KERNEL(
-    sigmoid_grad_grad,
-    ops::SigmoidDoubleGradKernel<plat::CPUDeviceContext,
-                                 ops::SigmoidGradGradFunctor<float>>,
-    ops::SigmoidDoubleGradKernel<plat::CPUDeviceContext,
-                                 ops::SigmoidGradGradFunctor<double>>,
-    ops::SigmoidDoubleGradKernel<plat::CPUDeviceContext,
-                                 ops::SigmoidGradGradFunctor<plat::float16>>);
-
-// Register TripleGrad Kernel
-REGISTER_OP_CPU_KERNEL(
-    sigmoid_triple_grad,
-    ops::SigmoidTripleGradKernel<plat::CPUDeviceContext,
-                                 ops::SigmoidTripleGradFunctor<float>>,
-    ops::SigmoidTripleGradKernel<plat::CPUDeviceContext,
-                                 ops::SigmoidTripleGradFunctor<double>>,
-    ops::SigmoidTripleGradKernel<plat::CPUDeviceContext,
-                                 ops::SigmoidTripleGradFunctor<plat::float16>>);
-
 /* ========================================================================== */
 
 /* ========================== tanh register ============================= */
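The CPU kernel registrations deleted above do not simply disappear; the commit title ("move activation sigmoid") indicates they are re-registered on the phi side, in files that are part of this commit but not shown on this page. A hedged sketch of what such phi-side registrations typically look like, using phi's PD_REGISTER_KERNEL macro; the exact file (e.g. paddle/phi/kernels/cpu/activation_grad_kernel.cc), kernel names, and dtype lists are assumptions, not copied from the diff:

// Hedged sketch only: phi-style CPU registrations assumed to replace the
// deleted fluid ones. Kernel names and dtype lists are guesses, not taken
// from this commit.
PD_REGISTER_KERNEL(sigmoid, CPU, ALL_LAYOUT, phi::SigmoidKernel, float, double) {}
PD_REGISTER_KERNEL(sigmoid_double_grad, CPU, ALL_LAYOUT,
                   phi::SigmoidDoubleGradKernel, float, double,
                   phi::dtype::float16) {}
PD_REGISTER_KERNEL(sigmoid_triple_grad, CPU, ALL_LAYOUT,
                   phi::SigmoidTripleGradKernel, float, double,
                   phi::dtype::float16) {}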
(Diffs for the remaining changed files are not shown.)

1 comment on commit 5f1ec74

@paddle-bot-old


Congratulations! Your pull request has passed all required CI checks. You can ask reviewer(s) to approve and merge. 🎉
