Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
Provide a failing test for ReLU activation shape inference bug
Browse files Browse the repository at this point in the history
  • Loading branch information
larroy committed Nov 26, 2018
1 parent cb627bc commit 597f9e5
Showing 1 changed file with 14 additions and 6 deletions.
20 changes: 14 additions & 6 deletions tests/cpp/operator/activation_perf.cc
Original file line number Diff line number Diff line change
Expand Up @@ -38,13 +38,21 @@ const kwargs_t basic_activation_args = { };
* \brief Generic bidirectional sanity test
*/
TEST(ACTIVATION_PERF, ExecuteBidirectional) {
  using namespace std;
  // Input shape shared by every activation type exercised below.
  TShape shape({5, 5});
  // All Activation op act_type values to sanity-check, forward and backward.
  // (The old single-"tanh" kwargs setup was dead code after the loop refactor
  // and has been removed.)
  const vector<string> activations = {
    "relu",
    "sigmoid",
    "tanh",
    "softrelu",
    "softsign"
  };
  for (const string& activation : activations) {
    // Fresh args per activation type; the runner drives one forward/backward
    // pass pair (count = 1) through Activation/_backward_Activation.
    kwargs_t activation_args = {{"act_type", activation}};
    test::op::CoreOperatorRunner<float> runner;
    runner.RunBidirectional(false, { shape }, test::op::CoreOpExecutor<float>::ArgsWithOpName(
      activation_args, "Activation", "_backward_Activation"), 1);
  }
}

/*!
Expand Down

0 comments on commit 597f9e5

Please sign in to comment.