From 607cbd34cdfb8f5aa2c157c76870ba20ef7cda50 Mon Sep 17 00:00:00 2001
From: Tianqi Chen
Date: Wed, 13 Sep 2017 12:47:25 -0700
Subject: [PATCH] [TOP] Add comments about optimizable ops (#10)

---
 nnvm/src/top/nn/nn.cc | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/nnvm/src/top/nn/nn.cc b/nnvm/src/top/nn/nn.cc
index 3dc439103b1bb..ee05b6be57a5f 100644
--- a/nnvm/src/top/nn/nn.cc
+++ b/nnvm/src/top/nn/nn.cc
@@ -163,6 +163,9 @@ by::
 The parameter ``axis`` specifies which axis of the input shape denotes
 the 'channel' (separately normalized groups). The default is 1.
 Specifying -1 sets the channel axis to be the last item in the input shape.
+
+.. note::
+    This operator can be optimized away for inference.
 )" NNVM_ADD_FILELINE)
 .add_argument("data", "Tensor", "Input to which dropout will be applied")
 .add_argument("gamma", "Tensor", "The gamma scale factor")
@@ -197,6 +200,8 @@ NNVM_REGISTER_OP(softmax)
 
 .. math:: \text{softmax}(x)_i = \frac{exp(x_i)}{\sum_j exp(x_j)}
 
+.. note::
+    This operator can be optimized away for inference.
 )code" NNVM_ADD_FILELINE)
.set_num_inputs(1)
 .set_num_outputs(1)
@@ -208,10 +213,12 @@ NNVM_REGISTER_OP(softmax)
 
 // log_softmax
 NNVM_REGISTER_OP(log_softmax)
-.describe(R"code(Computes softmax.
+.describe(R"code(Computes log softmax.
 
 .. math:: \text{log_softmax}(x)_i = \log \frac{exp(x_i)}{\sum_j exp(x_j)}
 
+.. note::
+    This operator can be optimized away for inference.
 )code" NNVM_ADD_FILELINE)
 .set_num_inputs(1)
 .set_num_outputs(1)
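
For reference, the two formulas documented in the hunks above correspond to the following
standalone C++ sketch. It is not part of the patch and does not use any NNVM API; the helper
names Softmax and LogSoftmax are illustrative only.

    // Minimal sketch of the documented formulas, plain C++ with no NNVM dependencies.
    #include <algorithm>
    #include <cmath>
    #include <vector>

    // softmax(x)_i = exp(x_i) / sum_j exp(x_j), computed with the usual
    // max-subtraction trick for numerical stability.
    std::vector<double> Softmax(const std::vector<double>& x) {
      const double m = *std::max_element(x.begin(), x.end());
      std::vector<double> out(x.size());
      double sum = 0.0;
      for (size_t i = 0; i < x.size(); ++i) {
        out[i] = std::exp(x[i] - m);
        sum += out[i];
      }
      for (double& v : out) v /= sum;
      return out;
    }

    // log_softmax(x)_i = x_i - m - log(sum_j exp(x_j - m)), algebraically equal to
    // log(softmax(x)_i) but better conditioned for large-magnitude inputs.
    std::vector<double> LogSoftmax(const std::vector<double>& x) {
      const double m = *std::max_element(x.begin(), x.end());
      double sum = 0.0;
      for (double v : x) sum += std::exp(v - m);
      const double log_sum = std::log(sum);
      std::vector<double> out(x.size());
      for (size_t i = 0; i < x.size(); ++i) out[i] = x[i] - m - log_sum;
      return out;
    }

Both transforms are monotonic in their input, which is why a graph compiler can drop them when
only the arg-max of the output is consumed at inference time; batch_norm can likewise be folded
into the preceding layer's weights, which is the situation the added notes point at.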