This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

fix amalgamation (#184)
eric-haibin-lin authored Aug 22, 2017
1 parent 7d525f8 commit aa04cd2
Showing 2 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion src/operator/deconvolution-inl.h
@@ -256,7 +256,7 @@ class DeconvolutionOp : public Operator {
     if (!param_.no_bias) {
       // add bias, broadcast bias to dim 1: channel
       Tensor<xpu, 1, DType> bias = in_data[deconv::kBias].get<xpu, 1, DType>(s);
-      out += broadcast<1>(bias, out.shape_);
+      out += mshadow::expr::broadcast<1>(bias, out.shape_);
     }
   }

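Why fully qualifying the call fixes the amalgamated build: the amalgamation concatenates the sources into a single translation unit, so every namespace and using-directive becomes visible at once, and the unqualified name broadcast can then resolve to something other than mshadow::expr::broadcast. (That is a reading of the change; the commit message itself only says "fix amalgamation".) Below is a minimal standalone sketch of the bias-broadcast pattern this hunk touches; the shapes, values, and build flags (e.g. g++ -I<mshadow> -DMSHADOW_USE_MKL=0 -DMSHADOW_USE_CBLAS=0) are illustrative assumptions, not part of the commit.

#include <cstdio>
#include "mshadow/tensor.h"

using namespace mshadow;

int main() {
  InitTensorEngine<cpu>();
  // out: (batch = 4, channel = 3); bias: one value per channel.
  Tensor<cpu, 2, float> out  = NewTensor<cpu>(Shape2(4, 3), 0.0f);
  Tensor<cpu, 1, float> bias = NewTensor<cpu>(Shape1(3), 1.5f);
  // Fully qualified, as in the fix: dimension 1 of out.shape_ must match
  // the bias length; the bias is then replicated across the other axes.
  out += mshadow::expr::broadcast<1>(bias, out.shape_);
  printf("out[0][0] = %.2f\n", out[0][0]);  // every element is now 1.50
  FreeSpace(&out);
  FreeSpace(&bias);
  ShutdownTensorEngine<cpu>();
  return 0;
}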
4 changes: 2 additions & 2 deletions src/operator/leaky_relu-inl.h
@@ -111,7 +111,7 @@ class LeakyReLUOp : public Operator {
       case leakyrelu::kPReLU: {
         weight = in_data[leakyrelu::kGamma].get<xpu, 1, real_t>(s);
         Assign(out, req[leakyrelu::kOut],
-               F<mshadow_op::xelu>(data, broadcast<1>(weight, out.shape_)));
+               F<mshadow_op::xelu>(data, mshadow::expr::broadcast<1>(weight, out.shape_)));
         break;
       }
       case leakyrelu::kRReLU: {
@@ -177,7 +177,7 @@
         weight = in_data[leakyrelu::kGamma].get<xpu, 1, real_t>(s);
         grad_weight = in_grad[leakyrelu::kGamma].get<xpu, 1, real_t>(s);
         grad_weight = sumall_except_dim<1>(F<prelu_grad>(data) * grad);
-        gdata = F<mshadow_op::xelu_grad>(data, broadcast<1>(weight, data.shape_)) * grad;
+        gdata = F<mshadow_op::xelu_grad>(data, mshadow::expr::broadcast<1>(weight, data.shape_)) * grad;
         break;
       }
       case leakyrelu::kRReLU: {
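The two PReLU hunks above combine the qualified broadcast with mshadow's F<Op> elementwise map. The sketch below shows that pattern for the forward case; xelu_like is a hypothetical stand-in written to mimic what mxnet's mshadow_op::xelu computes (x when x > 0, otherwise slope * x), under the same standalone build assumptions as the earlier sketch.

#include <cstdio>
#include "mshadow/tensor.h"

using namespace mshadow;
using namespace mshadow::expr;

// Hypothetical stand-in for mxnet's mshadow_op::xelu:
// returns x when x > 0, otherwise a * x (PReLU with per-channel slope a).
struct xelu_like {
  MSHADOW_XINLINE static float Map(float x, float a) {
    return x > 0.0f ? x : a * x;
  }
};

int main() {
  InitTensorEngine<cpu>();
  // data: (batch = 4, channel = 3); weight: one learned slope per channel.
  Tensor<cpu, 2, float> data   = NewTensor<cpu>(Shape2(4, 3), -1.0f);
  Tensor<cpu, 2, float> out    = NewTensor<cpu>(Shape2(4, 3), 0.0f);
  Tensor<cpu, 1, float> weight = NewTensor<cpu>(Shape1(3), 0.25f);
  // Elementwise binary map with the slope broadcast along dimension 1,
  // mirroring the kPReLU forward case in leaky_relu-inl.h.
  out = F<xelu_like>(data, mshadow::expr::broadcast<1>(weight, out.shape_));
  printf("out[0][0] = %.2f\n", out[0][0]);  // -0.25 for x = -1, slope 0.25
  FreeSpace(&data);
  FreeSpace(&out);
  FreeSpace(&weight);
  ShutdownTensorEngine<cpu>();
  return 0;
}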
