
Commit fff05bd

save
MarisaKirisame committed Aug 24, 2020
1 parent 6201c5c commit fff05bd
Showing 1 changed file with 4 additions and 4 deletions.
src/relay/transforms/gradient.cc (4 additions & 4 deletions)
@@ -357,7 +357,7 @@ Expr LiftTensor(const std::function<Expr(const Expr& t)>& f,
   if (forward_type.as<TensorTypeNode>()) {
     auto ret = ll->Push(f(e));
     ret->checked_type_ = tf(forward_type);
-    return ret;
+    return std::move(ret);
   } else if (auto* tt = forward_type.as<TupleTypeNode>()) {
     tvm::Array<Expr> fields;
     tvm::Array<Type> types;
@@ -368,7 +368,7 @@ Expr LiftTensor(const std::function<Expr(const Expr& t)>& f,
     }
     auto ret = ll->Push(Tuple(fields));
     ret->checked_type_ = TupleType(types);
-    return ret;
+    return std::move(ret);
   } else {
     LOG(FATAL) << "unsupported input/output type: " << tt;
     throw;
@@ -459,7 +459,7 @@ struct ReverseAD : ExprMutator {
     // memoize Var -> ADVar so we don't end up with free Vars when checkpointing
     auto var_ref = GetRef<Var>(var);
     if (ad_vars->count(var_ref) == 0) {
-      return var_ref;
+      return std::move(var_ref);
     } else {
       return GetValue(var_ref->checked_type(), ad_vars->at(var_ref), ll);
     }
@@ -648,7 +648,7 @@ Expr Gradient(const Expr& re, const Optional<IRModule>& mod) {
   });
   auto ret = Function(f->params, body, GradRetType(GetRef<Function>(f)), {});
   CheckFeature(ret, FeatureSet::All() - fGraph);
-  return ret;
+  return std::move(ret);
 }
 
 TVM_REGISTER_GLOBAL("relay._transform.gradient").set_body_typed(Gradient);
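The commit message is terse, but all four hunks make the same change: a function declared to return Expr returns a local whose static type is a subtype of Expr — a Var produced by LetList::Push (and GetRef<Var> in ReverseAD), and a Function in Gradient. Before C++20, implicit move-on-return only applies when the local's type exactly matches the return type, so each of these returns invoked the copy constructor instead; for TVM's reference-counted ObjectRef handles, a copy means an extra atomic refcount increment and decrement. Wrapping the local in std::move restores the move. Below is a minimal standalone sketch of that language rule — illustrative names only (Counter, WithoutMove, WithMove), not TVM code:

// build: g++ -std=c++14 move_sketch.cc && ./a.out
#include <iostream>
#include <utility>

// Counts copy constructions so the copy-vs-move difference is observable.
struct Counter {
  static int copies;
  Counter() = default;
  Counter(const Counter&) { ++copies; }
  Counter(Counter&&) noexcept {}
  Counter& operator=(const Counter&) { ++copies; return *this; }
  Counter& operator=(Counter&&) noexcept { return *this; }
};
int Counter::copies = 0;

// Stand-ins for a base/derived pair of handle types, like Expr and Var.
struct Expr { Counter payload; };
struct Var : Expr {};

Expr WithoutMove() {
  Var ret;
  return ret;  // Var != Expr: pre-C++20 implicit move does not apply -> copy
}

Expr WithMove() {
  Var ret;
  return std::move(ret);  // explicit move into the Expr return value
}

int main() {
  WithoutMove();
  std::cout << "return ret;            copies = " << Counter::copies << "\n";  // 1
  Counter::copies = 0;
  WithMove();
  std::cout << "return std::move(ret); copies = " << Counter::copies << "\n";  // 0
}

Built as C++14 or C++17 this prints copies = 1 for the plain return and copies = 0 with the move. C++20 (P1825) extended implicit move to cover derived-to-base returns like these, after which an explicit std::move in this position may instead trigger a compiler's redundant-move warning.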