This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

[MXNET-860] Use emplace where helpful (#12694)
* [MXNET-860] Use emplace where helpful

* [MXNET-860] Add emplace as an error in clang-tidy
KellenSunderland authored and eric-haibin-lin committed Oct 3, 2018
1 parent 5c86143 · commit bcd24f8
Showing 14 changed files with 29 additions and 29 deletions.
3 changes: 2 additions & 1 deletion .clang-tidy
@@ -54,7 +54,8 @@ Checks: >

 # In order to trigger an error, you must have a rule defined both in checks and in this section.
 WarningsAsErrors: >
-    cppcoreguidelines-no-malloc, modernize-use-nullptr, performance-unnecessary-copy-initialization
+    cppcoreguidelines-no-malloc, modernize-use-nullptr, performance-unnecessary-copy-initialization,
+    modernize-use-emplace

 # Todo: define a better regex match that includes most project headers, but excludes third party
 # code.
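For context, `modernize-use-emplace` reports `push_back` calls that construct a temporary only to copy or move it into the container, and suggests forwarding the constructor arguments instead. A minimal illustration (hypothetical names, not taken from this diff):

```cpp
#include <string>
#include <utility>
#include <vector>

int main() {
  std::vector<std::pair<std::string, std::string>> flags;

  // Reported by modernize-use-emplace: std::make_pair builds a temporary
  // pair, which is then moved into the vector.
  flags.push_back(std::make_pair(std::string("key"), std::string("val")));

  // Suggested fix: the arguments are forwarded to pair's constructor,
  // so the element is built directly in the vector's storage.
  flags.emplace_back("key", "val");
}
```

Listing the check under `WarningsAsErrors` as well makes clang-tidy runs fail on new occurrences instead of merely warning, which is what the second commit-message bullet refers to.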
2 changes: 1 addition & 1 deletion src/c_api/c_api_executor.cc
@@ -168,7 +168,7 @@ int MXExecutorBindEX(SymbolHandle symbol_handle,
   for (mx_uint i = 0; i < len; ++i) {
     in_args_vec.push_back(*(in_args_ptr[i]));
     if (arg_grad_ptr[i] == nullptr) {
-      arg_grad_vec.push_back(NDArray());
+      arg_grad_vec.emplace_back();
       grad_req_vec.push_back(kNullOp);
     } else {
       arg_grad_vec.push_back(*(arg_grad_ptr[i]));
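`emplace_back()` with no arguments value-initializes the element in place, which is all the empty-`NDArray` placeholder needs. A minimal sketch of the pattern, using `std::string` as a stand-in for `NDArray`:

```cpp
#include <string>
#include <vector>

int main() {
  std::vector<std::string> grads;
  // Before: constructs a temporary std::string(), then moves it in.
  grads.push_back(std::string());
  // After: default-constructs the element directly inside the vector.
  grads.emplace_back();
}
```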
2 changes: 1 addition & 1 deletion src/c_api/c_api_ndarray.cc
@@ -176,7 +176,7 @@ int MXCreateCachedOpEx(SymbolHandle handle,
   API_BEGIN();
   std::vector<std::pair<std::string, std::string> > flags;
   for (int i = 0; i < num_flags; ++i) {
-    flags.push_back({keys[i], vals[i]});
+    flags.emplace_back(keys[i], vals[i]);
   }
   *out = new CachedOpPtr(new CachedOp(*sym, flags));
   API_END();
4 changes: 2 additions & 2 deletions src/c_api/c_predict_api.cc
@@ -191,7 +191,7 @@ int _CreatePartialOut(const char* symbol_json_str,
     if (known_shape.count(key) != 0) {
       in_shapes.push_back(known_shape[key]);
     } else {
-      in_shapes.push_back(TShape());
+      in_shapes.emplace_back();
     }
   }
   nnvm::Graph g; g.outputs = sym.outputs;
@@ -369,7 +369,7 @@ int MXPredReshape(mx_uint num_input_nodes,
     if (new_shape.count(key) != 0) {
       in_shapes.push_back(new_shape[key]);
     } else {
-      in_shapes.push_back(TShape());
+      in_shapes.emplace_back();
     }
   }
   nnvm::Graph g; g.outputs = ret->sym.outputs;
6 changes: 3 additions & 3 deletions src/io/iter_mnist.cc
@@ -124,11 +124,11 @@ class MNISTIter: public IIterator<TBlobBatch> {
       batch_label_.dptr_ = &labels_[loc_];
       out_.data.clear();
       if (param_.flat) {
-        out_.data.push_back(TBlob(batch_data_.FlatTo2D()));
+        out_.data.emplace_back(batch_data_.FlatTo2D());
       } else {
-        out_.data.push_back(TBlob(batch_data_));
+        out_.data.emplace_back(batch_data_);
       }
-      out_.data.push_back(TBlob(batch_label_));
+      out_.data.emplace_back(batch_label_);
       loc_ += param_.batch_size;
       return true;
     } else {
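Here the explicit `TBlob(...)` wrapper only named a temporary; `emplace_back` forwards the constructor argument instead. A sketch of the same pattern with a hypothetical `Blob` type standing in for `TBlob`:

```cpp
#include <vector>

// Hypothetical stand-in for TBlob: constructible from a raw pointer.
struct Blob {
  explicit Blob(const float* p) : data(p) {}
  const float* data;
};

int main() {
  float batch[4] = {0.f, 1.f, 2.f, 3.f};
  std::vector<Blob> out;
  // Before: builds a temporary Blob, then copies/moves it into the vector.
  out.push_back(Blob(batch));
  // After: constructs the Blob directly in the vector's storage.
  out.emplace_back(batch);
}
```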
8 changes: 4 additions & 4 deletions src/kvstore/comm_tree.h
@@ -395,7 +395,7 @@ class CommDeviceTree : public CommDevice {
     // 2) Force copy_buf to be of kRecvBufferSize
     // 3) Do not use greedy assignment; all keys are assigned to each GPU
     for (unsigned i = 0; i < devs_.size(); ++i)
-      tree_merge_buf_.push_back(std::unordered_map<int, TreeBufferEntry>());
+      tree_merge_buf_.emplace_back();

     bool delay_alloc = true;
     std::map<int, int> key_dist;
@@ -457,7 +457,7 @@ class CommDeviceTree : public CommDevice {
           if (row == devs_.size()-1)
             shape_copy[0] = last_slice;
           buf.merged[row] = NDArray(shape_copy, ctx, delay_alloc, type);
-          buf.copy_buf.push_back(std::vector<NDArray>());
+          buf.copy_buf.emplace_back();
           if (buf.copy_buf[row].empty()) {
             buf.copy_buf[row].resize(kBranch-1);
             for (size_t col = 0; col < buf.copy_buf[0].size(); ++col) {
@@ -469,9 +469,9 @@ class CommDeviceTree : public CommDevice {
           }
         }
       } else {
-        buf.merged.push_back(NDArray(shape, ctx, false, type));
+        buf.merged.emplace_back(shape, ctx, false, type);
         if (buf.copy_buf.empty()) {
-          buf.copy_buf.push_back(std::vector<NDArray>());
+          buf.copy_buf.emplace_back();
           buf.copy_buf[0].resize(kBranch-1);
           for (size_t col = 0; col < buf.copy_buf[0].size(); ++col) {
             buf.copy_buf[0][col] = NDArray(buf.merged[0].shape(),
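For containers of containers, the argument-less form also spares spelling out the element type: `tree_merge_buf_.emplace_back()` replaces a line that had to repeat `std::unordered_map<int, TreeBufferEntry>` just to append an empty map. A minimal sketch:

```cpp
#include <unordered_map>
#include <vector>

int main() {
  std::vector<std::unordered_map<int, int>> merge_buf;
  // Before: the element type must be written out to create the temporary.
  merge_buf.push_back(std::unordered_map<int, int>());
  // After: the empty map is value-initialized in place,
  // with no temporary and no repetition of the type.
  merge_buf.emplace_back();
}
```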
2 changes: 1 addition & 1 deletion src/nnvm/legacy_json_util.cc
@@ -60,7 +60,7 @@ Graph UpgradeJSON_FixParsing(Graph g) {
       for (const auto key : kHiddenKeys) {
         size_t pos = it->first.rfind(key);
         if (pos == 0 || (pos != std::string::npos && pos == it->first.length() - key.length())) {
-          hidden_keys.push_back(*it);
+          hidden_keys.emplace_back(*it);
           erase = true;
           break;
         }
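`*it` here is a map entry, i.e. a `std::pair<const std::string, ...>`, which is not the vector's value type: `push_back(*it)` has to convert through a temporary pair, while `emplace_back(*it)` runs the converting constructor in place. A sketch (the element types are assumed; the real ones come from the graph's attribute map):

```cpp
#include <map>
#include <string>
#include <utility>
#include <vector>

int main() {
  std::map<std::string, std::string> attrs{{"__init__", "zeros"}};
  std::vector<std::pair<std::string, std::string>> hidden_keys;

  auto it = attrs.begin();
  // *it is std::pair<const std::string, std::string>, not the vector's
  // value type, so push_back(*it) would materialize a temporary pair and
  // move it; emplace_back(*it) invokes the converting constructor in place.
  hidden_keys.emplace_back(*it);
}
```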
3 changes: 1 addition & 2 deletions src/nnvm/legacy_op_util.cc
@@ -288,8 +288,7 @@ std::vector<std::pair<int, int> > OpPropInplaceOption(const NodeAttrs& attrs) {
   }
   std::vector<std::pair<int, int> > forward_inplace;
   for (auto& kv : prop.ptr->ForwardInplaceOption(in_data, out_addr)) {
-    forward_inplace.push_back(
-        std::make_pair(kv.first, *static_cast<int*>(kv.second)));
+    forward_inplace.emplace_back(kv.first, *static_cast<int*>(kv.second));
   }
   return forward_inplace;
 }
12 changes: 6 additions & 6 deletions src/operator/control_flow.cc
@@ -1268,7 +1268,7 @@ NNVM_REGISTER_OP(_foreach)
   [](const NodeAttrs& attrs) {
     const ForeachParam& params = nnvm::get<ForeachParam>(attrs.parsed);
     std::vector<std::string> names;
-    names.push_back("fn");
+    names.emplace_back("fn");
     for (int i = 0; i < params.num_args - 1; i++)
       names.push_back("data" + std::to_string(i));
     return names;
@@ -1330,8 +1330,8 @@ NNVM_REGISTER_OP(_while_loop)
     const WhileLoopParam& params = nnvm::get<WhileLoopParam>(attrs.parsed);
     std::vector<std::string> names;
    names.reserve(params.num_args);
-    names.push_back("cond");
-    names.push_back("func");
+    names.emplace_back("cond");
+    names.emplace_back("func");
     for (int i = 2; i < params.num_args; i++)
       names.push_back("data" + std::to_string(i - 2));
     return names;
@@ -1392,9 +1392,9 @@ NNVM_REGISTER_OP(_cond)
     const CondParam& params = nnvm::get<CondParam>(attrs.parsed);
     std::vector<std::string> names;
     names.reserve(params.num_args);
-    names.push_back("cond");
-    names.push_back("then_branch");
-    names.push_back("else_branch");
+    names.emplace_back("cond");
+    names.emplace_back("then_branch");
+    names.emplace_back("else_branch");
     for (int i = 3; i < params.num_args; ++i)
       names.push_back("data" + std::to_string(i - 3));
     return names;
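These hunks also show where the change stops paying off: the literals (`"cond"`, `"func"`, ...) are `const char*`, so `emplace_back` constructs each `std::string` in place, but `"data" + std::to_string(i)` already yields a `std::string` prvalue that `push_back` simply moves, which is presumably why those calls are left alone. A sketch of both cases:

```cpp
#include <string>
#include <vector>

int main() {
  std::vector<std::string> names;
  // A string literal is a const char*: emplace_back builds the std::string
  // in place, where push_back("cond") would build a temporary first.
  names.emplace_back("cond");
  // The concatenation already produces a std::string prvalue, so push_back
  // moves it in; emplace_back would add nothing here.
  names.push_back("data" + std::to_string(0));
}
```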
2 changes: 1 addition & 1 deletion src/operator/custom/custom.cc
@@ -81,7 +81,7 @@ std::vector<std::string> List(const NodeAttrs& attrs) {
       &args, params.info->contexts[Type]));
   std::vector<std::string> ret;
   for (int i = 0; args[i] != nullptr; ++i) {
-    ret.push_back(args[i]);
+    ret.emplace_back(args[i]);
   }
   return ret;
 }
4 changes: 2 additions & 2 deletions src/operator/custom/native_op-inl.h
@@ -185,7 +185,7 @@ class NativeOpProp : public OperatorProperty {
     param_.pinfo->list_arguments(&args, param_.pinfo->p_list_arguments);
     std::vector<std::string> ret;
     for (int i = 0; args[i] != NULL; ++i) {
-      ret.push_back(args[i]);
+      ret.emplace_back(args[i]);
     }
     return ret;
   }
@@ -195,7 +195,7 @@ class NativeOpProp : public OperatorProperty {
     param_.pinfo->list_outputs(&args, param_.pinfo->p_list_outputs);
     std::vector<std::string> ret;
     for (int i = 0; args[i] != NULL; ++i) {
-      ret.push_back(args[i]);
+      ret.emplace_back(args[i]);
     }
     return ret;
   }
4 changes: 2 additions & 2 deletions src/operator/custom/ndarray_op-inl.h
@@ -87,7 +87,7 @@ class NDArrayOpProp : public OperatorProperty {
     CHECK(param_.pinfo->list_arguments(&args, param_.pinfo->p_list_arguments));
     std::vector<std::string> ret;
     for (int i = 0; args[i] != NULL; ++i) {
-      ret.push_back(args[i]);
+      ret.emplace_back(args[i]);
     }
     return ret;
   }
@@ -97,7 +97,7 @@ class NDArrayOpProp : public OperatorProperty {
     CHECK(param_.pinfo->list_outputs(&args, param_.pinfo->p_list_outputs));
     std::vector<std::string> ret;
     for (int i = 0; args[i] != NULL; ++i) {
-      ret.push_back(args[i]);
+      ret.emplace_back(args[i]);
     }
     return ret;
   }
2 changes: 1 addition & 1 deletion src/operator/optimizer_op.cc
@@ -384,7 +384,7 @@ only the row slices whose indices appear in grad.indices are updated (for both w
   [](const NodeAttrs& attrs, const int dev_mask, const DispatchMode dispatch_mode) {
     std::vector<ResourceRequest> request;
     if (dispatch_mode == DispatchMode::kFComputeEx) {
-      request.push_back(ResourceRequest::kTempSpace);
+      request.emplace_back(ResourceRequest::kTempSpace);
    }
     return request;
   })
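The same single-argument pattern applies to enums: assuming `ResourceRequest` has a non-explicit constructor taking its `Type` enum (an assumption about the MXNet type, not confirmed by this diff), `push_back(ResourceRequest::kTempSpace)` converts through a temporary, while `emplace_back` constructs the request in place. A sketch with a hypothetical stand-in type:

```cpp
#include <vector>

// Hypothetical stand-in for mxnet::ResourceRequest.
struct Request {
  enum Type { kTempSpace, kRandom };
  Type type;
  Request(Type t) : type(t) {}  // assumed non-explicit, mirroring the diff
};

int main() {
  std::vector<Request> requests;
  // Before: the enum converts to a temporary Request, which is then copied in.
  requests.push_back(Request::kTempSpace);
  // After: the Request is constructed directly inside the vector.
  requests.emplace_back(Request::kTempSpace);
}
```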
4 changes: 2 additions & 2 deletions src/operator/rnn-inl.h
@@ -608,9 +608,9 @@ class RNNProp : public OperatorProperty {
     if (!param_.state_outputs)
       return outputs;
     else
-      outputs.push_back("state");
+      outputs.emplace_back("state");
     if (param_.mode == rnn_enum::kLstm)
-      outputs.push_back("state_cell");
+      outputs.emplace_back("state_cell");
     return outputs;
   }
