[clang-tidy] enable bugprone-exception-escape check (#56692)
gouzil authored Aug 28, 2023
1 parent c0f5dac commit dcaca0f
Showing 24 changed files with 53 additions and 45 deletions.
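
bugprone-exception-escape reports functions that are expected not to throw (destructors, move constructors, move assignment operators, main(), swap, and anything declared noexcept) when an exception can still propagate out of them. Every finding in this commit is suppressed with a trailing // NOLINT rather than rewritten. A minimal sketch of the kind of code the check flags; the struct is hypothetical, modeled on the Flatten destructor below:

#include <vector>

struct Holder {
  std::vector<int*> items_;

  // Flagged: std::vector::assign may throw std::bad_alloc, and an
  // exception escaping a destructor terminates the program.
  ~Holder() { items_.assign(items_.size(), nullptr); }
};
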
2 changes: 1 addition & 1 deletion .clang-tidy
@@ -8,7 +8,7 @@ bugprone-argument-comment,
bugprone-copy-constructor-init,
-bugprone-dangling-handle,
-bugprone-dynamic-static-initializers,
--bugprone-exception-escape,
+bugprone-exception-escape,
-bugprone-fold-init-type,
-bugprone-forwarding-reference-overload,
-bugprone-inaccurate-erase,
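
Two different minus signs meet in the hunk above: the leading -/+ mark the removed and added diff lines, while a - inside the Checks list is clang-tidy syntax for disabling a check, so dropping that prefix is what enables bugprone-exception-escape. A sketch of the relevant shape of a .clang-tidy file (entries abbreviated):

Checks: >
  bugprone-argument-comment,
  bugprone-copy-constructor-init,
  -bugprone-dangling-handle,
  bugprone-exception-escape,
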
@@ -72,7 +72,7 @@ Flatten::Flatten(const std::vector<DimTrans*>& dims)
all_dim_trans.emplace_back(this);
}

-Flatten::~Flatten() {
+Flatten::~Flatten() { // NOLINT
input_dims_.assign(input_dims_.size(), nullptr);
std::vector<DimTrans*>().swap(input_dims_);
}
2 changes: 1 addition & 1 deletion paddle/fluid/eager/auto_code_generator/eager_generator.cc
@@ -3299,7 +3299,7 @@ static void DygraphCodeGeneration(const std::string& output_dir,
} // namespace framework
} // namespace paddle

-int main(int argc, char* argv[]) {
+int main(int argc, char* argv[]) { // NOLINT
if (argc != 3) {
std::cerr << "argc must be 3" << std::endl;
return -1;
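
main() is on the check's must-not-throw list, so each code-generator entry point gets a suppression. An alternative fix, sketched here but not what this commit does, is a function-try-block that converts any escaping exception into an exit code:

#include <exception>
#include <iostream>

int main(int argc, char* argv[]) try {
  // ... generation logic that may throw ...
  return 0;
} catch (const std::exception& e) {
  std::cerr << e.what() << std::endl;
  return -1;
}
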
2 changes: 1 addition & 1 deletion paddle/fluid/eager/pylayer/py_layer_node.cc
@@ -27,7 +27,7 @@
#include "pybind11/pytypes.h"

namespace egr {
-GradNodePyLayer::~GradNodePyLayer() {
+GradNodePyLayer::~GradNodePyLayer() { // NOLINT
pybind11::gil_scoped_acquire gil;
Py_XDECREF(ctx_);
}
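
The destructors in the Python binding layer hold a GIL guard while dropping reference counts, and they are flagged presumably because constructing the guard is not provably non-throwing. A non-throwing alternative to the suppression, sketched with a hypothetical holder class:

#include <pybind11/pybind11.h>

class PyHandle {
 public:
  explicit PyHandle(PyObject* obj) : obj_(obj) {}

  ~PyHandle() {
    try {
      pybind11::gil_scoped_acquire gil;
      Py_XDECREF(obj_);
    } catch (...) {
      // An exception must not escape a destructor; swallowing it here
      // trades a potential leak for well-defined shutdown.
    }
  }

 private:
  PyObject* obj_ = nullptr;
};
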
47 changes: 27 additions & 20 deletions paddle/fluid/framework/custom_operator.cc
@@ -916,11 +916,12 @@ static void RegisterOperatorKernel(
OperatorWithKernel::OpKernelFunc op_kernel_func;
if (kernel_func) {
VLOG(3) << "Register custom operator " << name << " with kernel func";
-op_kernel_func = [kernel_func, inputs, outputs, attrs, inplace_map](
-const framework::ExecutionContext& ctx) {
-VLOG(3) << "Custom Operator: run custom kernel func in lambda.";
-RunKernelFunc(ctx, kernel_func, inputs, outputs, attrs, inplace_map);
-};
+op_kernel_func =
+[kernel_func, inputs, outputs, attrs, inplace_map]( // NOLINT
+const framework::ExecutionContext& ctx) {
+VLOG(3) << "Custom Operator: run custom kernel func in lambda.";
+RunKernelFunc(ctx, kernel_func, inputs, outputs, attrs, inplace_map);
+};
} else {
VLOG(3) << "Register custom operator " << name
<< " with raw op kernel func";
@@ -1027,12 +1028,12 @@ void RegisterOperatorWithMetaInfo(const std::vector<OpMetaInfo>& op_meta_infos,
// InferShape
if (infer_shape_func == nullptr) {
// use default InferShape
-info.infer_shape_ =
-[op_inputs, op_outputs, op_inplace_map](InferShapeContext* ctx) {
-RunDefaultInferShapeFunc(ctx, op_inputs, op_outputs, op_inplace_map);
-};
+info.infer_shape_ = [op_inputs, op_outputs, op_inplace_map]( // NOLINT
+InferShapeContext* ctx) {
+RunDefaultInferShapeFunc(ctx, op_inputs, op_outputs, op_inplace_map);
+};
} else {
-info.infer_shape_ = [op_inputs,
+info.infer_shape_ = [op_inputs, // NOLINT
op_outputs,
op_attrs,
op_inplace_map,
@@ -1051,12 +1052,12 @@ void RegisterOperatorWithMetaInfo(const std::vector<OpMetaInfo>& op_meta_infos,
// Infer Dtype
if (infer_dtype_func == nullptr) {
// use default InferDtype
-info.infer_var_type_ =
-[op_inputs, op_outputs, op_inplace_map](InferVarTypeContext* ctx) {
-RunDefaultInferDtypeFunc(ctx, op_inputs, op_outputs, op_inplace_map);
-};
+info.infer_var_type_ = [op_inputs, op_outputs, op_inplace_map]( // NOLINT
+InferVarTypeContext* ctx) {
+RunDefaultInferDtypeFunc(ctx, op_inputs, op_outputs, op_inplace_map);
+};
} else {
-info.infer_var_type_ = [op_inputs,
+info.infer_var_type_ = [op_inputs, // NOLINT
op_outputs,
op_attrs,
op_inplace_map,
@@ -1115,7 +1116,7 @@ void RegisterOperatorWithMetaInfo(const std::vector<OpMetaInfo>& op_meta_infos,

// GradOpDescMaker
info.grad_op_maker_ =
-[grad_op_name, grad_op_inputs, grad_op_outputs, is_double_grad](
+[grad_op_name, // NOLINT
+grad_op_inputs,
+grad_op_outputs,
+is_double_grad](
const OpDesc& fwd_op,
const std::unordered_set<std::string>& no_grad_set,
std::unordered_map<std::string, std::string>* grad_to_var,
@@ -1133,7 +1137,10 @@ void RegisterOperatorWithMetaInfo(const std::vector<OpMetaInfo>& op_meta_infos,

// GradOpBaseMaker
info.dygraph_grad_op_maker_ =
-[grad_op_name, grad_op_inputs, grad_op_outputs, is_double_grad](
+[grad_op_name, // NOLINT
+grad_op_inputs,
+grad_op_outputs,
+is_double_grad](
const std::string& type,
const imperative::NameVarBaseMap& var_base_map_in,
const imperative::NameVarBaseMap& var_base_map_out,
@@ -1173,7 +1180,7 @@ void RegisterOperatorWithMetaInfo(const std::vector<OpMetaInfo>& op_meta_infos,

// Grad InferShape
if (grad_infer_shape_fn == nullptr) {
-grad_info.infer_shape_ = [grad_op_inputs,
+grad_info.infer_shape_ = [grad_op_inputs, // NOLINT
grad_op_outputs,
is_double_grad](InferShapeContext* ctx) {
// 1. if forward input exists, gradient's shape is same with forward
@@ -1211,7 +1218,7 @@ void RegisterOperatorWithMetaInfo(const std::vector<OpMetaInfo>& op_meta_infos,
}
};
} else {
-grad_info.infer_shape_ = [grad_op_inputs,
+grad_info.infer_shape_ = [grad_op_inputs, // NOLINT
grad_op_outputs,
grad_op_attrs,
grad_op_inplace_map,
@@ -1230,7 +1237,7 @@ void RegisterOperatorWithMetaInfo(const std::vector<OpMetaInfo>& op_meta_infos,
// Grad InferDtype
if (grad_infer_dtype_fn != nullptr) {
grad_info.infer_var_type_ =
-[grad_op_inputs,
+[grad_op_inputs, // NOLINT
grad_op_outputs,
grad_op_attrs,
grad_op_inplace_map,
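
A bare // NOLINT, as used throughout this file, silences every clang-tidy diagnostic on its line; when only one finding should be suppressed, the check can be named explicitly. A sketch with a hypothetical class:

class Sink {
 public:
  // Only bugprone-exception-escape is silenced here; every other
  // check still applies to this line.
  ~Sink() { Flush(); }  // NOLINT(bugprone-exception-escape)

 private:
  void Flush();  // may throw
};
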
@@ -72,7 +72,7 @@ class SingleStreamGuard {
}
}

-~SingleStreamGuard() {
+~SingleStreamGuard() { // NOLINT
if (!is_changed) {
return;
}
2 changes: 1 addition & 1 deletion paddle/fluid/framework/scope.cc
@@ -33,7 +33,7 @@ PADDLE_DEFINE_EXPORTED_bool(
namespace paddle {
namespace framework {

-Scope::~Scope() { DropKids(); }
+Scope::~Scope() { DropKids(); } // NOLINT

Scope& Scope::NewScope() const {
Scope* child = new Scope(this);
2 changes: 1 addition & 1 deletion paddle/fluid/inference/api/analysis_predictor.cc
@@ -2581,7 +2581,7 @@ bool AnalysisPredictor::SaveTrtCalibToDisk() {
}
#endif

-AnalysisPredictor::~AnalysisPredictor() {
+AnalysisPredictor::~AnalysisPredictor() { // NOLINT
#ifdef PADDLE_WITH_TENSORRT
if (config_.tensorrt_engine_enabled() &&
config_.tensorrt_precision_mode_ == AnalysisConfig::Precision::kInt8 &&
2 changes: 1 addition & 1 deletion paddle/fluid/memory/allocation/mmap_allocator.cc
@@ -391,7 +391,7 @@ void MemoryMapAllocationPool::Clear() {
memory_map_allocations_.clear();
}

-MemoryMapAllocationPool::~MemoryMapAllocationPool() { Clear(); }
+MemoryMapAllocationPool::~MemoryMapAllocationPool() { Clear(); } // NOLINT

} // namespace allocation
} // namespace memory
2 changes: 1 addition & 1 deletion paddle/fluid/platform/profiler.cc
@@ -591,7 +591,7 @@ MemEvenRecorder::RecordMemEvent::RecordMemEvent(const Place &place,
PushMemEvent(start_ns_, end_ns_, bytes_, place_, alloc_in_);
}

-MemEvenRecorder::RecordMemEvent::~RecordMemEvent() {
+MemEvenRecorder::RecordMemEvent::~RecordMemEvent() { // NOLINT
phi::DeviceTracer *tracer = phi::GetDeviceTracer();
end_ns_ = PosixInNsec();

2 changes: 1 addition & 1 deletion paddle/fluid/platform/profiler/chrometracing_logger.cc
@@ -60,7 +60,7 @@ ChromeTracingLogger::ChromeTracingLogger(const char* filename_cstr) {
StartLog();
}

-ChromeTracingLogger::~ChromeTracingLogger() {
+ChromeTracingLogger::~ChromeTracingLogger() { // NOLINT
EndLog();
output_file_stream_.close();
}
@@ -150,7 +150,7 @@ std::unique_ptr<ProfilerResult> DeserializationReader::Parse() {
return std::unique_ptr<ProfilerResult>(profiler_result_ptr);
}

-DeserializationReader::~DeserializationReader() {
+DeserializationReader::~DeserializationReader() { // NOLINT
delete node_trees_proto_;
input_file_stream_.close();
}
@@ -371,7 +371,7 @@ SerializationLogger::SerializationLogger(const char* filename_cstr) {
OpenFile();
}

-SerializationLogger::~SerializationLogger() {
+SerializationLogger::~SerializationLogger() { // NOLINT
if (!output_file_stream_) {
delete node_trees_proto_;
return;
2 changes: 1 addition & 1 deletion paddle/fluid/pybind/eager_functions.cc
@@ -106,7 +106,7 @@ class EagerNumpyAllocation : public phi::Allocation {
"The underlying PyObject pointer of numpy array cannot be None"));
Py_INCREF(arr_);
}
-~EagerNumpyAllocation() override {
+~EagerNumpyAllocation() override { // NOLINT
py::gil_scoped_acquire gil;
Py_DECREF(arr_);
}
2 changes: 1 addition & 1 deletion paddle/fluid/pybind/eager_legacy_op_function_generator.cc
@@ -477,7 +477,7 @@ GenerateOpFunctions() {
return std::make_tuple(op_function_list, bind_function_list);
}

-int main(int argc, char* argv[]) {
+int main(int argc, char* argv[]) { // NOLINT
if (argc != 2) {
std::cerr << "argc must be 2" << std::endl;
return -1;
6 changes: 3 additions & 3 deletions paddle/fluid/pybind/eager_utils.cc
@@ -1834,7 +1834,7 @@ void PyVoidHook::operator()() {

PyObjectHolder::PyObjectHolder(PyObject* ptr) { ptr_ = ptr; }

-PyObjectHolder::~PyObjectHolder() {
+PyObjectHolder::~PyObjectHolder() { // NOLINT
::pybind11::gil_scoped_acquire gil;
Py_XDECREF(ptr_);
}
@@ -1860,7 +1860,7 @@ void PyObjectHolder::dec_ref()

PackHook::PackHook(PyObject* hook) : hook_(hook) { Py_INCREF(hook_); }

-PackHook::~PackHook() {
+PackHook::~PackHook() { // NOLINT
::pybind11::gil_scoped_acquire gil;
Py_DECREF(hook_);
}
@@ -1899,7 +1899,7 @@ void* PackHook::operator()(void* py_tensor) {

UnPackHook::UnPackHook(PyObject* hook) : hook_(hook) { Py_INCREF(hook_); }

-UnPackHook::~UnPackHook() {
+UnPackHook::~UnPackHook() { // NOLINT
::pybind11::gil_scoped_acquire gil;
Py_DECREF(hook_);
}
2 changes: 1 addition & 1 deletion paddle/fluid/pybind/imperative.cc
@@ -88,7 +88,7 @@ class PyVariableWrapperHook : public imperative::VariableWrapperHook {
Py_INCREF(py_func_);
}

-~PyVariableWrapperHook() override {
+~PyVariableWrapperHook() override { // NOLINT
py::gil_scoped_acquire gil;
Py_DECREF(py_func_);
}
2 changes: 1 addition & 1 deletion paddle/fluid/pybind/kernel_signature_generator.cc
@@ -40,7 +40,7 @@
// },
// ...
// }
-int main(int argc, char **argv) {
+int main(int argc, char **argv) { // NOLINT
paddle::framework::InitDefaultKernelSignatureMap();
auto &kernel_signature_map = phi::DefaultKernelSignatureMap::Instance();
auto &kernel_factory = phi::KernelFactory::Instance();
2 changes: 1 addition & 1 deletion paddle/ir/core/storage_manager.cc
@@ -28,7 +28,7 @@ struct ParametricStorageManager {
explicit ParametricStorageManager(std::function<void(StorageBase *)> destroy)
: destroy_(destroy) {}

-~ParametricStorageManager() {
+~ParametricStorageManager() { // NOLINT
for (const auto &instance : parametric_instances_) {
destroy_(instance.second);
}
2 changes: 1 addition & 1 deletion paddle/phi/core/distributed/store/tcp_store.cc
@@ -43,7 +43,7 @@ MasterDaemon::MasterDaemon(SocketType socket, int nranks, int timeout)
_background_thread = std::thread{&MasterDaemon::run, this};
}

-MasterDaemon::~MasterDaemon() {
+MasterDaemon::~MasterDaemon() { // NOLINT
VLOG(4) << ("begin to destruct MasterDaemon");
StopByControlFd();
_background_thread.join();
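
MasterDaemon's destructor joins its background thread, and std::thread::join throws std::system_error when the thread cannot be joined, which is exactly the escape route the check looks for. A defensive sketch (a hypothetical wrapper, not MasterDaemon itself):

#include <thread>

class Daemon {
 public:
  ~Daemon() {
    // join() throws std::system_error if the thread is not joinable;
    // checking first covers the common failure mode, though join()
    // itself is still not formally noexcept.
    if (worker_.joinable()) worker_.join();
  }

 private:
  std::thread worker_;
};
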
2 changes: 1 addition & 1 deletion paddle/phi/core/sparse_coo_tensor.cc
@@ -21,7 +21,7 @@ SparseCooTensor::SparseCooTensor() {
this->SetMember(non_zero_indices, non_zero_elements, {1}, true);
}

-SparseCooTensor::SparseCooTensor(SparseCooTensor&& other) {
+SparseCooTensor::SparseCooTensor(SparseCooTensor&& other) { // NOLINT
this->non_zero_elements_ = other.non_zero_elements_;
this->non_zero_indices_ = other.non_zero_indices_;
this->coalesced_ = other.coalesced_;
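
Move constructors and move assignment are also on the must-not-throw list: standard containers only move elements during reallocation when the move constructor is noexcept, and a throwing move breaks their exception-safety guarantees. SparseCooTensor's move constructor above appears to copy rather than move its members, which can allocate, hence the suppression. A sketch of a move constructor the check accepts (the class is hypothetical):

#include <utility>
#include <vector>

class Tensor {
 public:
  Tensor() = default;

  // Steals the buffer rather than copying it; nothing here can throw,
  // and the noexcept lets std::vector<Tensor> relocate by move.
  Tensor(Tensor&& other) noexcept : data_(std::move(other.data_)) {}

 private:
  std::vector<float> data_;
};
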
2 changes: 1 addition & 1 deletion paddle/phi/core/string_tensor.cc
@@ -48,7 +48,7 @@ StringTensor& StringTensor::operator=(const StringTensor& other) {
return *this;
}

-StringTensor& StringTensor::operator=(StringTensor&& other) {
+StringTensor& StringTensor::operator=(StringTensor&& other) { // NOLINT
meta_ = std::move(other.meta_);
std::swap(holder_, other.holder_);
return *this;
3 changes: 2 additions & 1 deletion paddle/phi/core/tensor_meta.cc
@@ -183,7 +183,8 @@ DenseTensorMeta& DenseTensorMeta::operator=(const DenseTensorMeta& other) {
return *this;
}

-DenseTensorMeta& DenseTensorMeta::operator=(DenseTensorMeta&& other) {
+DenseTensorMeta& DenseTensorMeta::operator=( // NOLINT
+DenseTensorMeta&& other) {
is_scalar = other.is_scalar;
use_gpudnn = other.use_gpudnn;
dims = std::move(other.dims);
2 changes: 1 addition & 1 deletion paddle/testing/paddle_gtest_main.cc
@@ -23,7 +23,7 @@ limitations under the License. */
DECLARE_bool(enable_gpu_memory_usage_log);
#endif

-int main(int argc, char** argv) {
+int main(int argc, char** argv) { // NOLINT
paddle::memory::allocation::UseAllocatorStrategyGFlag();
testing::InitGoogleTest(&argc, argv);
std::vector<char*> new_argv;
