[clang-tidy] NO.16 enable modernize-use-equals-default #55983

Merged: 2 commits, Aug 7, 2023. The diff below shows the changes from all commits.
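For context: modernize-use-equals-default is the clang-tidy check that flags special member functions (default constructors, destructors, copy constructors, copy-assignment operators) with empty user-written bodies and suggests replacing the body with `= default`, so the compiler-generated definition is used. A minimal before/after sketch with a hypothetical class (not from this PR):

    // Before: empty user-written bodies; the check warns
    // "use '= default' to define a trivial default constructor/destructor".
    class Widget {
     public:
      Widget() {}
      ~Widget() {}
    };

    // After: explicitly defaulted; same observable behavior, and the
    // members can stay trivial when defaulted on first declaration.
    class Widget {
     public:
      Widget() = default;
      ~Widget() = default;
    };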
.clang-tidy (1 addition, 1 deletion)
@@ -181,7 +181,7 @@ modernize-redundant-void-arg,
 -modernize-unary-static-assert,
 -modernize-use-bool-literals,
 modernize-use-emplace,
--modernize-use-equals-default,
+modernize-use-equals-default,
 -modernize-use-equals-delete,
 -modernize-use-noexcept,
 modernize-use-nullptr,
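In a .clang-tidy Checks list, a bare check name enables the check and a leading `-` disables it, so the one-character change above (dropping the `-` from modernize-use-equals-default) is what turns the check on, while neighbors such as -modernize-use-equals-delete remain disabled.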
@@ -34,7 +34,7 @@ DistTensorSpec::DistTensorSpec(const DistTensorSpec& spec) {
   dist_attr_.copy_from(spec.dist_attr());
 }
 
-DistTensorSpec::~DistTensorSpec() {}
+DistTensorSpec::~DistTensorSpec() = default;
 
 DistTensorSpec::DistTensorSpec(const Tensor& tensor) {
   shape_ = tensor.shape();
@@ -47,7 +47,7 @@ ProcessGroupNCCL::NCCLTask::NCCLTask(const Place& place,
       comm_event_(place),
       task_place_(place) {}
 
-ProcessGroupNCCL::NCCLTask::~NCCLTask() {}
+ProcessGroupNCCL::NCCLTask::~NCCLTask() = default;
 
 bool ProcessGroupNCCL::NCCLTask::IsCompleted() { return comm_event_.Query(); }
paddle/fluid/distributed/fleet_executor/fleet_executor.cc (1 addition, 1 deletion)
@@ -48,7 +48,7 @@ FleetExecutor::FleetExecutor(const FleetExecutorDesc& exe_desc)
   InitMessageBus();
 }
 
-FleetExecutor::~FleetExecutor() {
+FleetExecutor::~FleetExecutor() {  // NOLINT
   for (const auto& carrier_id : carrier_ids_) {
     GlobalMap<std::string, Carrier>::Get(carrier_id)->Release();
   }
paddle/fluid/distributed/fleet_executor/interceptor.cc (1 addition, 1 deletion)
@@ -24,7 +24,7 @@ namespace distributed {
 Interceptor::Interceptor(int64_t interceptor_id, TaskNode* node)
     : interceptor_id_(interceptor_id), node_(node) {}
 
-Interceptor::~Interceptor() {
+Interceptor::~Interceptor() {  // NOLINT
   // FIXME(wangxi): throw in stop function
   // std::lock_guard<std::mutex> lock(mutex_);
   // PADDLE_ENFORCE_EQ(messages_.empty(), true,
@@ -38,7 +38,7 @@ inline double GetCurrentUS() {
   return 1e+6 * time.tv_sec + time.tv_usec;
 }
 
-Communicator::Communicator() {}
+Communicator::Communicator() = default;
 
 void Communicator::InitGFlag(const std::string &gflags) {
   VLOG(3) << "Init With Gflags:" << gflags;
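Note on the pattern above: the constructor is defaulted at its out-of-line definition, so the header that declares Communicator() is untouched. A special member defaulted after its first declaration still counts as user-provided under [dcl.fct.def.default], so the class's triviality and ABI do not change; only the empty body goes away. A sketch with hypothetical names:

    // header (unchanged): the constructor is only declared here
    class Communicator {
     public:
      Communicator();
    };

    // source file: defaulted at the definition instead of writing {}
    Communicator::Communicator() = default;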
paddle/fluid/distributed/ps/table/common_graph_table.cc (1 addition, 1 deletion)
@@ -1205,7 +1205,7 @@ Node *GraphShard::find_node(uint64_t id) {
   return iter == node_location.end() ? nullptr : bucket[iter->second];
 }
 
-GraphTable::~GraphTable() {
+GraphTable::~GraphTable() {  // NOLINT
 #ifdef PADDLE_WITH_GPU_GRAPH
   clear_graph();
 #endif
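Many definitions in this PR gain `// NOLINT` rather than `= default`. NOLINT suppresses clang-tidy diagnostics on the line it annotates; a plausible reason for using it here is bodies that are empty only under some preprocessor configurations (as with PADDLE_WITH_GPU_GRAPH above), where `= default` would change behavior in other builds. The general shape, as a hypothetical sketch:

    // Empty in some configurations, not in others: keep the body and
    // silence the check rather than writing '= default'.
    Foo::~Foo() {  // NOLINT
    #ifdef SOME_FEATURE_FLAG
      release_resources();
    #endif
    }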
paddle/fluid/framework/data_feed.cc (1 addition, 1 deletion)
@@ -1999,7 +1999,7 @@ void PaddleBoxDataFeed::PutToFeedVec(const std::vector<Record*>& ins_vec) {
 #endif
 }
 
-SlotRecordInMemoryDataFeed::~SlotRecordInMemoryDataFeed() {
+SlotRecordInMemoryDataFeed::~SlotRecordInMemoryDataFeed() {  // NOLINT
 #if defined(PADDLE_WITH_CUDA) && defined(PADDLE_WITH_HETERPS)
   stop_token_.store(true);
   for (auto& thread : pack_threads_) {
paddle/fluid/framework/data_feed.h (1 addition, 1 deletion)
@@ -1821,7 +1821,7 @@ class MultiSlotInMemoryDataFeed : public InMemoryDataFeed<Record> {
 
 class SlotRecordInMemoryDataFeed : public InMemoryDataFeed<SlotRecord> {
  public:
-  SlotRecordInMemoryDataFeed() {}
+  SlotRecordInMemoryDataFeed() = default;
   virtual ~SlotRecordInMemoryDataFeed();
   void Init(const DataFeedDesc& data_feed_desc) override;
   void LoadIntoMemory() override;
paddle/fluid/framework/details/fetch_async_op_handle.cc (1 addition, 1 deletion)
@@ -37,7 +37,7 @@ FetchAsyncOpHandle::FetchAsyncOpHandle(ir::Node *node,
       local_exec_scopes_(local_exec_scopes),
       return_merged_(return_merged) {}
 
-FetchAsyncOpHandle::~FetchAsyncOpHandle() {}
+FetchAsyncOpHandle::~FetchAsyncOpHandle() = default;
 
 void FetchAsyncOpHandle::RecordWaitEventOnCtx(
     platform::DeviceContext *waited_ctx) {
paddle/fluid/framework/details/fetch_op_handle.cc (1 addition, 1 deletion)
@@ -35,7 +35,7 @@ FetchOpHandle::FetchOpHandle(ir::Node *node,
       local_exec_scopes_(local_exec_scopes),
       return_merged_(return_merged) {}
 
-FetchOpHandle::~FetchOpHandle() {}
+FetchOpHandle::~FetchOpHandle() = default;
 
 void FetchOpHandle::RecordWaitEventOnCtx(platform::DeviceContext *waited_ctx) {
   PADDLE_THROW(platform::errors::PermissionDenied(
paddle/fluid/framework/details/nan_inf_utils_detail.cc (1 addition, 1 deletion)
@@ -27,7 +27,7 @@ namespace paddle {
 namespace framework {
 namespace details {
 struct DebugTools {
-  DebugTools() {}
+  DebugTools() = default;
   std::string path = "";
   int stack_limit = 1;
 };
paddle/fluid/framework/details/op_handle_base.cc (1 addition, 1 deletion)
@@ -30,7 +30,7 @@ std::string OpHandleBase::DebugString() const {
   return ss.str();
 }
 
-OpHandleBase::~OpHandleBase() PADDLE_MAY_THROW {
+OpHandleBase::~OpHandleBase() PADDLE_MAY_THROW {  // NOLINT
 #if defined(PADDLE_WITH_CUDA) || defined(PADDLE_WITH_HIP)
   for (auto &ev : events_) {
     if (ev.second) {
@@ -40,7 +40,7 @@ ScaleLossGradOpHandle::ScaleLossGradOpHandle(ir::Node *node,
   this->SetDeviceContext(place_, dev_ctx);
 }
 
-ScaleLossGradOpHandle::~ScaleLossGradOpHandle() {}
+ScaleLossGradOpHandle::~ScaleLossGradOpHandle() = default;
 
 struct ScaleLossGradFunctor {
   float coeff_;
paddle/fluid/framework/details/ssa_graph_executor.cc (1 addition, 1 deletion)
@@ -19,7 +19,7 @@
 namespace paddle {
 namespace framework {
 namespace details {
-SSAGraphExecutor::~SSAGraphExecutor() {}
+SSAGraphExecutor::~SSAGraphExecutor() = default;
 
 void ClearFetchOp(ir::Graph* graph, std::vector<OpHandleBase*>* fetch_ops) {
   if (fetch_ops->empty()) return;
paddle/fluid/framework/details/var_handle.cc (1 addition, 1 deletion)
@@ -18,7 +18,7 @@ namespace paddle {
 namespace framework {
 namespace details {
 
-VarHandleBase::~VarHandleBase() {}
+VarHandleBase::~VarHandleBase() = default;
 
 VarHandle::~VarHandle() { VLOG(4) << "deleting var handle " << DebugString(); }
@@ -24,7 +24,7 @@ namespace paddle {
 namespace framework {
 namespace ir {
 
-AdaptivePool2dConvertGlobalPass::AdaptivePool2dConvertGlobalPass() {
+AdaptivePool2dConvertGlobalPass::AdaptivePool2dConvertGlobalPass() {  // NOLINT
   AddOpCompat(OpCompat("pool2d"))
       .AddInput("X")
       .IsTensor()
@@ -32,7 +32,7 @@ class Graph;
 class AdaptivePool2dConvertGlobalPass : public FusePassBase {
  public:
   AdaptivePool2dConvertGlobalPass();
-  virtual ~AdaptivePool2dConvertGlobalPass() {}
+  virtual ~AdaptivePool2dConvertGlobalPass() = default;
 
  protected:
   void ApplyImpl(ir::Graph* graph) const override;
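In the pass headers, empty virtual destructors become explicitly defaulted. `virtual ~T() = default;` behaves the same as the empty body it replaces: the destructor stays virtual, so deleting a derived pass through a FusePassBase pointer remains well-defined. A sketch with a hypothetical pass class:

    class MyFusePass : public FusePassBase {
     public:
      MyFusePass();
      // Equivalent to 'virtual ~MyFusePass() {}', but states the intent
      // explicitly and satisfies modernize-use-equals-default.
      virtual ~MyFusePass() = default;

     protected:
      void ApplyImpl(ir::Graph* graph) const override;
    };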
paddle/fluid/framework/ir/constant_folding_pass.cc (1 addition, 1 deletion)
@@ -51,7 +51,7 @@ struct ConstantFolding : public PatternBase {
 };
 }  // namespace patterns
 
-ConstantFoldingPass::ConstantFoldingPass() {}
+ConstantFoldingPass::ConstantFoldingPass() = default;
 
 void ConstantFoldingPass::ApplyImpl(ir::Graph *graph) const {
   PADDLE_ENFORCE_NOT_NULL(
paddle/fluid/framework/ir/conv_bn_fuse_pass.cc (4 additions, 3 deletions)
@@ -756,7 +756,7 @@ void ConvEltwiseAddBNFusePass::ApplyImpl(ir::Graph* graph) const {
   AddStatis(found_conv_bn_count);
 }
 
-ConvTransposeBNFusePass::ConvTransposeBNFusePass() {
+ConvTransposeBNFusePass::ConvTransposeBNFusePass() {  // NOLINT
   AddOpCompat(OpCompat("conv2d_transpose"))
       .AddInput("Input")
       .IsTensor()
@@ -800,7 +800,8 @@ ConvTransposeBNFusePass::ConvTransposeBNFusePass() {
       .End();
 }
 
-ConvTransposeEltwiseAddBNFusePass::ConvTransposeEltwiseAddBNFusePass() {
+ConvTransposeEltwiseAddBNFusePass::
+    ConvTransposeEltwiseAddBNFusePass() {  // NOLINT
   AddOpCompat(OpCompat("conv2d_transpose"))
       .AddInput("Input")
       .IsTensor()
@@ -844,7 +845,7 @@ ConvTransposeEltwiseAddBNFusePass::ConvTransposeEltwiseAddBNFusePass() {
      .End();
 }
 
-DepthwiseConvBNFusePass::DepthwiseConvBNFusePass() {
+DepthwiseConvBNFusePass::DepthwiseConvBNFusePass() {  // NOLINT
   AddOpCompat(OpCompat("depthwise_conv2d"))
       .AddInput("Input")
       .IsTensor()
paddle/fluid/framework/ir/cost_model.cc (1 addition, 1 deletion)
@@ -30,7 +30,7 @@ using platform::MemEvent;
 
 const double CostData::NOT_MEASURED = -1;
 
-CostData::~CostData() {
+CostData::~CostData() {  // NOLINT
   // TODO(zhhsplendid): when we save a copy of program/graph, we should delete
   // here.
 }
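CostData::~CostData() keeps its braces even though they contain no statements: comments are not part of the AST, so clang-tidy may still see the body as empty and suggest `= default`, which would drop the TODO note; the NOLINT marker keeps the comment in place instead.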
@@ -56,7 +56,7 @@ static const std::initializer_list<std::string> rnn_variable_names{
 
 class ComputePropagateScalesMkldnnPassTest : public testing::Test {
  public:
-  ComputePropagateScalesMkldnnPassTest() {
+  ComputePropagateScalesMkldnnPassTest() {  // NOLINT
     pass.reset(new ComputePropagateScalesMkldnnPass());
   }
@@ -32,7 +32,7 @@ class Graph;
   PADDLE_ENFORCE_NOT_NULL(                                                  \
       id, platform::errors::InvalidArgument("Subgraph has no node %s.", #id));
 
-DepthwiseConvMKLDNNPass::DepthwiseConvMKLDNNPass() {
+DepthwiseConvMKLDNNPass::DepthwiseConvMKLDNNPass() {  // NOLINT
   AddOpCompat(OpCompat("depthwise_conv2d"))
       .AddInput("Input")
       .IsTensor()
@@ -25,7 +25,7 @@ class Graph;
 class DepthwiseConvMKLDNNPass : public FusePassBase {
  public:
   DepthwiseConvMKLDNNPass();
-  virtual ~DepthwiseConvMKLDNNPass() {}
+  virtual ~DepthwiseConvMKLDNNPass() = default;
 
  protected:
   void ApplyImpl(ir::Graph* graph) const override;
@@ -23,7 +23,7 @@ namespace paddle {
 namespace framework {
 namespace ir {
 
-Int8ScaleCalculationMkldnnPass::Int8ScaleCalculationMkldnnPass() {
+Int8ScaleCalculationMkldnnPass::Int8ScaleCalculationMkldnnPass() {  // NOLINT
   AddOpCompat(OpCompat("conv2d"))
       .AddInput("Input")
       .IsTensor()
@@ -27,7 +27,7 @@ class Graph;
 class Int8ScaleCalculationMkldnnPass : public FusePassBase {
  public:
   Int8ScaleCalculationMkldnnPass();
-  virtual ~Int8ScaleCalculationMkldnnPass() {}
+  virtual ~Int8ScaleCalculationMkldnnPass() = default;
 
  protected:
   void ApplyImpl(ir::Graph* graph) const override;
@@ -76,7 +76,7 @@ void QuantizeConvInput(Scope* scope,
 
 }  // namespace
 
-ParamsQuantizationMkldnnPass::ParamsQuantizationMkldnnPass() {
+ParamsQuantizationMkldnnPass::ParamsQuantizationMkldnnPass() {  // NOLINT
   AddOpCompat(OpCompat("fused_conv2d"))
       .AddInput("Input")
       .IsTensor()
@@ -27,7 +27,7 @@ class Graph;
 class ParamsQuantizationMkldnnPass : public FusePassBase {
  public:
   ParamsQuantizationMkldnnPass();
-  virtual ~ParamsQuantizationMkldnnPass() {}
+  virtual ~ParamsQuantizationMkldnnPass() = default;
 
 protected:
  void ApplyImpl(ir::Graph* graph) const override;
@@ -65,7 +65,7 @@ struct TestScope {
 };
 
 struct ProgramStrategy {
-  virtual ~ProgramStrategy() {}
+  virtual ~ProgramStrategy() = default;
 
   std::unique_ptr<Graph> CreateGraph() {
     CreateProgram();
@@ -170,7 +170,8 @@ void FuseQuantTranspose2DequantOneDNNPass::ApplyImpl(Graph *graph) const {
   FuseTranspose2Dequantize(graph, "transpose2");
 }
 
-FuseQuantTranspose2DequantOneDNNPass::FuseQuantTranspose2DequantOneDNNPass() {
+FuseQuantTranspose2DequantOneDNNPass::
+    FuseQuantTranspose2DequantOneDNNPass() {  // NOLINT
   AddOpCompat(OpCompat("transpose2"))
       .AddInput("X")
       .IsTensor()
@@ -23,7 +23,7 @@ namespace ir {
 
 class FuseQuantTranspose2DequantOneDNNPass : public FusePassBase {
  public:
-  virtual ~FuseQuantTranspose2DequantOneDNNPass() {}
+  virtual ~FuseQuantTranspose2DequantOneDNNPass() = default;
   FuseQuantTranspose2DequantOneDNNPass();
 
  protected:
@@ -31,7 +31,7 @@ namespace ir {
   GET_IR_NODE(reshape2_op);                    \
   GET_IR_NODE(reshape2_out);
 
-ShuffleChannelMKLDNNDetectPass::ShuffleChannelMKLDNNDetectPass() {
+ShuffleChannelMKLDNNDetectPass::ShuffleChannelMKLDNNDetectPass() {  // NOLINT
   AddOpCompat(OpCompat("reshape2"))
       .AddInput("X")
       .IsTensor()
@@ -27,7 +27,7 @@ class Graph;
 class ShuffleChannelMKLDNNDetectPass : public FusePassBase {
  public:
   ShuffleChannelMKLDNNDetectPass();
-  virtual ~ShuffleChannelMKLDNNDetectPass() {}
+  virtual ~ShuffleChannelMKLDNNDetectPass() = default;
 
  protected:
   void ApplyImpl(ir::Graph* graph) const override;
paddle/fluid/framework/ir/reverse_roll_fuse_pass.cc (1 addition, 1 deletion)
@@ -37,7 +37,7 @@ namespace paddle {
 namespace framework {
 namespace ir {
 class Node;
-ReverseRollFusePass::ReverseRollFusePass() {
+ReverseRollFusePass::ReverseRollFusePass() {  // NOLINT
   AddOpCompat(OpCompat("reshape2"))
       .AddInput("X")
       .IsTensor()
paddle/fluid/framework/ir/reverse_roll_fuse_pass.h (1 addition, 1 deletion)
@@ -54,7 +54,7 @@ namespace ir {
 class ReverseRollFusePass : public FusePassBase {
  public:
   ReverseRollFusePass();
-  virtual ~ReverseRollFusePass() {}
+  virtual ~ReverseRollFusePass() = default;
 
  protected:
   void ApplyImpl(ir::Graph *graph) const override;
paddle/fluid/framework/ir/shuffle_channel_detect_pass.cc (1 addition, 1 deletion)
@@ -31,7 +31,7 @@ namespace ir {
   GET_IR_NODE(reshape2_op);                    \
   GET_IR_NODE(reshape2_out);
 
-ShuffleChannelDetectPass::ShuffleChannelDetectPass() {
+ShuffleChannelDetectPass::ShuffleChannelDetectPass() {  // NOLINT
   AddOpCompat(OpCompat("reshape2"))
       .AddInput("X")
       .IsTensor()
paddle/fluid/framework/ir/shuffle_channel_detect_pass.h (1 addition, 1 deletion)
@@ -27,7 +27,7 @@ class Graph;
 class ShuffleChannelDetectPass : public FusePassBase {
  public:
   ShuffleChannelDetectPass();
-  virtual ~ShuffleChannelDetectPass() {}
+  virtual ~ShuffleChannelDetectPass() = default;
 
  protected:
   void ApplyImpl(ir::Graph* graph) const override;
paddle/fluid/framework/ir/sigmoid_elementmul_fuse_pass.cc (1 addition, 1 deletion)
@@ -67,7 +67,7 @@ SigmoidElementmulFusePattern::SigmoidElementmulFusePattern(
 
 }  // namespace patterns
 
-SigmoidElementmulFusePass::SigmoidElementmulFusePass() {}
+SigmoidElementmulFusePass::SigmoidElementmulFusePass() = default;
 
 void SigmoidElementmulFusePass::ApplyImpl(ir::Graph* graph) const {
   PADDLE_ENFORCE_NOT_NULL(
@@ -28,7 +28,7 @@ namespace ir {
 
 class Node;
 
-TrtMapOpsToMatrixMultiplyPass::TrtMapOpsToMatrixMultiplyPass() {}
+TrtMapOpsToMatrixMultiplyPass::TrtMapOpsToMatrixMultiplyPass() = default;
 
 void TrtMapOpsToMatrixMultiplyPass::ApplyImpl(ir::Graph* graph) const {
   PADDLE_ENFORCE_NOT_NULL(
paddle/fluid/framework/ir/yolo_box_fuse_pass.cc (1 addition, 1 deletion)
@@ -149,7 +149,7 @@ struct YoloBoxPattern : public PatternBase {
 };
 }  // namespace patterns
 
-YoloBoxFusePass::YoloBoxFusePass() {}
+YoloBoxFusePass::YoloBoxFusePass() = default;
 
 void YoloBoxFusePass::ApplyImpl(ir::Graph* graph) const {
   PADDLE_ENFORCE_NOT_NULL(
@@ -27,7 +27,7 @@ InterpreterCoreNoEventGarbageCollector::
 }
 
 InterpreterCoreNoEventGarbageCollector::
-    ~InterpreterCoreNoEventGarbageCollector() {
+    ~InterpreterCoreNoEventGarbageCollector() {  // NOLINT
   queue_.reset(nullptr);
 }
paddle/fluid/framework/new_executor/new_executor_defs.cc (1 addition, 1 deletion)
@@ -37,7 +37,7 @@ VariableScope::VariableScope(Scope* scope) {
       "You have passed a nullptr to construct VariableScope."));
 }
 
-VariableScope::~VariableScope() {}
+VariableScope::~VariableScope() = default;
 
 Scope* VariableScope::GetMutableScope() const { return scope_; }