【Hackathon 6th Fundable Projects 4 No.3】remove ExecutionStrategy (PaddlePaddle#65077)

* remove ExecutionStrategy

* resolve conflict
ccsuzzh authored and co63oc committed Jun 25, 2024
1 parent 8d140c3 commit 455789b
Showing 5 changed files with 2 additions and 63 deletions.
51 changes: 0 additions & 51 deletions paddle/fluid/framework/details/execution_strategy.h

This file was deleted.

1 change: 0 additions & 1 deletion paddle/fluid/framework/details/op_handle_base.h
@@ -19,7 +19,6 @@
 #include <unordered_set>
 #include <vector>
 
-#include "paddle/fluid/framework/details/execution_strategy.h"
 #include "paddle/fluid/framework/details/var_handle.h"
 #include "paddle/fluid/framework/ir/node.h"
 #include "paddle/fluid/platform/device_context.h"
8 changes: 0 additions & 8 deletions paddle/fluid/framework/distributed_strategy.proto
@@ -183,13 +183,6 @@ message BuildStrategy {
   optional bool fuse_resunit = 21 [ default = false ];
 }
 
-message ExecutionStrategy {
-  optional int32 num_threads = 1 [ default = 1 ];
-  optional int32 num_iteration_per_drop_scope = 2 [ default = 10 ];
-  optional int32 num_iteration_per_run = 3 [ default = 1 ];
-  optional bool use_thread_barrier = 4 [ default = false ];
-}
-
 message GradientScaleConfig {
   // Optional value ['avg', 'sum', 'customized']
   // If avg, loss@grad will be divided by the number of devices,
@@ -428,7 +421,6 @@ message DistributedStrategy {
   optional QatConfig qat_configs = 117;
 
   optional BuildStrategy build_strategy = 201;
-  optional ExecutionStrategy execution_strategy = 202;
   optional GradientScaleConfig gradient_scale_configs = 203;
 }

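For context, the proto fields deleted above were the knobs exposed through the legacy ExecutionStrategy Python API. A minimal sketch of the usage pattern this commit retires, assuming the pre-removal paddle.static.ExecutionStrategy class and the fleet.DistributedStrategy.execution_strategy setter (both gone after this change):

import paddle
from paddle.distributed import fleet

# Legacy pattern removed by PaddlePaddle#65077 (pre-removal API, no longer runs):
exe_strategy = paddle.static.ExecutionStrategy()
exe_strategy.num_threads = 4                    # size of the executor thread pool
exe_strategy.num_iteration_per_drop_scope = 10  # iterations between local-scope cleanups

strategy = fleet.DistributedStrategy()
# This setter was backed by the execution_strategy proto field (tag 202) deleted above.
strategy.execution_strategy = exe_strategy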
1 change: 0 additions & 1 deletion paddle/fluid/framework/ir/memory_optimize_pass/memory_reuse_pass.cc
@@ -238,7 +238,6 @@ bool MemoryReusePass::IsInVarReusable(const details::VarHandle &in_var) const {
  * - it is the first version var. Otherwise, the var may be overwritten
  *   in the second batch, which results in wrong calculation result.
  *   It is critical especially when
- *   ExecutionStrategy::num_iteration_per_drop_scope_ > 1.
  * - it has not reused other var's memory. It is not necessary to do memory
  *   reuse twice for the same var.
  * - it is not a persistable var.
4 changes: 2 additions & 2 deletions python/paddle/distributed/fleet/base/distributed_strategy.py
@@ -185,8 +185,8 @@ def __init__(self):
         DistributedStrategy can be serialized into protobuf file or deserialized from protobuf file
-        Users who run local training usually configure BuildStrategy and ExecutionStrategy, and
-        DistributedStrategy supports configurations from BuildStrategy and ExecutionStrategy
+        Users who run local training usually configure BuildStrategy, and
+        DistributedStrategy supports configurations from BuildStrategy.
         """
         self.strategy = distributed_strategy_pb2.DistributedStrategy()
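The updated docstring points users at the one configuration path that survives: build-time options set through BuildStrategy. A minimal sketch of that remaining flow, using the current paddle.static.BuildStrategy API (fuse_elewise_add_act_ops is just an illustrative option):

import paddle
from paddle.distributed import fleet

build_strategy = paddle.static.BuildStrategy()
build_strategy.fuse_elewise_add_act_ops = True  # one example build-time option

strategy = fleet.DistributedStrategy()
# Backed by the build_strategy proto field (tag 201), which this commit keeps.
strategy.build_strategy = build_strategy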
