[Eager] delete final state pre-name (#45306)
wanghuancoder authored Aug 26, 2022
1 parent 2dca718 commit 126940b
Showing 192 changed files with 2,589 additions and 2,567 deletions.
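This commit is a mechanical rename across the eager (dygraph) code paths: the "final state" pre-name is dropped from the new eager symbols, while the previously unprefixed legacy symbols take a legacy_ pre-name instead (e.g. eager_codegen becomes legacy_eager_codegen, core_ops_args_info becomes core_ops_legacy_args_info). A minimal before/after sketch of the pattern, using the add_n declaration that appears in this diff:

// Before: the eager "final state" API carried a final_state pre-name.
paddle::experimental::Tensor add_n_final_state_dygraph_function(
    const std::vector<paddle::experimental::Tensor>& x);

// After: the pre-name is dropped; the signature is unchanged.
paddle::experimental::Tensor add_n_dygraph_function(
    const std::vector<paddle::experimental::Tensor>& x);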
6 changes: 3 additions & 3 deletions .gitignore
@@ -66,14 +66,14 @@ paddle/infrt/dialect/pd/common/pd_ops_info.h
paddle/infrt/tests/dialect/Output
paddle/infrt/tests/lit.cfg.py
paddle/infrt/kernel/phi/infershaped/infershaped_kernel_launchers.cc
-paddle/fluid/pybind/eager_final_state_op_function.cc
+paddle/fluid/pybind/eager_op_function.cc

# these files (directories) are generated before build system generation
paddle/fluid/operators/generated_op.cc
paddle/phi/ops/compat/generated_sig.cc
paddle/phi/api/yaml/parsed_apis/
python/paddle/utils/code_gen/
-paddle/fluid/pybind/tmp_eager_final_state_op_function_impl.h
-paddle/fluid/pybind/eager_final_state_op_function_impl.h
+paddle/fluid/pybind/tmp_eager_op_function_impl.h
+paddle/fluid/pybind/eager_op_function_impl.h
paddle/fluid/pybind/eager_op_function_impl.h
paddle/fluid/pybind/op_function_impl.h
2 changes: 1 addition & 1 deletion paddle/fluid/eager/CMakeLists.txt
@@ -41,7 +41,7 @@ if(NOT ((NOT WITH_PYTHON) AND ON_INFER))
grad_tensor_holder
SRCS grad_tensor_holder.cc
DEPS grad_node_info gradient_accumulator)
-add_dependencies(grad_tensor_holder eager_final_state_codegen)
+add_dependencies(grad_tensor_holder eager_codegen)
cc_library(
backward
SRCS backward.cc
@@ -8,5 +8,5 @@ if(NOT (NOT WITH_PYTHON AND ON_INFER))
final_dygraph_node
SRCS nodes.cc ${eager_manual_nodes}
DEPS ${eager_deps})
-add_dependencies(final_dygraph_node eager_final_state_codegen)
+add_dependencies(final_dygraph_node eager_codegen)
endif()
@@ -8,5 +8,5 @@ if(NOT (NOT WITH_PYTHON AND ON_INFER))
final_dygraph_function
SRCS dygraph_functions.cc ${eager_manual_functions}
DEPS ${eager_deps})
-add_dependencies(final_dygraph_function eager_final_state_codegen)
+add_dependencies(final_dygraph_function eager_codegen)
endif()
@@ -16,10 +16,10 @@

#include "paddle/phi/api/include/tensor.h"

-paddle::experimental::Tensor add_n_final_state_dygraph_function(
+paddle::experimental::Tensor add_n_dygraph_function(
const std::vector<paddle::experimental::Tensor>& x);

-paddle::experimental::Tensor conv2d_final_state_dygraph_function(
+paddle::experimental::Tensor conv2d_dygraph_function(
const paddle::experimental::Tensor& input,
const paddle::experimental::Tensor& filter,
std::vector<int> strides,
@@ -23,7 +23,7 @@
#pragma GCC diagnostic ignored "-Wunused-variable"
DECLARE_bool(check_nan_inf);

-paddle::experimental::Tensor add_n_final_state_dygraph_function(
+paddle::experimental::Tensor add_n_dygraph_function(
const std::vector<paddle::experimental::Tensor>& x) {
// Dygraph Record Event
paddle::platform::RecordEvent dygraph_entrance_record_event(
@@ -46,7 +46,7 @@ paddle::experimental::Tensor add_n_final_state_dygraph_function(
paddle::imperative::AutoCastGuard guard(
egr::Controller::Instance().GetCurrentTracer(),
paddle::imperative::AmpLevel::O0);
-return add_n_final_state_dygraph_function(NEW_x);
+return add_n_dygraph_function(NEW_x);
}
}

@@ -56,7 +56,7 @@ paddle::experimental::Tensor add_n_final_state_dygraph_function(
std::vector<egr::AutogradMeta*>* x_autograd_meta = &x_autograd_meta_vec;
// Forward API Call
VLOG(3) << "Final State Running: "
<< "add_n_final_state_dygraph_function";
<< "add_n_dygraph_function";
auto api_result = paddle::experimental::add_n(x);
// Check NaN and Inf if needed
if (FLAGS_check_nan_inf) {
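The generated forwards above share an AMP re-entry pattern: when auto-casting is active, the inputs are cast, an AutoCastGuard pins the level to O0, and the function calls itself once so the recursive call takes the normal path. A dependency-free sketch of that control flow, where AmpLevel, AutoCastGuard, Tensor, and add_n are stand-ins rather than Paddle's real types:

#include <cstddef>
#include <vector>

enum class AmpLevel { O0, O1 };          // stand-in for paddle::imperative::AmpLevel
static AmpLevel g_amp_level = AmpLevel::O1;

struct AutoCastGuard {                   // RAII: pin the AMP level, restore on exit
  AmpLevel saved;
  explicit AutoCastGuard(AmpLevel lvl) : saved(g_amp_level) { g_amp_level = lvl; }
  ~AutoCastGuard() { g_amp_level = saved; }
};

using Tensor = std::vector<float>;       // stand-in for paddle::experimental::Tensor

Tensor add_n(const std::vector<Tensor>& x) {  // the "forward API call"
  Tensor out(x.empty() ? 0 : x[0].size(), 0.0f);
  for (const auto& t : x)
    for (std::size_t i = 0; i < t.size(); ++i) out[i] += t[i];
  return out;
}

Tensor add_n_dygraph_function(const std::vector<Tensor>& x) {
  if (g_amp_level != AmpLevel::O0) {
    auto NEW_x = x;                      // real code casts inputs to the AMP dtype
    AutoCastGuard guard(AmpLevel::O0);   // recursion now takes the O0 path
    return add_n_dygraph_function(NEW_x);
  }
  return add_n(x);                       // normal forward path
}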
@@ -23,7 +23,7 @@
#pragma GCC diagnostic ignored "-Wunused-variable"
DECLARE_bool(check_nan_inf);

-paddle::experimental::Tensor conv2d_final_state_dygraph_function(
+paddle::experimental::Tensor conv2d_dygraph_function(
const paddle::experimental::Tensor& input,
const paddle::experimental::Tensor& filter,
std::vector<int> strides,
@@ -59,17 +59,17 @@ paddle::experimental::Tensor conv2d_final_state_dygraph_function(
paddle::imperative::AutoCastGuard guard(
egr::Controller::Instance().GetCurrentTracer(),
paddle::imperative::AmpLevel::O0);
-return conv2d_final_state_dygraph_function(NEW_input,
-                                           NEW_filter,
-                                           strides,
-                                           paddings,
-                                           paddding_algorithm,
-                                           groups,
-                                           dilations,
-                                           data_format,
-                                           use_addto,
-                                           workspace_size_MB,
-                                           exhaustive_search);
+return conv2d_dygraph_function(NEW_input,
+                               NEW_filter,
+                               strides,
+                               paddings,
+                               paddding_algorithm,
+                               groups,
+                               dilations,
+                               data_format,
+                               use_addto,
+                               workspace_size_MB,
+                               exhaustive_search);
}
}

@@ -80,7 +80,7 @@ paddle::experimental::Tensor conv2d_final_state_dygraph_function(
egr::EagerUtils::nullable_autograd_meta(filter);
// Forward API Call
VLOG(3) << "Final State Running: "
<< "conv2d_final_state_dygraph_function";
<< "conv2d_dygraph_function";
auto api_result = paddle::experimental::conv2d(input,
filter,
strides,
@@ -64,8 +64,8 @@ AddNGradNodeFinal::operator()(

// dygraph function
for (size_t i = 0; i < returns[0].size(); i++) {
-returns[0][i] = ::scale_final_state_dygraph_function(
-    out_grad, phi::Scalar(1.0), 0.0, true);
+returns[0][i] =
+    ::scale_dygraph_function(out_grad, phi::Scalar(1.0), 0.0, true);
}

// Check NaN and Inf if needed
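As the hunk above shows, AddNGradNodeFinal fans the incoming output grad out to every input, scaled by 1.0 (add_n's gradient with respect to each input is the identity). A dependency-free sketch of that fan-out, where Tensor and scale are stand-ins for paddle::experimental::Tensor and the renamed ::scale_dygraph_function:

#include <cstddef>
#include <vector>

using Tensor = std::vector<float>;  // stand-in for paddle::experimental::Tensor

// Stand-in for ::scale_dygraph_function(out_grad, scale, bias, bias_after_scale).
Tensor scale(const Tensor& t, float s, float bias, bool /*bias_after_scale*/) {
  Tensor out(t.size());
  for (std::size_t i = 0; i < t.size(); ++i) out[i] = t[i] * s + bias;
  return out;
}

// add_n(x_0, ..., x_{n-1}) backward: d out / d x_i = 1, so every input
// receives the incoming grad unchanged (scale 1.0, bias 0.0).
std::vector<Tensor> add_n_grad(const Tensor& out_grad, std::size_t n_inputs) {
  std::vector<Tensor> returns(n_inputs);
  for (std::size_t i = 0; i < n_inputs; ++i)
    returns[i] = scale(out_grad, 1.0f, 0.0f, true);
  return returns;
}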
6 changes: 3 additions & 3 deletions paddle/fluid/eager/auto_code_generator/CMakeLists.txt
@@ -1,4 +1,4 @@
-add_subdirectory(final_state_generator)
+add_subdirectory(generator)

set(EAGER_GENERETOR_DEPS
${GLOB_OP_LIB}
@@ -88,7 +88,7 @@ if(WIN32)
endif()

add_custom_target(
-eager_codegen
+legacy_eager_codegen
COMMAND
"${eager_generator_path}/eager_generator.exe"
"${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/fluid_generated"
@@ -97,7 +97,7 @@ if(WIN32)
VERBATIM)
else()
add_custom_target(
-eager_codegen
+legacy_eager_codegen
COMMAND
${CMAKE_COMMAND} -E env
"LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH}:${CMAKE_CURRENT_BINARY_DIR}/../../pybind"
73 changes: 37 additions & 36 deletions paddle/fluid/eager/auto_code_generator/eager_generator.cc
@@ -37,11 +37,11 @@ namespace framework {

// To handle append_op at python-level
std::unordered_map<std::string, std::vector<std::string>>
-    core_ops_returns_info = {};
-std::unordered_map<std::string, std::vector<std::string>> core_ops_args_info =
-    {};
+    core_ops_legacy_returns_info = {};
std::unordered_map<std::string, std::vector<std::string>>
-    core_ops_args_type_info = {};
+    core_ops_legacy_args_info = {};
+std::unordered_map<std::string, std::vector<std::string>>
+    core_ops_legacy_args_type_info = {};

/* --- Static maps to handle corner cases --- */
static std::unordered_map<std::string, paddle::framework::AttributeMap>
@@ -1473,10 +1473,10 @@ static std::pair<std::string, std::string> GenerateForwardFunctionContents(

std::string dygraph_function_args_str = "";
std::string amp_function_call_args_str = "";
-core_ops_args_info[op_type] = {};
-core_ops_args_type_info[op_type] = {};
-core_ops_args_info[op_type].resize(in_vars.size());
-core_ops_args_type_info[op_type].resize(in_vars.size());
+core_ops_legacy_args_info[op_type] = {};
+core_ops_legacy_args_type_info[op_type] = {};
+core_ops_legacy_args_info[op_type].resize(in_vars.size());
+core_ops_legacy_args_type_info[op_type].resize(in_vars.size());

/* ------ Dygraph forward function generation ------ */
generated_function_body += " // Dygraph Forward Pass\n";
@@ -1500,7 +1500,7 @@
amp_function_call_args_str_list[input_position] =
" NEW_" + LegalizeVarName(input_name);

core_ops_args_type_info[op_type][input_position] = "list";
core_ops_legacy_args_type_info[op_type][input_position] = "list";
} else {
// inplace tensor can't be const
const char* FWD_INS_ARG_TEMPLATE;
@@ -1522,9 +1522,9 @@
amp_function_call_args_str_list[input_position] =
" NEW_" + LegalizeVarName(input_name);

core_ops_args_type_info[op_type][input_position] = "tensor";
core_ops_legacy_args_type_info[op_type][input_position] = "tensor";
}
-core_ops_args_info[op_type][input_position] = input_name;
+core_ops_legacy_args_info[op_type][input_position] = input_name;

if (input.dispensable()) continue;

@@ -1666,15 +1666,15 @@
dygraph_function_args_str += arg_str;
amp_function_call_args_str += (", " + LegalizeVarName(output_var_name));

core_ops_args_type_info[op_type].push_back("list");
core_ops_legacy_args_type_info[op_type].push_back("list");
} else {
const char* FWD_NUM_ARG_TEMPLATE = ", paddle::experimental::Tensor* %s";
std::string arg_str = paddle::string::Sprintf(
FWD_NUM_ARG_TEMPLATE, LegalizeVarName(output_var_name));
dygraph_function_args_str += arg_str;
amp_function_call_args_str += (", " + LegalizeVarName(output_var_name));

core_ops_args_type_info[op_type].push_back("tensor");
core_ops_legacy_args_type_info[op_type].push_back("tensor");
}

if (BeSameAsInput(output_name, input_names)) {
@@ -1693,7 +1693,7 @@
output_name,
LegalizeVarName(output_var_name));
}
-core_ops_args_info[op_type].push_back(output_name);
+core_ops_legacy_args_info[op_type].push_back(output_name);

} else if (!forward_inplace_map.empty() &&
forward_inplace_map.count(output_name)) {
@@ -1727,8 +1727,8 @@
"{ \"%s\", egr::EagerUtils::CreateVars(%s) },";
outs_contents_str += paddle::string::Sprintf(
FWD_OUTS_CONTENT_TEMPLATE, output_name, outnum);
-core_ops_args_info[op_type].push_back(outnum);
-core_ops_args_type_info[op_type].push_back("int");
+core_ops_legacy_args_info[op_type].push_back(outnum);
+core_ops_legacy_args_type_info[op_type].push_back("int");
} else {
const char* FWD_OUTS_CONTENT_TEMPLATE =
"{ \"%s\", "
@@ -2003,10 +2003,11 @@
VLOG(6) << "Converted Output VarBase to EagerVariable(s)";
/* ------ END Generate TraceOp ----- */

-// [Generation] Handle core_ops_returns_info
-// avoid inplace op changing core_ops_returns_info
-if (core_ops_returns_info.empty() || !core_ops_returns_info.count(op_type)) {
-  core_ops_returns_info[op_type] = return_contents;
+// [Generation] Handle core_ops_legacy_returns_info
+// avoid inplace op changing core_ops_legacy_returns_info
+if (core_ops_legacy_returns_info.empty() ||
+    !core_ops_legacy_returns_info.count(op_type)) {
+  core_ops_legacy_returns_info[op_type] = return_contents;
}

// [Generation] ComputeRequireGrad -> GradNodeCreation
@@ -2983,13 +2984,13 @@ static std::string GenerateDygraphHFileIncludes() {

dygraph_forward_api_includes_str +=
"extern std::unordered_map<std::string, std::vector<std::string>> "
"core_ops_args_info;\n";
"core_ops_legacy_args_info;\n";
dygraph_forward_api_includes_str +=
"extern std::unordered_map<std::string, std::vector<std::string>> "
"core_ops_args_type_info;\n";
"core_ops_legacy_args_type_info;\n";
dygraph_forward_api_includes_str +=
"extern std::unordered_map<std::string, std::vector<std::string>> "
"core_ops_returns_info;\n\n";
"core_ops_legacy_returns_info;\n\n";

return dygraph_forward_api_includes_str;
}
@@ -3060,7 +3061,7 @@ static void GenerateNodeCCFile(const std::string& node_cc_path,
static std::string ConvertCoreOpsInfosToString(
const std::unordered_map<std::string, std::vector<std::string>>&
core_ops_info) {
std::string core_ops_returns_info_init_str = "";
std::string core_ops_legacy_returns_info_init_str = "";
for (const auto& iter : core_ops_info) {
const char* Core_Ops_Returns_TEMPLATE = "{ \"%s\", { %s } },\n";
const std::string& op_type = iter.first;
@@ -3074,23 +3075,23 @@
if (returns_str.size() > 0) returns_str.pop_back();
std::string op_type_init_str = paddle::string::Sprintf(
Core_Ops_Returns_TEMPLATE, op_type, returns_str);
-core_ops_returns_info_init_str += op_type_init_str;
+core_ops_legacy_returns_info_init_str += op_type_init_str;
}

// Remove trailing ','
-if (core_ops_returns_info_init_str.size() > 0)
-    core_ops_returns_info_init_str.pop_back();
+if (core_ops_legacy_returns_info_init_str.size() > 0)
+    core_ops_legacy_returns_info_init_str.pop_back();

-return core_ops_returns_info_init_str;
+return core_ops_legacy_returns_info_init_str;
}

static std::string GenerateCoreOpsArgsInfo() {
const char* Core_Ops_Returns_MAP_TEMPLATE =
"std::unordered_map<std::string, std::vector<std::string>> "
"core_ops_args_info = { %s };\n";
"core_ops_legacy_args_info = { %s };\n";

std::string core_ops_args_info_init_str =
-ConvertCoreOpsInfosToString(core_ops_args_info);
+ConvertCoreOpsInfosToString(core_ops_legacy_args_info);

std::string core_ops_info_str = paddle::string::Sprintf(
Core_Ops_Returns_MAP_TEMPLATE, core_ops_args_info_init_str);
@@ -3101,10 +3102,10 @@
static std::string GenerateCoreOpsArgsTypeInfo() {
const char* Core_Ops_Returns_MAP_TEMPLATE =
"std::unordered_map<std::string, std::vector<std::string>> "
"core_ops_args_type_info = { %s };\n";
"core_ops_legacy_args_type_info = { %s };\n";

std::string core_ops_args_type_info_init_str =
-ConvertCoreOpsInfosToString(core_ops_args_type_info);
+ConvertCoreOpsInfosToString(core_ops_legacy_args_type_info);

std::string core_ops_info_str = paddle::string::Sprintf(
Core_Ops_Returns_MAP_TEMPLATE, core_ops_args_type_info_init_str);
@@ -3115,13 +3116,13 @@ static std::string GenerateCoreOpsArgsTypeInfo() {
static std::string GenerateCoreOpsReturnsInfo() {
const char* Core_Ops_Returns_MAP_TEMPLATE =
"std::unordered_map<std::string, std::vector<std::string>> "
"core_ops_returns_info = { %s };\n";
"core_ops_legacy_returns_info = { %s };\n";

-std::string core_ops_returns_info_init_str =
-    ConvertCoreOpsInfosToString(core_ops_returns_info);
+std::string core_ops_legacy_returns_info_init_str =
+    ConvertCoreOpsInfosToString(core_ops_legacy_returns_info);

std::string core_ops_info_str = paddle::string::Sprintf(
-Core_Ops_Returns_MAP_TEMPLATE, core_ops_returns_info_init_str);
+Core_Ops_Returns_MAP_TEMPLATE, core_ops_legacy_returns_info_init_str);

return core_ops_info_str;
}
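For reference, the generator templates above expand into plain C++ in the generated forward files. The extern declarations below come directly from the strings in GenerateDygraphHFileIncludes; the map definition is a hypothetical example of what ConvertCoreOpsInfosToString would emit for a single op (the "scale" entry and its contents are illustrative, not real generator output):

// Declared in the generated dygraph forward header:
extern std::unordered_map<std::string, std::vector<std::string>>
    core_ops_legacy_args_info;
extern std::unordered_map<std::string, std::vector<std::string>>
    core_ops_legacy_args_type_info;
extern std::unordered_map<std::string, std::vector<std::string>>
    core_ops_legacy_returns_info;

// Defined in the generated .cc file, e.g. (hypothetical "scale" entry):
std::unordered_map<std::string, std::vector<std::string>>
    core_ops_legacy_returns_info = { { "scale", { "Out" } } };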
@@ -124,7 +124,7 @@ def GenerateFileStructureForIntermediateDygraph(eager_dir, split_count):
".tmp.cc\" \"${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/fluid_generated/nodes/nodes"
+ str(i + 1) + ".cc\"\n")

f.write(" DEPENDS eager_codegen\n")
f.write(" DEPENDS legacy_eager_codegen\n")
f.write(" VERBATIM)\n")

f.write("cc_library(dygraph_node SRCS ")
@@ -154,7 +154,7 @@ def GenerateFileStructureForIntermediateDygraph(eager_dir, split_count):
f.write(
" COMMAND ${CMAKE_COMMAND} -E copy_if_different \"${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/fluid_generated/forwards/dygraph_forward_functions_returns_info.tmp.cc\" \"${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/fluid_generated/forwards/dygraph_forward_functions_returns_info.cc\"\n"
)
f.write(" DEPENDS eager_codegen\n")
f.write(" DEPENDS legacy_eager_codegen\n")
f.write(" VERBATIM)\n")

f.write("cc_library(dygraph_function SRCS ")
(Diff truncated: the remaining changed files are not shown here.)
